diff --git a/.cargo/config.toml b/.cargo/config.toml index 767d03a..302ce48 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,3 +1,6 @@ +[target.'cfg(unix)'] +runner = "sudo -E" + [alias] # command aliases rr = "run --release" bb = "build --release" diff --git a/.forgejo/workflows/build-rust.yml b/.forgejo/workflows/build-rust.yml deleted file mode 100644 index 9ed49e1..0000000 --- a/.forgejo/workflows/build-rust.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Build Rust - -on: - push: - branches: - - main - pull_request: - branches: - - "**" - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - rust: - name: Cargo Test - runs-on: [self-hosted, linux, x86_64, burrow-forge] - steps: - - name: Checkout - shell: bash - run: | - set -euo pipefail - repo_url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git" - if [ ! -d .git ]; then - git init . - fi - if git remote get-url origin >/dev/null 2>&1; then - git remote set-url origin "${repo_url}" - else - git remote add origin "${repo_url}" - fi - git fetch --force --tags origin "${GITHUB_SHA}" - git checkout --force --detach FETCH_HEAD - git clean -ffdqx - - - name: Test - shell: bash - run: | - set -euo pipefail - nix develop .#ci -c cargo test --workspace --all-features diff --git a/.forgejo/workflows/build-site.yml b/.forgejo/workflows/build-site.yml deleted file mode 100644 index 67be5bb..0000000 --- a/.forgejo/workflows/build-site.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Build Site - -on: - push: - branches: - - main - pull_request: - branches: - - "**" - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - site: - name: Next.js Build - runs-on: [self-hosted, linux, x86_64, burrow-forge] - steps: - - name: Checkout - shell: bash - run: | - set -euo pipefail - repo_url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git" - if [ ! -d .git ]; then - git init . 
- fi - if git remote get-url origin >/dev/null 2>&1; then - git remote set-url origin "${repo_url}" - else - git remote add origin "${repo_url}" - fi - git fetch --force --tags origin "${GITHUB_SHA}" - git checkout --force --detach FETCH_HEAD - git clean -ffdqx - - - name: Build - shell: bash - run: | - set -euo pipefail - nix develop .#ci -c bash -lc 'cd site && npm ci --no-audit --no-fund && npm run build' diff --git a/.forgejo/workflows/lint-governance.yml b/.forgejo/workflows/lint-governance.yml deleted file mode 100644 index 2db94cc..0000000 --- a/.forgejo/workflows/lint-governance.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Lint Governance - -on: - push: - branches: - - main - pull_request: - branches: - - "**" - workflow_dispatch: - -jobs: - governance: - name: BEP Metadata - runs-on: [self-hosted, linux, x86_64, burrow-forge] - steps: - - name: Checkout - shell: bash - run: | - set -euo pipefail - repo_url="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git" - if [ ! -d .git ]; then - git init . 
- fi - if git remote get-url origin >/dev/null 2>&1; then - git remote set-url origin "${repo_url}" - else - git remote add origin "${repo_url}" - fi - git fetch --force --tags origin "${GITHUB_SHA}" - git checkout --force --detach FETCH_HEAD - git clean -ffdqx - - - name: Validate BEP metadata - shell: bash - run: | - set -euo pipefail - python3 Scripts/check-bep-metadata.py diff --git a/.forgejo/workflows/release.yml b/.forgejo/workflows/release.yml deleted file mode 100644 index 3d1e92a..0000000 --- a/.forgejo/workflows/release.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Release - -on: - push: - tags: - - "v*" - workflow_dispatch: - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: false - -jobs: - release: - name: Release Build - runs-on: namespace-profile-linux-medium - steps: - - name: Checkout - uses: https://code.forgejo.org/actions/checkout@v4 - with: - token: ${{ github.token }} - fetch-depth: 0 - - - name: Bootstrap Nix - shell: bash - run: | - set -euo pipefail - chmod +x Scripts/ci/ensure-nix.sh - Scripts/ci/ensure-nix.sh - - - name: Build release artifacts - shell: bash - env: - RELEASE_REF: ${{ github.ref_name }} - run: | - set -euo pipefail - ref="${RELEASE_REF:-manual-${GITHUB_SHA::7}}" - export RELEASE_REF="${ref}" - chmod +x Scripts/ci/build-release-artifacts.sh - nix develop .#ci -c Scripts/ci/build-release-artifacts.sh - - - name: Upload release artifacts - uses: https://code.forgejo.org/actions/upload-artifact@v4 - with: - name: burrow-release-${{ github.ref_name }} - path: dist/* - if-no-files-found: error - - - name: Publish Forgejo release - if: startsWith(github.ref, 'refs/tags/') - shell: bash - env: - RELEASE_TAG: ${{ github.ref_name }} - API_URL: ${{ github.api_url }} - REPOSITORY: ${{ github.repository }} - TOKEN: ${{ github.token }} - run: | - set -euo pipefail - chmod +x Scripts/ci/publish-forgejo-release.sh - nix develop .#ci -c Scripts/ci/publish-forgejo-release.sh diff --git 
a/.github/actions/archive/action.yml b/.github/actions/archive/action.yml index e49eb0d..c34bd3c 100644 --- a/.github/actions/archive/action.yml +++ b/.github/actions/archive/action.yml @@ -26,18 +26,18 @@ runs: run: | echo "${{ inputs.app-store-key }}" > AuthKey_${{ inputs.app-store-key-id }}.p8 - xcodebuild clean archive \ + xcodebuild archive \ -allowProvisioningUpdates \ -allowProvisioningDeviceRegistration \ - -skipPackagePluginValidation \ - -skipMacroValidation \ - -onlyUsePackageVersionsFromResolvedFile \ -authenticationKeyID ${{ inputs.app-store-key-id }} \ -authenticationKeyIssuerID ${{ inputs.app-store-key-issuer-id }} \ -authenticationKeyPath "${PWD}/AuthKey_${{ inputs.app-store-key-id }}.p8" \ + -onlyUsePackageVersionsFromResolvedFile \ -scheme '${{ inputs.scheme }}' \ -destination '${{ inputs.destination }}' \ -archivePath '${{ inputs.archive-path }}' \ -resultBundlePath BuildResults.xcresult + ./Tools/xcresulttool-github BuildResults.xcresult + rm -rf AuthKey_${{ inputs.app-store-key-id }}.p8 diff --git a/.github/actions/build-for-testing/action.yml b/.github/actions/build-for-testing/action.yml index 185c4ab..ce91b43 100644 --- a/.github/actions/build-for-testing/action.yml +++ b/.github/actions/build-for-testing/action.yml @@ -18,36 +18,32 @@ inputs: runs: using: composite steps: - - name: Xcode Cache + - name: Cache Swift Packages uses: actions/cache@v3 with: path: | Apple/PackageCache Apple/SourcePackages - Apple/DerivedData key: ${{ runner.os }}-${{ inputs.scheme }}-${{ hashFiles('**/Package.resolved') }} restore-keys: | - ${{ runner.os }}-${{ inputs.scheme }}-${{ hashFiles('**/Package.resolved') }} ${{ runner.os }}-${{ inputs.scheme }}- - ${{ runner.os }}- - name: Build shell: bash working-directory: Apple run: | echo "${{ inputs.app-store-key }}" > AuthKey_${{ inputs.app-store-key-id }}.p8 - xcodebuild build-for-testing \ + xcodebuild clean build-for-testing \ -allowProvisioningUpdates \ -allowProvisioningDeviceRegistration \ - 
-skipPackagePluginValidation \ - -skipMacroValidation \ - -onlyUsePackageVersionsFromResolvedFile \ -authenticationKeyID ${{ inputs.app-store-key-id }} \ -authenticationKeyIssuerID ${{ inputs.app-store-key-issuer-id }} \ -authenticationKeyPath "${PWD}/AuthKey_${{ inputs.app-store-key-id }}.p8" \ + -onlyUsePackageVersionsFromResolvedFile \ -clonedSourcePackagesDirPath SourcePackages \ -packageCachePath $PWD/PackageCache \ - -derivedDataPath $PWD/DerivedData \ + -skipPackagePluginValidation \ + -skipMacroValidation \ -scheme '${{ inputs.scheme }}' \ -destination '${{ inputs.destination }}' \ -resultBundlePath BuildResults.xcresult diff --git a/.github/actions/download-profiles/action.yml b/.github/actions/download-profiles/action.yml deleted file mode 100644 index 32b615c..0000000 --- a/.github/actions/download-profiles/action.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Download Provisioning Profiles -inputs: - app-store-key: - description: App Store key in PEM PKCS#8 format - required: true - app-store-key-id: - description: App Store key ID - required: true - app-store-key-issuer-id: - description: App Store key issuer ID - required: true -runs: - using: composite - steps: - - shell: bash - env: - FASTLANE_OPT_OUT_USAGE: 'YES' - run: | - APP_STORE_KEY=$(echo "${{ inputs.app-store-key }}" | jq -sR .) 
- cat << EOF > api-key.json - { - "key_id": "${{ inputs.app-store-key-id }}", - "issuer_id": "${{ inputs.app-store-key-issuer-id }}", - "key": $APP_STORE_KEY - } - EOF - - fastlane sigh download_all --api_key_path api-key.json - - rm -rf api-key.json diff --git a/.github/actions/export/action.yml b/.github/actions/export/action.yml index 75b748f..bf007a7 100644 --- a/.github/actions/export/action.yml +++ b/.github/actions/export/action.yml @@ -1,4 +1,4 @@ -name: Export +name: Notarize inputs: app-store-key: description: App Store key in PEM PKCS#8 format @@ -12,8 +12,11 @@ inputs: archive-path: description: Xcode archive path required: true - export-options: - description: The export options in JSON format + destination: + description: The Xcode export destination. This can either be "export" or "upload" + required: true + method: + description: The Xcode export method. This can be one of app-store, validation, ad-hoc, package, enterprise, development, developer-id, or mac-application. required: true export-path: description: The path to export the archive to @@ -21,20 +24,19 @@ inputs: runs: using: composite steps: - - shell: bash + - id: notarize + shell: bash working-directory: Apple run: | echo "${{ inputs.app-store-key }}" > AuthKey_${{ inputs.app-store-key-id }}.p8 - echo '${{ inputs.export-options }}' | plutil -convert xml1 -o ExportOptions.plist - + echo '{"destination":"${{ inputs.destination }}","method":"${{ inputs.method }}"}' \ + | plutil -convert xml1 -o ExportOptions.plist - xcodebuild \ -exportArchive \ -allowProvisioningUpdates \ -allowProvisioningDeviceRegistration \ - -skipPackagePluginValidation \ - -skipMacroValidation \ - -onlyUsePackageVersionsFromResolvedFile \ -authenticationKeyID ${{ inputs.app-store-key-id }} \ -authenticationKeyIssuerID ${{ inputs.app-store-key-issuer-id }} \ -authenticationKeyPath "${PWD}/AuthKey_${{ inputs.app-store-key-id }}.p8" \ diff --git a/.github/actions/notarize/action.yml b/.github/actions/notarize/action.yml 
deleted file mode 100644 index efd2159..0000000 --- a/.github/actions/notarize/action.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Notarize -inputs: - app-store-key: - description: App Store key in PEM PKCS#8 format - required: true - app-store-key-id: - description: App Store key ID - required: true - app-store-key-issuer-id: - description: App Store key issuer ID - required: true -runs: - using: composite - steps: - - id: notarize - shell: bash - working-directory: Apple - run: | - echo "${{ inputs.app-store-key }}" > AuthKey_${{ inputs.app-store-key-id }}.p8 - - ditto -c -k --keepParent Release/Burrow.app Upload.zip - xcrun notarytool submit --wait --issuer ${{ inputs.app-store-key-issuer-id }} --key-id ${{ inputs.app-store-key-id }} --key "${PWD}/AuthKey_${{ inputs.app-store-key-id }}.p8" Upload.zip - xcrun stapler staple Release/Burrow.app - - rm -rf AuthKey_${{ inputs.app-store-key-id }}.p8 Release diff --git a/.github/actions/test-without-building/action.yml b/.github/actions/test-without-building/action.yml index a097d4a..5903d07 100644 --- a/.github/actions/test-without-building/action.yml +++ b/.github/actions/test-without-building/action.yml @@ -18,6 +18,9 @@ inputs: runs: using: composite steps: + - shell: bash + id: vars + run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - shell: bash working-directory: Apple run: | @@ -25,10 +28,10 @@ runs: -scheme '${{ inputs.scheme }}' \ -destination '${{ inputs.destination }}' \ ${{ inputs.test-plan && '-testPlan ' }}${{ inputs.test-plan }} \ - -resultBundlePath "${{ inputs.artifact-prefix }}.xcresult" + -resultBundlePath "${{ inputs.artifact-prefix }}-${{ steps.vars.outputs.sha_short }}.xcresult" - uses: kishikawakatsumi/xcresulttool@v1 if: always() with: - path: Apple/${{ inputs.artifact-prefix }}.xcresult + path: Apple/${{ inputs.artifact-prefix }}-${{ steps.vars.outputs.sha_short }}.xcresult title: ${{ inputs.check-name }} show-passed-tests: false diff --git 
a/.github/workflows/build-appimage.yml b/.github/workflows/build-appimage.yml index bd29b07..ef5c525 100644 --- a/.github/workflows/build-appimage.yml +++ b/.github/workflows/build-appimage.yml @@ -1,14 +1,8 @@ name: Build AppImage on: push: - branches: - - main + branches: [main] pull_request: - branches: - - "*" -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true jobs: appimage: name: Build AppImage @@ -23,7 +17,7 @@ jobs: docker cp temp:/app/burrow-gtk/build-appimage/Burrow-x86_64.AppImage . docker rm temp - uses: actions/upload-artifact@v4 - name: Upload to GitHub with: name: AppImage path: Burrow-x86_64.AppImage + diff --git a/.github/workflows/build-apple.yml b/.github/workflows/build-apple.yml index 5a135b4..da0f56a 100644 --- a/.github/workflows/build-apple.yml +++ b/.github/workflows/build-apple.yml @@ -1,7 +1,7 @@ -name: Build Apple Apps +name: Apple Build on: push: - branches: + branches: - main pull_request: branches: @@ -12,7 +12,7 @@ concurrency: jobs: build: name: Build App (${{ matrix.platform }}) - runs-on: macos-14 + runs-on: macos-13 strategy: fail-fast: false matrix: @@ -24,7 +24,7 @@ jobs: rust-targets: - aarch64-apple-ios - scheme: App - destination: platform=iOS Simulator,OS=18.0,name=iPhone 15 Pro + destination: platform=iOS Simulator,OS=17.2,name=iPhone 15 Pro platform: iOS Simulator sdk-name: iphonesimulator rust-targets: @@ -38,8 +38,7 @@ jobs: - x86_64-apple-darwin - aarch64-apple-darwin env: - DEVELOPER_DIR: /Applications/Xcode_16.0.app/Contents/Developer - PROTOC_PATH: /opt/homebrew/bin/protoc + DEVELOPER_DIR: /Applications/Xcode_15.2.app/Contents/Developer steps: - name: Checkout uses: actions/checkout@v3 @@ -54,11 +53,8 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - toolchain: 1.85.0 + toolchain: stable targets: ${{ join(matrix.rust-targets, ', ') }} - - name: Install Protobuf - shell: bash - run: brew install protobuf - name: Build 
id: build uses: ./.github/actions/build-for-testing @@ -68,7 +64,7 @@ jobs: app-store-key: ${{ secrets.APPSTORE_KEY }} app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} - - name: Run Unit Tests + - name: Xcode Unit Test if: ${{ matrix.xcode-unit-test != '' }} continue-on-error: true uses: ./.github/actions/test-without-building @@ -78,7 +74,7 @@ jobs: test-plan: ${{ matrix.xcode-unit-test }} artifact-prefix: unit-tests-${{ matrix.sdk-name }} check-name: Xcode Unit Tests (${{ matrix.platform }}) - - name: Run UI Tests + - name: Xcode UI Test if: ${{ matrix.xcode-ui-test != '' }} continue-on-error: true uses: ./.github/actions/test-without-building diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 6a3dae1..1ce7a9a 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -6,9 +6,6 @@ on: pull_request: branches: - "*" -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true jobs: build: name: Build Docker Image @@ -36,7 +33,6 @@ jobs: images: ghcr.io/${{ github.repository }} tags: | type=sha - type=match,pattern=builds/(.*),group=1 type=raw,value=latest,enable={{is_default_branch}} - name: Build and Push uses: docker/build-push-action@v4 diff --git a/.github/workflows/build-rpm.yml b/.github/workflows/build-rpm.yml index 029bf16..fd5837c 100644 --- a/.github/workflows/build-rpm.yml +++ b/.github/workflows/build-rpm.yml @@ -1,11 +1,16 @@ -on: workflow_dispatch name: Build RPM +on: + push: + branches: [ "main" ] + pull_request: + branches: + - "*" jobs: build: name: Build RPM runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 - name: Install RPM run: cargo install cargo-generate-rpm @@ -15,3 +20,4 @@ jobs: strip -s target/release/burrow - name: Build RPM run: cargo generate-rpm -p burrow + diff 
--git a/.github/workflows/build-rust.yml b/.github/workflows/build-rust.yml index cbbdd81..4c3782a 100644 --- a/.github/workflows/build-rust.yml +++ b/.github/workflows/build-rust.yml @@ -1,4 +1,4 @@ -name: Build Rust Crate +name: Rust Build on: push: branches: @@ -6,9 +6,6 @@ on: pull_request: branches: - "*" -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true jobs: build: name: Build Crate (${{ matrix.platform }}) @@ -24,21 +21,15 @@ jobs: - x86_64-unknown-linux-gnu targets: - aarch64-unknown-linux-gnu - - os: macos-13 - platform: macOS (Intel) - xcode: /Applications/Xcode_15.2.app + - os: macos-12 + platform: macOS test-targets: - x86_64-apple-darwin targets: - - x86_64-apple-ios - - os: macos-14 - platform: macOS - xcode: /Applications/Xcode_16.0.app - test-targets: - aarch64-apple-darwin - targets: - aarch64-apple-ios - aarch64-apple-ios-sim + - x86_64-apple-ios - os: windows-2022 platform: Windows test-targets: @@ -47,11 +38,10 @@ jobs: - aarch64-pc-windows-msvc runs-on: ${{ matrix.os }} env: - DEVELOPER_DIR: ${{ matrix.xcode }}/Contents/Developer + DEVELOPER_DIR: /Applications/Xcode_14.2.app/Contents/Developer CARGO_INCREMENTAL: 0 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc RUST_BACKTRACE: short - PROTOC_VERSION: 3.25.1 steps: - name: Checkout uses: actions/checkout@v3 @@ -64,25 +54,21 @@ jobs: run: | sudo apt-get update sudo apt-get install -y ${{ join(matrix.packages, ' ') }} - - name: Configure LLVM + - name: Install Windows Deps if: matrix.os == 'windows-2022' shell: bash run: echo "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Tools\Llvm\x64\bin" >> $GITHUB_PATH - - name: Install protoc - uses: taiki-e/install-action@v2 - with: - tool: protoc@${{ env.PROTOC_VERSION }} - name: Install Rust uses: dtolnay/rust-toolchain@stable with: - toolchain: 1.85.0 + toolchain: stable components: rustfmt targets: ${{ join(matrix.targets, ', ') }} - 
name: Setup Rust Cache uses: Swatinem/rust-cache@v2 - name: Build shell: bash - run: cargo build --locked --verbose --workspace --all-features --target ${{ join(matrix.targets, ' --target ') }} --target ${{ join(matrix.test-targets, ' --target ') }} + run: cargo build --verbose --workspace --all-features --target ${{ join(matrix.targets, ' --target ') }} --target ${{ join(matrix.test-targets, ' --target ') }} - name: Test shell: bash - run: cargo test --locked --verbose --workspace --all-features --target ${{ join(matrix.test-targets, ' --target ') }} + run: cargo test --verbose --workspace --all-features --target ${{ join(matrix.test-targets, ' --target ') }} diff --git a/.github/workflows/lint-git.yml b/.github/workflows/lint-git.yml index 2f7c72e..aefe199 100644 --- a/.github/workflows/lint-git.yml +++ b/.github/workflows/lint-git.yml @@ -8,14 +8,13 @@ jobs: name: Git Lint runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 0 - - name: Install - shell: bash - run: python -m pip install gitlint - - name: Lint - shell: bash - run: gitlint --commits "${{ github.event.pull_request.base.sha }}..HEAD" + - uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + fetch-depth: 0 + - name: Install Gitlint + shell: bash + run: python -m pip install gitlint + - name: Run Gitlint + shell: bash + run: gitlint --commits "${{ github.event.pull_request.base.sha }}..HEAD" diff --git a/.github/workflows/lint-governance.yml b/.github/workflows/lint-governance.yml deleted file mode 100644 index 08b665c..0000000 --- a/.github/workflows/lint-governance.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Governance Lint - -on: - pull_request: - branches: - - "*" - -jobs: - governance: - name: BEP Metadata - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 0 - - - name: 
Validate BEP metadata - shell: bash - run: | - set -euo pipefail - python3 Scripts/check-bep-metadata.py diff --git a/.github/workflows/lint-swift.yml b/.github/workflows/lint-swift.yml index 857f575..7e62afd 100644 --- a/.github/workflows/lint-swift.yml +++ b/.github/workflows/lint-swift.yml @@ -1,5 +1,8 @@ name: Swift Lint on: + push: + branches: + - main pull_request: branches: - "*" @@ -11,6 +14,8 @@ jobs: image: ghcr.io/realm/swiftlint:latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v3 + with: + ssh-key: ${{ secrets.DEPLOY_KEY }} - name: Lint - run: swiftlint lint --strict --reporter github-actions-logging + run: swiftlint lint --reporter github-actions-logging diff --git a/.github/workflows/release-apple.yml b/.github/workflows/release-apple.yml index b36ed73..3ea185d 100644 --- a/.github/workflows/release-apple.yml +++ b/.github/workflows/release-apple.yml @@ -1,120 +1,65 @@ -name: Release (Apple) +name: Build Apple Release on: release: types: - created jobs: build: - name: Build ${{ matrix.platform }} Release - runs-on: macos-14 - permissions: - contents: write + name: Build ${{ matrix.configuration['platform'] }} Release + runs-on: macos-13 strategy: fail-fast: false matrix: - include: - - platform: iOS - rust-targets: - - aarch64-apple-ios - - platform: macOS - rust-targets: - - x86_64-apple-darwin - - aarch64-apple-darwin + configuration: + - scheme: App (iOS) + destination: generic/platform=iOS + platform: iOS + method: ad-hoc + artifact-file: Apple/Release/Burrow.ipa + - scheme: App (macOS) + destination: generic/platform=macOS + platform: macOS + method: mac-application + artifact-file: Burrow.app.txz env: - DEVELOPER_DIR: /Applications/Xcode_16.0.app/Contents/Developer - PROTOC_PATH: /opt/homebrew/bin/protoc + DEVELOPER_DIR: /Applications/Xcode_15.2.app/Contents/Developer steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v3 with: - fetch-depth: 0 + ssh-key: ${{ secrets.DEPLOY_KEY }} + 
submodules: recursive - name: Import Certificate uses: ./.github/actions/import-cert with: certificate: ${{ secrets.DEVELOPER_CERT }} password: ${{ secrets.DEVELOPER_CERT_PASSWORD }} - - name: Download Provisioning Profiles - uses: ./.github/actions/download-profiles - with: - app-store-key: ${{ secrets.APPSTORE_KEY }} - app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} - app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} - - name: Install Provisioning Profiles - shell: bash - run: | - mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles/ - cp -f Apple/Profiles/* ~/Library/MobileDevice/Provisioning\ Profiles/ - - name: Install Rust - uses: dtolnay/rust-toolchain@stable - with: - toolchain: 1.85.0 - targets: ${{ join(matrix.rust-targets, ', ') }} - - name: Install Protobuf - shell: bash - run: brew install protobuf - - name: Configure Version - id: version - shell: bash - run: echo "BUILD_NUMBER=$(Tools/version.sh)" >> $GITHUB_OUTPUT - name: Archive uses: ./.github/actions/archive with: - scheme: App - destination: generic/platform=${{ matrix.platform }} + scheme: ${{ matrix.configuration['scheme'] }} + destination: ${{ matrix.configuration['destination'] }} app-store-key: ${{ secrets.APPSTORE_KEY }} app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} archive-path: Burrow.xcarchive - - name: Export + - name: Export Locally uses: ./.github/actions/export with: - method: ${{ matrix.platform == 'macOS' && 'developer-id' || 'ad-hoc' }} + method: ${{ matrix.configuration['method'] }} destination: export app-store-key: ${{ secrets.APPSTORE_KEY }} app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} archive-path: Burrow.xcarchive - export-options: | - {"teamID":"P6PV2R9443","destination":"export","method":"developer-id","provisioningProfiles":{"com.hackclub.burrow":"Burrow Developer ID","com.hackclub.burrow.network":"Burrow Network Developer 
ID"},"signingCertificate":"Developer ID Application","signingStyle":"manual"} export-path: Release - - name: Notarize - if: ${{ matrix.platform == 'macOS' }} - uses: ./.github/actions/notarize - with: - app-store-key: ${{ secrets.APPSTORE_KEY }} - app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} - app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} - - name: Compress (iOS) - if: ${{ matrix.platform == 'iOS' }} + - name: Compress + if: ${{ matrix.configuration['platform'] == 'macOS' }} shell: bash - run: | - cp Apple/Release/Burrow.ipa Burrow.ipa - aa archive -a lzma -b 8m -d Apple -subdir Burrow.xcarchive -o Burrow-${{ matrix.platform }}.xcarchive.aar - rm -rf Apple/Release - - name: Compress (macOS) - if: ${{ matrix.platform == 'macOS' }} - shell: bash - run: | - aa archive -a lzma -b 8m -d Apple/Release -subdir Burrow.app -o Burrow.app.aar - aa archive -a lzma -b 8m -d Apple -subdir Burrow.xcarchive -o Burrow-${{ matrix.platform }}.xcarchive.aar - rm -rf Apple/Release - - name: Upload to GitHub - uses: SierraSoftworks/gh-releases@v1.0.7 + run: tar --options xz:compression-level=9 -C Apple/Release -cJf Burrow.app.txz ./ + - name: Attach Artifact + uses: SierraSoftworks/gh-releases@v1.0.6 with: token: ${{ secrets.GITHUB_TOKEN }} - release_tag: ${{ github.ref_name }} - overwrite: 'true' - files: | - ${{ matrix.platform == 'macOS' && 'Burrow.aap.aar' || 'Burrow.ipa' }} - Burrow-${{ matrix.platform }}.xcarchive.aar - - name: Upload to App Store Connect - if: ${{ matrix.platform == 'iOS' }} - uses: ./.github/actions/export - with: - app-store-key: ${{ secrets.APPSTORE_KEY }} - app-store-key-id: ${{ secrets.APPSTORE_KEY_ID }} - app-store-key-issuer-id: ${{ secrets.APPSTORE_KEY_ISSUER_ID }} - archive-path: Burrow.xcarchive - export-options: | - {"method": "app-store", "destination": "upload"} - export-path: Release + overwrite: 'false' + files: ${{ matrix.configuration['artifact-file'] }} diff --git a/.github/workflows/release-if-needed.yaml 
b/.github/workflows/release-if-needed.yaml deleted file mode 100644 index 79f0d63..0000000 --- a/.github/workflows/release-if-needed.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: Create Release If Needed -on: - workflow_dispatch: - schedule: - - cron: '0 10 * * *' -concurrency: - group: ${{ github.workflow }} -jobs: - create: - name: Create Release If Needed - runs-on: ubuntu-latest - env: - GH_TOKEN: ${{ github.token }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - shell: bash - run: | - if [[ $(Tools/version.sh status) == "dirty" ]]; then - gh workflow run release-now.yml - fi diff --git a/.github/workflows/release-linux.yml b/.github/workflows/release-linux.yml deleted file mode 100644 index 7db9bcf..0000000 --- a/.github/workflows/release-linux.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Release (Linux) -on: - release: - types: - - created -jobs: - appimage: - name: Build AppImage - runs-on: ubuntu-latest - container: docker - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Build AppImage - run: | - docker build -t appimage-builder . -f burrow-gtk/build-aux/Dockerfile - docker create --name temp appimage-builder - docker cp temp:/app/burrow-gtk/build-appimage/Burrow-x86_64.AppImage . 
- docker rm temp - - name: Attach Artifacts - uses: SierraSoftworks/gh-releases@v1.0.7 - with: - token: ${{ secrets.GITHUB_TOKEN }} - release_tag: ${{ github.ref_name }} - overwrite: "true" - files: | - Burrow-x86_64.AppImage diff --git a/.github/workflows/release-now.yml b/.github/workflows/release-now.yml deleted file mode 100644 index 229f6c9..0000000 --- a/.github/workflows/release-now.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Create Release -on: workflow_dispatch -concurrency: - group: ${{ github.workflow }} -jobs: - create: - env: - GH_TOKEN: ${{ secrets.GH_RELEASE_TOKEN }} - name: Create Release - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - shell: bash - run: Tools/version.sh increment diff --git a/.gitignore b/.gitignore index 7efe903..dc886ed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,20 +1,8 @@ # Xcode xcuserdata -Apple/build/ - -# Swift -Apple/Package/.swiftpm/ # Rust target/ -.env .DS_STORE .idea/ - -tmp/ -intake/ - -*.db -*.sqlite3 -*.sock diff --git a/.swiftlint.yml b/.swiftlint.yml index 8efc85e..d609718 100644 --- a/.swiftlint.yml +++ b/.swiftlint.yml @@ -30,6 +30,7 @@ opt_in_rules: - function_default_parameter_at_end - ibinspectable_in_extension - identical_operands +- implicitly_unwrapped_optional - indentation_width - joined_default_parameter - last_where @@ -45,6 +46,7 @@ opt_in_rules: - multiline_parameters - multiline_parameters_brackets - no_extension_access_modifier +- no_grouping_extension - nslocalizedstring_key - nslocalizedstring_require_bundle - number_separator @@ -74,7 +76,9 @@ opt_in_rules: - sorted_first_last - sorted_imports - static_operator +- strict_fileprivate - strong_iboutlet +- switch_case_on_newline - test_case_accessibility - toggle_bool - trailing_closure @@ -93,5 +97,3 @@ disabled_rules: - force_try - nesting - todo -- trailing_comma -- switch_case_on_newline diff --git a/.vscode/settings.json b/.vscode/settings.json index eb85504..3c714be 100644 --- 
a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,26 +1,18 @@ { - "files.autoSave": "onFocusChange", - "files.defaultLanguage": "rust", - "editor.formatOnPaste": true, - "editor.formatOnSave": true, - "files.trimTrailingWhitespace": true, - "editor.suggest.preview": true, - "editor.acceptSuggestionOnEnter": "on", - "rust-analyzer.restartServerOnConfigChange": true, - "rust-analyzer.cargo.features": "all", - "rust-analyzer.rustfmt.extraArgs": ["+nightly"], - "[rust]": { - "editor.defaultFormatter": "rust-lang.rust-analyzer" - }, - "rust-analyzer.inlayHints.typeHints.enable": false, - "rust-analyzer.linkedProjects": [ - "./burrow/Cargo.toml" - ], - "[yaml]": { - "editor.insertSpaces": true, - "editor.tabSize": 2, - "editor.autoIndent": "advanced", - "diffEditor.ignoreTrimWhitespace": false, - "editor.formatOnSave": false - } -} + "files.autoSave": "onFocusChange", + "files.defaultLanguage": "rust", + "editor.formatOnPaste": true, + "editor.formatOnSave": true, + "files.trimTrailingWhitespace": true, + "editor.suggest.preview": true, + "editor.acceptSuggestionOnEnter": "on", + "rust-analyzer.restartServerOnConfigChange": true, + "rust-analyzer.cargo.features": "all", + "rust-analyzer.rustfmt.extraArgs": [ + "+nightly" + ], + "[rust]": { + "editor.defaultFormatter": "rust-lang.rust-analyzer", + }, + "rust-analyzer.inlayHints.typeHints.enable": false +} \ No newline at end of file diff --git a/AGENTS.md b/AGENTS.md deleted file mode 100644 index 0ca7ced..0000000 --- a/AGENTS.md +++ /dev/null @@ -1,14 +0,0 @@ -# instructions for agents - -1. Spell the project name as `Burrow` in user-facing copy and `burrow` in code, package, and protocol identifiers unless an existing integration requires a different literal. -2. Read [CONSTITUTION.md](CONSTITUTION.md) before changing Apple clients, the daemon, the control plane, forge infrastructure, identity, or security-sensitive code. -3. 
Anchor non-trivial changes in a Burrow Evolution Proposal (BEP) under [evolution/](evolution/README.md) so future contributors can inherit the rationale, safeguards, and rollout shape. -4. Before touching the Apple app, daemon IPC, or Tailnet flows, review: - - [evolution/proposals/BEP-0002-control-plane-bootstrap-and-local-auth.md](evolution/proposals/BEP-0002-control-plane-bootstrap-and-local-auth.md) - - [evolution/proposals/BEP-0003-connect-ip-and-negotiation-roadmap.md](evolution/proposals/BEP-0003-connect-ip-and-negotiation-roadmap.md) - - [evolution/proposals/BEP-0005-daemon-ipc-and-apple-boundary.md](evolution/proposals/BEP-0005-daemon-ipc-and-apple-boundary.md) - - [evolution/proposals/BEP-0006-tailnet-authority-first-control-plane.md](evolution/proposals/BEP-0006-tailnet-authority-first-control-plane.md) -5. Apple clients must talk only to the daemon over gRPC. Do not add direct HTTP, control-plane, or helper-process calls from Swift UI code. -6. Treat Tailnet as one protocol family. Tailscale-managed and self-hosted Headscale-style deployments differ by authority, policy, and auth details, not by a separate user-facing protocol surface. -7. Maintain canonical identity and operator metadata in [contributors.nix](contributors.nix). If Burrow forge, Authentik, Headscale, or admin/group mappings need to change, edit that registry first and derive runtime configuration from it. -8. When process or architecture is unclear, stop and draft or update a BEP instead of improvising durable behavior in code. 
diff --git a/Apple/App/App-iOS.entitlements b/Apple/App/App-iOS.entitlements index 53fcbb7..02ee960 100644 --- a/Apple/App/App-iOS.entitlements +++ b/Apple/App/App-iOS.entitlements @@ -2,11 +2,6 @@ - com.apple.developer.associated-domains - - applinks:burrow.rs?mode=developer - webcredentials:burrow.rs?mode=developer - com.apple.developer.networking.networkextension packet-tunnel-provider diff --git a/Apple/App/App-macOS.entitlements b/Apple/App/App-macOS.entitlements index 53fcbb7..02ee960 100644 --- a/Apple/App/App-macOS.entitlements +++ b/Apple/App/App-macOS.entitlements @@ -2,11 +2,6 @@ - com.apple.developer.associated-domains - - applinks:burrow.rs?mode=developer - webcredentials:burrow.rs?mode=developer - com.apple.developer.networking.networkextension packet-tunnel-provider diff --git a/Apple/App/App.xcconfig b/Apple/App/App.xcconfig index 4e42ddc..1d63205 100644 --- a/Apple/App/App.xcconfig +++ b/Apple/App/App.xcconfig @@ -11,12 +11,7 @@ INFOPLIST_KEY_UIStatusBarStyle[sdk=iphone*] = UIStatusBarStyleDefault INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad[sdk=iphone*] = UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone[sdk=iphone*] = UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight TARGETED_DEVICE_FAMILY[sdk=iphone*] = 1,2 -EXCLUDED_SOURCE_FILE_NAMES = MainMenu.xib -EXCLUDED_SOURCE_FILE_NAMES[sdk=macosx*] = -INFOPLIST_KEY_LSUIElement[sdk=macosx*] = YES -INFOPLIST_KEY_NSMainNibFile[sdk=macosx*] = MainMenu -INFOPLIST_KEY_NSPrincipalClass[sdk=macosx*] = NSApplication INFOPLIST_KEY_LSApplicationCategoryType[sdk=macosx*] = public.app-category.utilities CODE_SIGN_ENTITLEMENTS = App/App-iOS.entitlements diff --git a/Apple/App/AppDelegate.swift b/Apple/App/AppDelegate.swift index c3cb4cb..f42b52f 100644 --- a/Apple/App/AppDelegate.swift +++ 
b/Apple/App/AppDelegate.swift @@ -1,13 +1,9 @@ #if os(macOS) import AppKit -import BurrowUI import SwiftUI -@main @MainActor class AppDelegate: NSObject, NSApplicationDelegate { - private var windowController: NSWindowController? - private let quitItem: NSMenuItem = { let quitItem = NSMenuItem( title: "Quit Burrow", @@ -19,19 +15,8 @@ class AppDelegate: NSObject, NSApplicationDelegate { return quitItem }() - private lazy var openItem: NSMenuItem = { - let item = NSMenuItem( - title: "Open Burrow", - action: #selector(openWindow), - keyEquivalent: "o" - ) - item.target = self - item.keyEquivalentModifierMask = .command - return item - }() - private let toggleItem: NSMenuItem = { - let toggleView = NSHostingView(rootView: MenuItemToggleView()) + let toggleView = NSHostingView(rootView: MenuItemToggleView(tunnel: BurrowApp.tunnel)) toggleView.frame.size = CGSize(width: 300, height: 32) toggleView.autoresizingMask = [.width] @@ -44,7 +29,6 @@ class AppDelegate: NSObject, NSApplicationDelegate { let menu = NSMenu() menu.items = [ toggleItem, - openItem, .separator(), quitItem ] @@ -55,7 +39,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { let statusBar = NSStatusBar.system let statusItem = statusBar.statusItem(withLength: NSStatusItem.squareLength) if let button = statusItem.button { - button.image = NSImage(systemSymbolName: "pipe.and.drop.fill", accessibilityDescription: nil) + button.image = NSImage(systemSymbolName: "network.badge.shield.half.filled", accessibilityDescription: nil) } return statusItem }() @@ -63,28 +47,5 @@ class AppDelegate: NSObject, NSApplicationDelegate { func applicationDidFinishLaunching(_ notification: Notification) { statusItem.menu = menu } - - @objc - private func openWindow() { - if let window = windowController?.window { - window.makeKeyAndOrderFront(nil) - NSApplication.shared.activate(ignoringOtherApps: true) - return - } - - let contentView = BurrowView() - let hostingController = NSHostingController(rootView: contentView) - 
let window = NSWindow(contentViewController: hostingController) - window.title = "Burrow" - window.setContentSize(NSSize(width: 820, height: 720)) - window.styleMask.insert([.titled, .closable, .miniaturizable, .resizable]) - window.center() - - let controller = NSWindowController(window: window) - controller.shouldCascadeWindows = true - controller.showWindow(nil) - windowController = controller - NSApplication.shared.activate(ignoringOtherApps: true) - } } #endif diff --git a/Apple/UI/Assets.xcassets/AccentColor.colorset/Contents.json b/Apple/App/Assets.xcassets/AccentColor.colorset/Contents.json similarity index 100% rename from Apple/UI/Assets.xcassets/AccentColor.colorset/Contents.json rename to Apple/App/Assets.xcassets/AccentColor.colorset/Contents.json diff --git a/Apple/App/Assets.xcassets/AppIcon.appiconset/Contents.json b/Apple/App/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000..532cd72 --- /dev/null +++ b/Apple/App/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,63 @@ +{ + "images" : [ + { + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "16x16" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "32x32" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "32x32" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "128x128" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "256x256" + }, + { + "idiom" : "mac", + "scale" : "1x", + "size" : "512x512" + }, + { + "idiom" : "mac", + "scale" : "2x", + "size" : "512x512" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/Apple/UI/Assets.xcassets/Contents.json b/Apple/App/Assets.xcassets/Contents.json similarity index 100% rename from 
Apple/UI/Assets.xcassets/Contents.json rename to Apple/App/Assets.xcassets/Contents.json diff --git a/Apple/App/BurrowApp.swift b/Apple/App/BurrowApp.swift index 838ef54..e8aed86 100644 --- a/Apple/App/BurrowApp.swift +++ b/Apple/App/BurrowApp.swift @@ -1,14 +1,21 @@ -#if !os(macOS) -import BurrowUI import SwiftUI -@MainActor @main +@MainActor struct BurrowApp: App { + static let tunnel = Tunnel { manager, proto in + proto.serverAddress = "hackclub.com" + manager.localizedDescription = "Burrow" + } + + #if os(macOS) + @NSApplicationDelegateAdaptor(AppDelegate.self) + var delegate + #endif + var body: some Scene { WindowGroup { - BurrowView() + TunnelView(tunnel: Self.tunnel) } } } -#endif diff --git a/Apple/App/MainMenu.xib b/Apple/App/MainMenu.xib deleted file mode 100644 index 50ba431..0000000 --- a/Apple/App/MainMenu.xib +++ /dev/null @@ -1,679 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Default - - - - - - - Left to Right - - - - - - - Right to Left - 
- - - - - - - - - - Default - - - - - - - Left to Right - - - - - - - Right to Left - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Apple/App/Menu/MenuView.swift b/Apple/App/Menu/MenuView.swift new file mode 100644 index 0000000..eab8da2 --- /dev/null +++ b/Apple/App/Menu/MenuView.swift @@ -0,0 +1,60 @@ +// +// MenuView.swift +// App +// +// Created by Thomas Stubblefield on 5/13/23. +// + +import SwiftUI + +struct MenuItemToggleView: View { + var tunnel: Tunnel + + var body: some View { + HStack { + Text("Burrow") + .font(.headline) + Spacer() + Toggle("Burrow", isOn: tunnel.isOn) + .labelsHidden() + .disabled(tunnel.isDisabled) + .toggleStyle(.switch) + } + .padding(.horizontal, 4) + .padding(10) + .frame(minWidth: 300, minHeight: 32, maxHeight: 32) + } +} + +extension Tunnel { + var isDisabled: Bool { + switch self.status { + case .disconnected, .permissionRequired, .connected: + return false + case .unknown, .disabled, .connecting, .reasserting, .disconnecting, .invalid, .configurationReadWriteFailed: + return true + } + } + + var isOn: Binding { + Binding { + switch self.status { + case .connecting, .reasserting, .connected: + true + default: + false + } + } set: { newValue in + switch (self.status, newValue) { + case (.permissionRequired, true): + Task { try await self.configure() } + case (.disconnected, true): + try? 
self.start() + case (.connected, false): + self.stop() + default: + return + } + } + } +} diff --git a/Apple/UI/NetworkExtension+Async.swift b/Apple/App/NetworkExtension+Async.swift similarity index 82% rename from Apple/UI/NetworkExtension+Async.swift rename to Apple/App/NetworkExtension+Async.swift index 5820e7f..4833efb 100644 --- a/Apple/UI/NetworkExtension+Async.swift +++ b/Apple/App/NetworkExtension+Async.swift @@ -1,6 +1,6 @@ import NetworkExtension -extension NEVPNManager: @unchecked @retroactive Sendable { +extension NEVPNManager { func remove() async throws { _ = try await withUnsafeThrowingContinuation { continuation in removeFromPreferences(completionHandler: completion(continuation)) @@ -14,7 +14,7 @@ extension NEVPNManager: @unchecked @retroactive Sendable { } } -extension NETunnelProviderManager: @unchecked @retroactive Sendable { +extension NETunnelProviderManager { class var managers: [NETunnelProviderManager] { get async throws { try await withUnsafeThrowingContinuation { continuation in @@ -34,7 +34,7 @@ private func completion(_ continuation: UnsafeContinuation) -> (Err } } -private func completion(_ continuation: UnsafeContinuation) -> (T?, Error?) -> Void { +private func completion(_ continuation: UnsafeContinuation) -> (T?, Error?) 
-> Void { return { value, error in if let error { continuation.resume(throwing: error) diff --git a/Apple/App/Status.swift b/Apple/App/Status.swift new file mode 100644 index 0000000..c08cdd1 --- /dev/null +++ b/Apple/App/Status.swift @@ -0,0 +1,42 @@ +import Foundation +import NetworkExtension + +extension Tunnel { + enum Status: CustomStringConvertible, Equatable, Hashable { + case unknown + case permissionRequired + case disabled + case connecting + case connected(Date) + case disconnecting + case disconnected + case reasserting + case invalid + case configurationReadWriteFailed + + var description: String { + switch self { + case .unknown: + return "Unknown" + case .permissionRequired: + return "Permission Required" + case .disconnected: + return "Disconnected" + case .disabled: + return "Disabled" + case .connecting: + return "Connecting" + case .connected: + return "Connected" + case .disconnecting: + return "Disconnecting" + case .reasserting: + return "Reasserting" + case .invalid: + return "Invalid" + case .configurationReadWriteFailed: + return "System Error" + } + } + } +} diff --git a/Apple/App/Tunnel.swift b/Apple/App/Tunnel.swift new file mode 100644 index 0000000..5542170 --- /dev/null +++ b/Apple/App/Tunnel.swift @@ -0,0 +1,146 @@ +import BurrowShared +import NetworkExtension +import SwiftUI + +@Observable +class Tunnel { + private(set) var status: Status = .unknown + private var error: NEVPNError? + + private let logger = Logger.logger(for: Tunnel.self) + private let bundleIdentifier: String + private let configure: (NETunnelProviderManager, NETunnelProviderProtocol) -> Void + private var tasks: [Task] = [] + + // Each manager corresponds to one entry in the Settings app. + // Our goal is to maintain a single manager, so we create one if none exist and delete extra if there are any. + private var managers: [NEVPNManager]? 
{ + didSet { status = currentStatus } + } + + private var currentStatus: Status { + guard let managers = managers else { + guard let error = error else { + return .unknown + } + + switch error.code { + case .configurationReadWriteFailed: + return .configurationReadWriteFailed + default: + return .unknown + } + } + + guard let manager = managers.first else { + return .permissionRequired + } + + guard manager.isEnabled else { + return .disabled + } + + return manager.connection.tunnelStatus + } + + convenience init(configure: @escaping (NETunnelProviderManager, NETunnelProviderProtocol) -> Void) { + self.init("com.hackclub.burrow.network", configure: configure) + } + + init(_ bundleIdentifier: String, configure: @escaping (NETunnelProviderManager, NETunnelProviderProtocol) -> Void) { + self.bundleIdentifier = bundleIdentifier + self.configure = configure + + let center = NotificationCenter.default + let configurationChanged = Task { + for try await _ in center.notifications(named: .NEVPNConfigurationChange).map({ _ in () }) { + await update() + } + } + let statusChanged = Task { + for try await _ in center.notifications(named: .NEVPNStatusDidChange).map({ _ in () }) { + await MainActor.run { + status = currentStatus + } + } + } + tasks = [configurationChanged, statusChanged] + + Task { await update() } + } + + private func update() async { + do { + let updated = try await NETunnelProviderManager.managers + await MainActor.run { + managers = updated + } + } catch let vpnError as NEVPNError { + error = vpnError + } catch { + logger.error("Failed to update VPN configurations: \(error)") + } + } + + func configure() async throws { + if managers == nil { + await update() + } + + guard let managers = managers else { return } + + if managers.count > 1 { + try await withThrowingTaskGroup(of: Void.self, returning: Void.self) { group in + for manager in managers.suffix(from: 1) { + group.addTask { try await manager.remove() } + } + try await group.waitForAll() + } + } + + if 
managers.isEmpty { + let manager = NETunnelProviderManager() + let proto = NETunnelProviderProtocol() + proto.providerBundleIdentifier = bundleIdentifier + configure(manager, proto) + + manager.protocolConfiguration = proto + try await manager.save() + } + } + + func start() throws { + guard let manager = managers?.first else { return } + try manager.connection.startVPNTunnel() + } + + func stop() { + guard let manager = managers?.first else { return } + manager.connection.stopVPNTunnel() + } + + deinit { + tasks.forEach { $0.cancel() } + } +} + +extension NEVPNConnection { + var tunnelStatus: Tunnel.Status { + switch status { + case .connected: + .connected(connectedDate!) + case .connecting: + .connecting + case .disconnecting: + .disconnecting + case .disconnected: + .disconnected + case .reasserting: + .reasserting + case .invalid: + .invalid + @unknown default: + .unknown + } + } +} diff --git a/Apple/App/TunnelView.swift b/Apple/App/TunnelView.swift new file mode 100644 index 0000000..dd91603 --- /dev/null +++ b/Apple/App/TunnelView.swift @@ -0,0 +1,34 @@ +import SwiftUI + +struct TunnelView: View { + var tunnel: Tunnel + + var body: some View { + VStack { + Text(verbatim: tunnel.status.description) + switch tunnel.status { + case .connected: + Button("Disconnect", action: stop) + case .permissionRequired: + Button("Allow", action: configure) + case .disconnected: + Button("Start", action: start) + default: + EmptyView() + } + } + .padding() + } + + private func start() { + try? 
tunnel.start() + } + + private func stop() { + tunnel.stop() + } + + private func configure() { + Task { try await tunnel.configure() } + } +} diff --git a/Apple/AppUITests/BurrowUITests.swift b/Apple/AppUITests/BurrowUITests.swift deleted file mode 100644 index b7d8111..0000000 --- a/Apple/AppUITests/BurrowUITests.swift +++ /dev/null @@ -1,439 +0,0 @@ -import XCTest -import UIKit - -@MainActor -final class BurrowTailnetLoginUITests: XCTestCase { - private enum TailnetLoginMode: String, Decodable { - case tailscale - case discovered - } - - private struct TestConfig: Decodable { - let email: String - let username: String - let password: String - let mode: TailnetLoginMode? - } - - override func setUpWithError() throws { - continueAfterFailure = false - } - - func testTailnetLoginThroughAuthentikWebSession() throws { - let config = try loadTestConfig() - let email = config.email - let username = config.username - let password = config.password - let mode = config.mode ?? .tailscale - let browserIdentity = mode == .tailscale ? 
email : username - - let app = XCUIApplication() - app.launch() - - let tailnetButton = app.buttons["quick-add-tailnet"] - XCTAssertTrue(tailnetButton.waitForExistence(timeout: 15), "Tailnet add button did not appear") - tailnetButton.tap() - - configureTailnetIfNeeded(in: app, mode: mode) - - let discoveryField = app.textFields["tailnet-discovery-email"] - XCTAssertTrue(discoveryField.waitForExistence(timeout: 10), "Tailnet discovery email field did not appear") - replaceText(in: discoveryField, with: email) - - let serverCard = app.descendants(matching: .any) - .matching(identifier: "tailnet-server-card") - .firstMatch - XCTAssertTrue(serverCard.waitForExistence(timeout: 5), "Tailnet server card did not appear") - - let signInButton = app.buttons["tailnet-start-sign-in"] - XCTAssertTrue(signInButton.waitForExistence(timeout: 10), "Tailnet sign-in button did not appear") - signInButton.tap() - - acceptAuthenticationPromptIfNeeded(in: app, timeout: 20) - - let webSession = webAuthenticationSession() - XCTAssertTrue(webSession.waitForExistence(timeout: 20), "Safari authentication session did not appear") - - signIntoAuthentik(in: webSession, username: browserIdentity, password: password) - - app.activate() - XCTAssertTrue( - waitForTailnetSignedIn(in: app, timeout: 60), - "Tailnet sign-in never reached the running state" - ) - } - - private func configureTailnetIfNeeded(in app: XCUIApplication, mode: TailnetLoginMode) { - guard mode == .discovered else { return } - - openTailnetMenu(in: app) - tapMenuButton(named: "Edit Custom Server", in: app) - - openTailnetMenu(in: app) - tapMenuButton(named: "Show Advanced Settings", in: app) - - let authorityField = app.textFields["tailnet-authority"] - XCTAssertTrue(authorityField.waitForExistence(timeout: 10), "Tailnet authority field did not appear") - replaceText(in: authorityField, with: "") - } - - private func openTailnetMenu(in app: XCUIApplication) { - let moreButton = app.buttons["More"] - 
XCTAssertTrue(moreButton.waitForExistence(timeout: 5), "Tailnet menu button did not appear") - moreButton.tap() - } - - private func tapMenuButton(named title: String, in app: XCUIApplication) { - let menuButton = firstExistingElement( - from: [ - app.buttons[title], - app.descendants(matching: .button)[title], - ], - timeout: 5 - ) - XCTAssertTrue(menuButton.exists, "Menu action \(title) did not appear") - menuButton.tap() - } - - private func acceptAuthenticationPromptIfNeeded( - in app: XCUIApplication, - timeout: TimeInterval - ) { - let springboard = XCUIApplication(bundleIdentifier: "com.apple.springboard") - let deadline = Date().addingTimeInterval(timeout) - - repeat { - let promptCandidates = [ - springboard.buttons["Continue"], - springboard.buttons["Allow"], - app.buttons["Continue"], - app.buttons["Allow"], - ] - - for button in promptCandidates where button.exists && button.isHittable { - button.tap() - return - } - - RunLoop.current.run(until: Date().addingTimeInterval(0.25)) - } while Date() < deadline - - let promptCandidates = [ - springboard.buttons["Continue"], - springboard.buttons["Allow"], - app.buttons["Continue"], - app.buttons["Allow"], - ] - - for button in promptCandidates where button.exists { - button.tap() - return - } - } - - private func webAuthenticationSession() -> XCUIApplication { - let safariViewService = XCUIApplication(bundleIdentifier: "com.apple.SafariViewService") - if safariViewService.waitForExistence(timeout: 5) { - return safariViewService - } - - let safari = XCUIApplication(bundleIdentifier: "com.apple.mobilesafari") - _ = safari.waitForExistence(timeout: 5) - return safari - } - - private func signIntoAuthentik(in webSession: XCUIApplication, username: String, password: String) { - followTailnetRedirectIfNeeded(in: webSession) - - if !webSession.exists { - return - } - - let immediatePasswordField = firstExistingSecureField(in: webSession, timeout: 2) - if immediatePasswordField.exists { - replaceSecureText(in: 
immediatePasswordField, within: webSession, with: password) - submitAuthenticationForm(in: webSession, focusedField: immediatePasswordField) - return - } - - let usernameField = firstExistingElement( - in: webSession, - queries: [ - { $0.textFields["Username"] }, - { $0.textFields["Email or Username"] }, - { $0.textFields["Email address"] }, - { $0.textFields["Email"] }, - { $0.webViews.textFields["Username"] }, - { $0.webViews.textFields["Email or Username"] }, - { $0.descendants(matching: .textField).firstMatch }, - ], - timeout: 12 - ) - if !usernameField.exists { - return - } - replaceText(in: usernameField, with: username) - - tapFirstExistingButton( - in: webSession, - titles: ["Continue", "Next", "Sign In", "Log in", "Login"], - timeout: 5 - ) - - let passwordField = firstExistingSecureField(in: webSession, timeout: 20) - XCTAssertTrue(passwordField.exists, "Authentik password field did not appear") - replaceSecureText(in: passwordField, within: webSession, with: password) - submitAuthenticationForm(in: webSession, focusedField: passwordField) - } - - private func followTailnetRedirectIfNeeded(in webSession: XCUIApplication) { - let redirectCandidates = [ - webSession.links["Found"], - webSession.webViews.links["Found"], - webSession.buttons["Found"], - webSession.webViews.buttons["Found"], - ] - - let redirectLink = firstExistingElement(from: redirectCandidates, timeout: 8) - if redirectLink.exists { - redirectLink.tap() - } - } - - private func firstExistingSecureField(in app: XCUIApplication, timeout: TimeInterval) -> XCUIElement { - let candidates = [ - app.descendants(matching: .secureTextField).firstMatch, - app.secureTextFields["Password"], - app.secureTextFields["Password or Token"], - app.webViews.secureTextFields["Password"], - app.webViews.secureTextFields["Password or Token"], - ] - - return firstExistingElement(from: candidates, timeout: timeout) - } - - private func tapFirstExistingButton( - in app: XCUIApplication, - titles: [String], - 
timeout: TimeInterval - ) { - let candidates = titles.flatMap { title in - [ - app.buttons[title], - app.webViews.buttons[title], - ] - } + [app.descendants(matching: .button).firstMatch] - - let button = firstExistingElement(from: candidates, timeout: timeout) - XCTAssertTrue(button.exists, "Expected one of \(titles.joined(separator: ", ")) to appear") - button.tap() - } - - private func submitAuthenticationForm(in app: XCUIApplication, focusedField: XCUIElement) { - focus(focusedField) - focusedField.typeText("\n") - if waitForAny( - [ - { !focusedField.exists }, - { !app.staticTexts["Burrow Tailnet Authentication"].exists }, - ], - timeout: 1.5 - ) { - return - } - - let keyboard = app.keyboards.firstMatch - if keyboard.waitForExistence(timeout: 2) { - let keyboardCandidates = [ - "Return", - "return", - "Go", - "go", - "Continue", - "continue", - "Done", - "done", - "Join", - "join", - "Sign In", - "Log In", - "Login", - ] - for title in keyboardCandidates { - let key = keyboard.buttons[title] - if key.exists && key.isHittable { - key.tap() - return - } - } - - if let lastKey = keyboard.buttons.allElementsBoundByIndex.last, - lastKey.exists, - lastKey.isHittable - { - lastKey.tap() - return - } - } - - tapFirstExistingButton( - in: app, - titles: ["Continue", "Sign In", "Log in", "Login"], - timeout: 5 - ) - } - - private func loadTestConfig() throws -> TestConfig { - let environment = ProcessInfo.processInfo.environment - if let email = nonEmptyEnvironment("BURROW_UI_TEST_EMAIL"), - let password = nonEmptyEnvironment("BURROW_UI_TEST_PASSWORD") - { - return TestConfig( - email: email, - username: nonEmptyEnvironment("BURROW_UI_TEST_USERNAME") ?? email, - password: password, - mode: nonEmptyEnvironment("BURROW_UI_TEST_TAILNET_MODE") - .flatMap(TailnetLoginMode.init(rawValue:)) - ) - } - - let configPath = environment["BURROW_UI_TEST_CONFIG_PATH"] ?? 
"/tmp/burrow-ui-test-config.json" - let configURL = URL(fileURLWithPath: configPath) - guard FileManager.default.fileExists(atPath: configURL.path) else { - throw XCTSkip( - "Missing UI test configuration. Expected env vars or config file at \(configURL.path)" - ) - } - - let data = try Data(contentsOf: configURL) - return try JSONDecoder().decode(TestConfig.self, from: data) - } - - private func nonEmptyEnvironment(_ key: String) -> String? { - guard let value = ProcessInfo.processInfo.environment[key]? - .trimmingCharacters(in: .whitespacesAndNewlines), - !value.isEmpty - else { - return nil - } - return value - } - - private func waitForFieldValue( - _ field: XCUIElement, - containing substring: String, - timeout: TimeInterval - ) -> Bool { - let predicate = NSPredicate(format: "value CONTAINS %@", substring) - let expectation = XCTNSPredicateExpectation(predicate: predicate, object: field) - return XCTWaiter.wait(for: [expectation], timeout: timeout) == .completed - } - - private func waitForButtonLabel( - _ button: XCUIElement, - equals expected: String, - timeout: TimeInterval - ) -> Bool { - let predicate = NSPredicate(format: "label == %@", expected) - let expectation = XCTNSPredicateExpectation(predicate: predicate, object: button) - return XCTWaiter.wait(for: [expectation], timeout: timeout) == .completed - } - - private func waitForTailnetSignedIn(in app: XCUIApplication, timeout: TimeInterval) -> Bool { - let button = app.buttons["tailnet-start-sign-in"] - let deadline = Date().addingTimeInterval(timeout) - - repeat { - acceptAuthenticationPromptIfNeeded(in: app, timeout: 1) - if button.exists, button.label == "Signed In" { - return true - } - RunLoop.current.run(until: Date().addingTimeInterval(0.3)) - } while Date() < deadline - - return button.exists && button.label == "Signed In" - } - - private func waitForAny(_ conditions: [() -> Bool], timeout: TimeInterval) -> Bool { - let deadline = Date().addingTimeInterval(timeout) - repeat { - if 
conditions.contains(where: { $0() }) { - return true - } - RunLoop.current.run(until: Date().addingTimeInterval(0.2)) - } while Date() < deadline - return conditions.contains(where: { $0() }) - } - - private func firstExistingElement( - in app: XCUIApplication, - queries: [(XCUIApplication) -> XCUIElement], - timeout: TimeInterval - ) -> XCUIElement { - firstExistingElement(from: queries.map { $0(app) }, timeout: timeout) - } - - private func firstExistingElement(from candidates: [XCUIElement], timeout: TimeInterval) -> XCUIElement { - let deadline = Date().addingTimeInterval(timeout) - repeat { - for candidate in candidates where candidate.exists { - return candidate - } - RunLoop.current.run(until: Date().addingTimeInterval(0.2)) - } while Date() < deadline - - return candidates[0] - } - - private func replaceText(in element: XCUIElement, with value: String) { - focus(element) - clearText(in: element) - element.typeText(value) - } - - private func replaceSecureText(in element: XCUIElement, within app: XCUIApplication, with value: String) { - UIPasteboard.general.string = value - focus(element) - for revealMenu in [ - { element.doubleTap() }, - { element.press(forDuration: 1.2) }, - ] { - revealMenu() - let pasteButton = firstExistingElement(from: pasteCandidates(in: app), timeout: 3) - if pasteButton.exists { - pasteButton.tap() - return - } - } - - focus(element) - element.typeText(value) - } - - private func clearText(in element: XCUIElement) { - guard let currentValue = element.value as? 
String, !currentValue.isEmpty else { - return - } - - let deleteSequence = String(repeating: XCUIKeyboardKey.delete.rawValue, count: currentValue.count) - element.typeText(deleteSequence) - } - - private func focus(_ element: XCUIElement) { - element.coordinate(withNormalizedOffset: CGVector(dx: 0.5, dy: 0.5)).tap() - RunLoop.current.run(until: Date().addingTimeInterval(0.3)) - } - - private func pasteCandidates(in app: XCUIApplication) -> [XCUIElement] { - let pasteLabels = ["Paste", "Incolla", "Paste from Clipboard"] - return pasteLabels.flatMap { label in - [ - app.menuItems[label], - app.buttons[label], - app.webViews.buttons[label], - app.descendants(matching: .button).matching(NSPredicate(format: "label == %@", label)).firstMatch, - app.descendants(matching: .menuItem).matching(NSPredicate(format: "label == %@", label)).firstMatch, - ] - } - } -} diff --git a/Apple/Burrow.xcodeproj/project.pbxproj b/Apple/Burrow.xcodeproj/project.pbxproj index 83d32e0..428d9ab 100644 --- a/Apple/Burrow.xcodeproj/project.pbxproj +++ b/Apple/Burrow.xcodeproj/project.pbxproj @@ -7,55 +7,42 @@ objects = { /* Begin PBXBuildFile section */ + 0B28F1562ABF463A000D44B0 /* DataTypes.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0B28F1552ABF463A000D44B0 /* DataTypes.swift */; }; + 0B46E8E02AC918CA00BA2A3C /* Client.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0B46E8DF2AC918CA00BA2A3C /* Client.swift */; }; + 43AA26D82A10004900F14CE6 /* MenuView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 43AA26D72A10004900F14CE6 /* MenuView.swift */; }; + D00117312B2FFFC900D87C25 /* NWConnection+Async.swift in Sources */ = {isa = PBXBuildFile; fileRef = D00117302B2FFFC900D87C25 /* NWConnection+Async.swift */; }; + D00117332B3001A400D87C25 /* NewlineProtocolFramer.swift in Sources */ = {isa = PBXBuildFile; fileRef = D00117322B3001A400D87C25 /* NewlineProtocolFramer.swift */; }; + D001173B2B30341C00D87C25 /* Logging.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
D001173A2B30341C00D87C25 /* Logging.swift */; }; + D00117442B30372900D87C25 /* libBurrowShared.a in Frameworks */ = {isa = PBXBuildFile; fileRef = D00117382B30341C00D87C25 /* libBurrowShared.a */; }; + D00117452B30372C00D87C25 /* libBurrowShared.a in Frameworks */ = {isa = PBXBuildFile; fileRef = D00117382B30341C00D87C25 /* libBurrowShared.a */; }; D00AA8972A4669BC005C8102 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = D00AA8962A4669BC005C8102 /* AppDelegate.swift */; }; - D11000012F70000100112233 /* BurrowUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D11000042F70000100112233 /* BurrowUITests.swift */; }; D020F65829E4A697002790F6 /* PacketTunnelProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = D020F65729E4A697002790F6 /* PacketTunnelProvider.swift */; }; D020F65D29E4A697002790F6 /* BurrowNetworkExtension.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = D020F65329E4A697002790F6 /* BurrowNetworkExtension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; - D03383AD2C8E67E300F7C44E /* SwiftProtobuf in Frameworks */ = {isa = PBXBuildFile; productRef = D078F7E22C8DA375008A8CEC /* SwiftProtobuf */; }; - D03383AE2C8E67E300F7C44E /* NIO in Frameworks */ = {isa = PBXBuildFile; productRef = D044EE902C8DAB2000778185 /* NIO */; }; - D03383AF2C8E67E300F7C44E /* NIOConcurrencyHelpers in Frameworks */ = {isa = PBXBuildFile; productRef = D044EE922C8DAB2000778185 /* NIOConcurrencyHelpers */; }; - D03383B02C8E67E300F7C44E /* NIOTransportServices in Frameworks */ = {isa = PBXBuildFile; productRef = D044EE952C8DAB2800778185 /* NIOTransportServices */; }; D05B9F7629E39EEC008CB1F9 /* BurrowApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = D05B9F7529E39EEC008CB1F9 /* BurrowApp.swift */; }; - D09150422B9D2AF700BE3CB0 /* MainMenu.xib in Resources */ = {isa = PBXBuildFile; fileRef = D09150412B9D2AF700BE3CB0 /* MainMenu.xib */; platformFilters = (macos, ); }; - D0B1D1102C436152004B7823 /* 
AsyncAlgorithms in Frameworks */ = {isa = PBXBuildFile; productRef = D0B1D10F2C436152004B7823 /* AsyncAlgorithms */; }; + D05B9F7829E39EEC008CB1F9 /* TunnelView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D05B9F7729E39EEC008CB1F9 /* TunnelView.swift */; }; + D05B9F7A29E39EED008CB1F9 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D05B9F7929E39EED008CB1F9 /* Assets.xcassets */; }; + D08252762B5C9FC4005DA378 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = D08252752B5C9FC4005DA378 /* Constants.swift */; }; + D0BCC5FD2A086D4700AD070D /* NetworkExtension+Async.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0BCC5FC2A086D4700AD070D /* NetworkExtension+Async.swift */; }; + D0BCC5FF2A086E1C00AD070D /* Status.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0BCC5FE2A086E1C00AD070D /* Status.swift */; }; + D0BCC6082A0981FE00AD070D /* Tunnel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0B98FC629FDC5B5004E7149 /* Tunnel.swift */; }; D0BCC6092A09A03E00AD070D /* libburrow.a in Frameworks */ = {isa = PBXBuildFile; fileRef = D0BCC6032A09535900AD070D /* libburrow.a */; }; - D0BF09522C8E66F6000D8DEC /* BurrowConfiguration.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */; }; - D0BF09552C8E66FD000D8DEC /* BurrowConfiguration.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */; }; - D0D4E53A2C8D996F007F820A /* BurrowCore.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5312C8D996F007F820A /* BurrowCore.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; - D0D4E56B2C8D9C2F007F820A /* Logging.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E49A2C8D921A007F820A /* Logging.swift */; }; - D0D4E5702C8D9C62007F820A /* BurrowCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5312C8D996F007F820A /* 
BurrowCore.framework */; }; - D0D4E5722C8D9C6F007F820A /* Network.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E49E2C8D921A007F820A /* Network.swift */; }; - D0D4E5732C8D9C6F007F820A /* WireGuard.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E49F2C8D921A007F820A /* WireGuard.swift */; }; - D0D4E5742C8D9C6F007F820A /* BurrowView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A22C8D921A007F820A /* BurrowView.swift */; }; - D0D4E5752C8D9C6F007F820A /* FloatingButtonStyle.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A32C8D921A007F820A /* FloatingButtonStyle.swift */; }; - D0D4E5762C8D9C6F007F820A /* MenuItemToggleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A42C8D921A007F820A /* MenuItemToggleView.swift */; }; - D0D4E5772C8D9C6F007F820A /* NetworkCarouselView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A52C8D921A007F820A /* NetworkCarouselView.swift */; }; - D0D4E5782C8D9C6F007F820A /* NetworkExtension+Async.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A62C8D921A007F820A /* NetworkExtension+Async.swift */; }; - D0D4E5792C8D9C6F007F820A /* NetworkExtensionTunnel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A72C8D921A007F820A /* NetworkExtensionTunnel.swift */; }; - D0D4E57A2C8D9C6F007F820A /* NetworkView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4A82C8D921A007F820A /* NetworkView.swift */; }; - D0D4E57C2C8D9C6F007F820A /* Tunnel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4AA2C8D921A007F820A /* Tunnel.swift */; }; - D0D4E57D2C8D9C6F007F820A /* TunnelButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4AB2C8D921A007F820A /* TunnelButton.swift */; }; - D0D4E57E2C8D9C6F007F820A /* TunnelStatusView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4AC2C8D921A007F820A /* TunnelStatusView.swift */; }; - D0D4E5892C8D9C94007F820A /* BurrowUI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 
D0D4E5582C8D9BF2007F820A /* BurrowUI.framework */; }; - D0D4E58A2C8D9C9E007F820A /* BurrowUI.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5582C8D9BF2007F820A /* BurrowUI.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; - D0D4E58B2C8D9CA4007F820A /* BurrowConfiguration.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; - D0D4E5922C8D9D15007F820A /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E58F2C8D9D0A007F820A /* Constants.swift */; }; - D0D4E5A62C8D9E65007F820A /* BurrowCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5312C8D996F007F820A /* BurrowCore.framework */; }; - D0F4FAD32C8DC79C0068730A /* BurrowCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D0D4E5312C8D996F007F820A /* BurrowCore.framework */; }; - D0F7594E2C8DAB6B00126CF3 /* GRPC in Frameworks */ = {isa = PBXBuildFile; productRef = D078F7E02C8DA375008A8CEC /* GRPC */; }; - D0FA10012D10200100112233 /* burrow.pb.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0FA10032D10200100112233 /* burrow.pb.swift */; }; - D0FA10022D10200100112233 /* burrow.grpc.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0FA10042D10200100112233 /* burrow.grpc.swift */; }; - D0F7597E2C8DB30500126CF3 /* CGRPCZlib in Frameworks */ = {isa = PBXBuildFile; productRef = D0F7597D2C8DB30500126CF3 /* CGRPCZlib */; }; - D0F7598D2C8DB3DA00126CF3 /* Client.swift in Sources */ = {isa = PBXBuildFile; fileRef = D0D4E4992C8D921A007F820A /* Client.swift */; }; + D0BCC60A2A09A0B800AD070D /* build-rust.sh in Resources */ = {isa = PBXBuildFile; fileRef = D0B98FBF29FD8072004E7149 /* build-rust.sh */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ - D11000022F70000100112233 /* PBXContainerItemProxy */ = { + D00117462B30373100D87C25 /* 
PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; proxyType = 1; - remoteGlobalIDString = D05B9F7129E39EEC008CB1F9; - remoteInfo = App; + remoteGlobalIDString = D00117372B30341C00D87C25; + remoteInfo = Shared; + }; + D00117482B30373500D87C25 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; + proxyType = 1; + remoteGlobalIDString = D00117372B30341C00D87C25; + remoteInfo = Shared; }; D020F65B29E4A697002790F6 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; @@ -64,48 +51,6 @@ remoteGlobalIDString = D020F65229E4A697002790F6; remoteInfo = BurrowNetworkExtension; }; - D0BF09502C8E66F1000D8DEC /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E55A2C8D9BF4007F820A; - remoteInfo = Configuration; - }; - D0BF09532C8E66FA000D8DEC /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E55A2C8D9BF4007F820A; - remoteInfo = Configuration; - }; - D0D4E56E2C8D9C5D007F820A /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E5302C8D996F007F820A; - remoteInfo = Core; - }; - D0D4E57F2C8D9C78007F820A /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E5302C8D996F007F820A; - remoteInfo = Core; - }; - D0D4E5872C8D9C88007F820A /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E5502C8D9BF2007F820A; - remoteInfo = UI; 
- }; - D0F4FAD12C8DC7960068730A /* PBXContainerItemProxy */ = { - isa = PBXContainerItemProxy; - containerPortal = D05B9F6A29E39EEC008CB1F9 /* Project object */; - proxyType = 1; - remoteGlobalIDString = D0D4E5302C8D996F007F820A; - remoteInfo = Core; - }; /* End PBXContainerItemProxy section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -120,27 +65,19 @@ name = "Embed Foundation Extensions"; runOnlyForDeploymentPostprocessing = 0; }; - D0D4E53F2C8D996F007F820A /* Embed Frameworks */ = { - isa = PBXCopyFilesBuildPhase; - buildActionMask = 2147483647; - dstPath = ""; - dstSubfolderSpec = 10; - files = ( - D0D4E58B2C8D9CA4007F820A /* BurrowConfiguration.framework in Embed Frameworks */, - D0D4E58A2C8D9C9E007F820A /* BurrowUI.framework in Embed Frameworks */, - D0D4E53A2C8D996F007F820A /* BurrowCore.framework in Embed Frameworks */, - ); - name = "Embed Frameworks"; - runOnlyForDeploymentPostprocessing = 0; - }; /* End PBXCopyFilesBuildPhase section */ /* Begin PBXFileReference section */ - D00117422B30348D00D87C25 /* Configuration.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Configuration.xcconfig; sourceTree = ""; }; + 0B28F1552ABF463A000D44B0 /* DataTypes.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataTypes.swift; sourceTree = ""; }; + 0B46E8DF2AC918CA00BA2A3C /* Client.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Client.swift; sourceTree = ""; }; + 43AA26D72A10004900F14CE6 /* MenuView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MenuView.swift; sourceTree = ""; }; + D00117302B2FFFC900D87C25 /* NWConnection+Async.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "NWConnection+Async.swift"; sourceTree = ""; }; + D00117322B3001A400D87C25 /* NewlineProtocolFramer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NewlineProtocolFramer.swift; sourceTree = ""; }; + 
D00117382B30341C00D87C25 /* libBurrowShared.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libBurrowShared.a; sourceTree = BUILT_PRODUCTS_DIR; }; + D001173A2B30341C00D87C25 /* Logging.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Logging.swift; sourceTree = ""; }; + D00117412B30347800D87C25 /* module.modulemap */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.module-map"; path = module.modulemap; sourceTree = ""; }; + D00117422B30348D00D87C25 /* Shared.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Shared.xcconfig; sourceTree = ""; }; D00AA8962A4669BC005C8102 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; - D11000032F70000100112233 /* BurrowUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = BurrowUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; - D11000042F70000100112233 /* BurrowUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BurrowUITests.swift; sourceTree = ""; }; - D11000052F70000100112233 /* UITests.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = UITests.xcconfig; sourceTree = ""; }; D020F63D29E4A1FF002790F6 /* Identity.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Identity.xcconfig; sourceTree = ""; }; D020F64029E4A1FF002790F6 /* Compiler.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = Compiler.xcconfig; sourceTree = ""; }; D020F64229E4A1FF002790F6 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; @@ -155,45 +92,23 @@ D020F66729E4A95D002790F6 /* NetworkExtension-iOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = 
text.plist.entitlements; path = "NetworkExtension-iOS.entitlements"; sourceTree = ""; }; D020F66829E4AA74002790F6 /* App-iOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "App-iOS.entitlements"; sourceTree = ""; }; D020F66929E4AA74002790F6 /* App-macOS.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "App-macOS.entitlements"; sourceTree = ""; }; - D04A3E1D2BAF465F0043EC85 /* Version.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Version.xcconfig; sourceTree = ""; }; D05B9F7229E39EEC008CB1F9 /* Burrow.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Burrow.app; sourceTree = BUILT_PRODUCTS_DIR; }; D05B9F7529E39EEC008CB1F9 /* BurrowApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BurrowApp.swift; sourceTree = ""; }; - D09150412B9D2AF700BE3CB0 /* MainMenu.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = MainMenu.xib; sourceTree = ""; }; + D05B9F7729E39EEC008CB1F9 /* TunnelView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TunnelView.swift; sourceTree = ""; }; + D05B9F7929E39EED008CB1F9 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + D08252742B5C9DEB005DA378 /* Constants.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Constants.h; sourceTree = ""; }; + D08252752B5C9FC4005DA378 /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; D0B98FBF29FD8072004E7149 /* build-rust.sh */ = {isa = PBXFileReference; lastKnownFileType = text.script.sh; path = "build-rust.sh"; sourceTree = ""; }; + D0B98FC629FDC5B5004E7149 /* Tunnel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
Tunnel.swift; sourceTree = ""; }; D0B98FD829FDDB6F004E7149 /* libburrow.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = libburrow.h; sourceTree = ""; }; D0B98FDC29FDDDCF004E7149 /* module.modulemap */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.module-map"; path = module.modulemap; sourceTree = ""; }; + D0BCC5FC2A086D4700AD070D /* NetworkExtension+Async.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "NetworkExtension+Async.swift"; sourceTree = ""; }; + D0BCC5FE2A086E1C00AD070D /* Status.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Status.swift; sourceTree = ""; }; D0BCC6032A09535900AD070D /* libburrow.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libburrow.a; sourceTree = BUILT_PRODUCTS_DIR; }; - D0BF09582C8E6789000D8DEC /* UI.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = UI.xcconfig; sourceTree = ""; }; - D0D4E4952C8D921A007F820A /* burrow.proto */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.protobuf; path = burrow.proto; sourceTree = ""; }; - D0D4E4992C8D921A007F820A /* Client.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Client.swift; sourceTree = ""; }; - D0D4E49A2C8D921A007F820A /* Logging.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Logging.swift; sourceTree = ""; }; - D0D4E49E2C8D921A007F820A /* Network.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Network.swift; sourceTree = ""; }; - D0D4E49F2C8D921A007F820A /* WireGuard.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WireGuard.swift; sourceTree = ""; }; - D0D4E4A12C8D921A007F820A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - D0D4E4A22C8D921A007F820A /* BurrowView.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = BurrowView.swift; sourceTree = ""; }; - D0D4E4A32C8D921A007F820A /* FloatingButtonStyle.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FloatingButtonStyle.swift; sourceTree = ""; }; - D0D4E4A42C8D921A007F820A /* MenuItemToggleView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MenuItemToggleView.swift; sourceTree = ""; }; - D0D4E4A52C8D921A007F820A /* NetworkCarouselView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetworkCarouselView.swift; sourceTree = ""; }; - D0D4E4A62C8D921A007F820A /* NetworkExtension+Async.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "NetworkExtension+Async.swift"; sourceTree = ""; }; - D0D4E4A72C8D921A007F820A /* NetworkExtensionTunnel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetworkExtensionTunnel.swift; sourceTree = ""; }; - D0D4E4A82C8D921A007F820A /* NetworkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetworkView.swift; sourceTree = ""; }; - D0D4E4AA2C8D921A007F820A /* Tunnel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Tunnel.swift; sourceTree = ""; }; - D0D4E4AB2C8D921A007F820A /* TunnelButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TunnelButton.swift; sourceTree = ""; }; - D0D4E4AC2C8D921A007F820A /* TunnelStatusView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TunnelStatusView.swift; sourceTree = ""; }; - D0D4E4F62C8D932D007F820A /* Debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Debug.xcconfig; sourceTree = ""; }; - D0D4E4F72C8D941D007F820A /* Framework.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Framework.xcconfig; sourceTree = ""; }; - D0D4E5312C8D996F007F820A /* BurrowCore.framework 
*/ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = BurrowCore.framework; sourceTree = BUILT_PRODUCTS_DIR; }; - D0D4E5582C8D9BF2007F820A /* BurrowUI.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = BurrowUI.framework; sourceTree = BUILT_PRODUCTS_DIR; }; - D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = BurrowConfiguration.framework; sourceTree = BUILT_PRODUCTS_DIR; }; - D0D4E58E2C8D9D0A007F820A /* Constants.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Constants.h; sourceTree = ""; }; - D0D4E58F2C8D9D0A007F820A /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; - D0D4E5902C8D9D0A007F820A /* module.modulemap */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.module-map"; path = module.modulemap; sourceTree = ""; }; - D0FA10032D10200100112233 /* burrow.pb.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Generated/burrow.pb.swift; sourceTree = ""; }; - D0FA10042D10200100112233 /* burrow.grpc.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Generated/burrow.grpc.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ - D11000062F70000100112233 /* Frameworks */ = { + D00117352B30341C00D87C25 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( @@ -204,10 +119,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - D0BF09522C8E66F6000D8DEC /* BurrowConfiguration.framework in Frameworks */, - D0D4E5A62C8D9E65007F820A /* BurrowCore.framework in Frameworks */, + D00117442B30372900D87C25 /* libBurrowShared.a in Frameworks */, D0BCC6092A09A03E00AD070D /* libburrow.a in Frameworks */, - 
D0B1D1102C436152004B7823 /* AsyncAlgorithms in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -215,36 +128,41 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - D0BF09552C8E66FD000D8DEC /* BurrowConfiguration.framework in Frameworks */, - D0F4FAD32C8DC79C0068730A /* BurrowCore.framework in Frameworks */, - D0D4E5892C8D9C94007F820A /* BurrowUI.framework in Frameworks */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; - D078F7CF2C8DA213008A8CEC /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - D03383B02C8E67E300F7C44E /* NIOTransportServices in Frameworks */, - D03383AF2C8E67E300F7C44E /* NIOConcurrencyHelpers in Frameworks */, - D03383AE2C8E67E300F7C44E /* NIO in Frameworks */, - D03383AD2C8E67E300F7C44E /* SwiftProtobuf in Frameworks */, - D0F7594E2C8DAB6B00126CF3 /* GRPC in Frameworks */, - D0F7597E2C8DB30500126CF3 /* CGRPCZlib in Frameworks */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; - D0D4E5532C8D9BF2007F820A /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - D0D4E5702C8D9C62007F820A /* BurrowCore.framework in Frameworks */, + D00117452B30372C00D87C25 /* libBurrowShared.a in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 43AA26D62A0FFFD000F14CE6 /* Menu */ = { + isa = PBXGroup; + children = ( + 43AA26D72A10004900F14CE6 /* MenuView.swift */, + ); + path = Menu; + sourceTree = ""; + }; + D00117392B30341C00D87C25 /* Shared */ = { + isa = PBXGroup; + children = ( + D001173A2B30341C00D87C25 /* Logging.swift */, + D08252752B5C9FC4005DA378 /* Constants.swift */, + D00117422B30348D00D87C25 /* Shared.xcconfig */, + D001173F2B30347800D87C25 /* Constants */, + ); + path = Shared; + sourceTree = ""; + }; + D001173F2B30347800D87C25 /* Constants */ = { + isa = PBXGroup; + children = ( + D08252742B5C9DEB005DA378 /* Constants.h */, + 
D00117412B30347800D87C25 /* module.modulemap */, + ); + path = Constants; + sourceTree = ""; + }; D00117432B30372900D87C25 /* Frameworks */ = { isa = PBXGroup; children = ( @@ -258,14 +176,8 @@ D020F63D29E4A1FF002790F6 /* Identity.xcconfig */, D020F64A29E4A452002790F6 /* App.xcconfig */, D020F66329E4A703002790F6 /* Extension.xcconfig */, - D0D4E4F72C8D941D007F820A /* Framework.xcconfig */, D020F64029E4A1FF002790F6 /* Compiler.xcconfig */, - D0D4E4F62C8D932D007F820A /* Debug.xcconfig */, - D11000052F70000100112233 /* UITests.xcconfig */, - D04A3E1D2BAF465F0043EC85 /* Version.xcconfig */, D020F64229E4A1FF002790F6 /* Info.plist */, - D0D4E5912C8D9D0A007F820A /* Constants */, - D00117422B30348D00D87C25 /* Configuration.xcconfig */, ); path = Configuration; sourceTree = ""; @@ -274,6 +186,10 @@ isa = PBXGroup; children = ( D020F65729E4A697002790F6 /* PacketTunnelProvider.swift */, + 0B46E8DF2AC918CA00BA2A3C /* Client.swift */, + 0B28F1552ABF463A000D44B0 /* DataTypes.swift */, + D00117322B3001A400D87C25 /* NewlineProtocolFramer.swift */, + D00117302B2FFFC900D87C25 /* NWConnection+Async.swift */, D020F65929E4A697002790F6 /* Info.plist */, D020F66729E4A95D002790F6 /* NetworkExtension-iOS.entitlements */, D020F66629E4A95D002790F6 /* NetworkExtension-macOS.entitlements */, @@ -287,10 +203,8 @@ isa = PBXGroup; children = ( D05B9F7429E39EEC008CB1F9 /* App */, - D11000072F70000100112233 /* AppUITests */, D020F65629E4A697002790F6 /* NetworkExtension */, - D0D4E49C2C8D921A007F820A /* Core */, - D0D4E4AD2C8D921A007F820A /* UI */, + D00117392B30341C00D87C25 /* Shared */, D020F63C29E4A1FF002790F6 /* Configuration */, D05B9F7329E39EEC008CB1F9 /* Products */, D00117432B30372900D87C25 /* Frameworks */, @@ -301,12 +215,8 @@ isa = PBXGroup; children = ( D05B9F7229E39EEC008CB1F9 /* Burrow.app */, - D11000032F70000100112233 /* BurrowUITests.xctest */, D020F65329E4A697002790F6 /* BurrowNetworkExtension.appex */, - D0BCC6032A09535900AD070D /* libburrow.a */, - D0D4E5312C8D996F007F820A /* 
BurrowCore.framework */, - D0D4E5582C8D9BF2007F820A /* BurrowUI.framework */, - D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */, + D00117382B30341C00D87C25 /* libBurrowShared.a */, ); name = Products; sourceTree = ""; @@ -314,9 +224,14 @@ D05B9F7429E39EEC008CB1F9 /* App */ = { isa = PBXGroup; children = ( + 43AA26D62A0FFFD000F14CE6 /* Menu */, D05B9F7529E39EEC008CB1F9 /* BurrowApp.swift */, D00AA8962A4669BC005C8102 /* AppDelegate.swift */, - D09150412B9D2AF700BE3CB0 /* MainMenu.xib */, + D05B9F7729E39EEC008CB1F9 /* TunnelView.swift */, + D0B98FC629FDC5B5004E7149 /* Tunnel.swift */, + D0BCC5FE2A086E1C00AD070D /* Status.swift */, + D0BCC5FC2A086D4700AD070D /* NetworkExtension+Async.swift */, + D05B9F7929E39EED008CB1F9 /* Assets.xcassets */, D020F66829E4AA74002790F6 /* App-iOS.entitlements */, D020F66929E4AA74002790F6 /* App-macOS.entitlements */, D020F64929E4A34B002790F6 /* App.xcconfig */, @@ -324,103 +239,36 @@ path = App; sourceTree = ""; }; - D11000072F70000100112233 /* AppUITests */ = { - isa = PBXGroup; - children = ( - D11000042F70000100112233 /* BurrowUITests.swift */, - ); - path = AppUITests; - sourceTree = ""; - }; D0B98FD729FDDB57004E7149 /* libburrow */ = { isa = PBXGroup; children = ( D0B98FBF29FD8072004E7149 /* build-rust.sh */, D0B98FDC29FDDDCF004E7149 /* module.modulemap */, D0B98FD829FDDB6F004E7149 /* libburrow.h */, + D0BCC6032A09535900AD070D /* libburrow.a */, ); path = libburrow; sourceTree = ""; }; - D0D4E4982C8D921A007F820A /* Client */ = { - isa = PBXGroup; - children = ( - D0D4E4952C8D921A007F820A /* burrow.proto */, - D0FA10032D10200100112233 /* burrow.pb.swift */, - D0FA10042D10200100112233 /* burrow.grpc.swift */, - ); - path = Client; - sourceTree = ""; - }; - D0D4E49C2C8D921A007F820A /* Core */ = { - isa = PBXGroup; - children = ( - D0D4E49A2C8D921A007F820A /* Logging.swift */, - D0D4E4992C8D921A007F820A /* Client.swift */, - D0D4E4982C8D921A007F820A /* Client */, - ); - path = Core; - sourceTree = ""; - }; - 
D0D4E4A02C8D921A007F820A /* Networks */ = { - isa = PBXGroup; - children = ( - D0D4E49E2C8D921A007F820A /* Network.swift */, - D0D4E49F2C8D921A007F820A /* WireGuard.swift */, - ); - path = Networks; - sourceTree = ""; - }; - D0D4E4AD2C8D921A007F820A /* UI */ = { - isa = PBXGroup; - children = ( - D0D4E4A22C8D921A007F820A /* BurrowView.swift */, - D0D4E4A02C8D921A007F820A /* Networks */, - D0D4E4A32C8D921A007F820A /* FloatingButtonStyle.swift */, - D0D4E4A42C8D921A007F820A /* MenuItemToggleView.swift */, - D0D4E4A52C8D921A007F820A /* NetworkCarouselView.swift */, - D0D4E4A62C8D921A007F820A /* NetworkExtension+Async.swift */, - D0D4E4A72C8D921A007F820A /* NetworkExtensionTunnel.swift */, - D0D4E4A82C8D921A007F820A /* NetworkView.swift */, - D0D4E4AA2C8D921A007F820A /* Tunnel.swift */, - D0D4E4AB2C8D921A007F820A /* TunnelButton.swift */, - D0D4E4AC2C8D921A007F820A /* TunnelStatusView.swift */, - D0D4E4A12C8D921A007F820A /* Assets.xcassets */, - D0BF09582C8E6789000D8DEC /* UI.xcconfig */, - ); - path = UI; - sourceTree = ""; - }; - D0D4E5912C8D9D0A007F820A /* Constants */ = { - isa = PBXGroup; - children = ( - D0D4E58E2C8D9D0A007F820A /* Constants.h */, - D0D4E58F2C8D9D0A007F820A /* Constants.swift */, - D0D4E5902C8D9D0A007F820A /* module.modulemap */, - ); - path = Constants; - sourceTree = ""; - }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ - D11000082F70000100112233 /* BurrowUITests */ = { + D00117372B30341C00D87C25 /* Shared */ = { isa = PBXNativeTarget; - buildConfigurationList = D110000E2F70000100112233 /* Build configuration list for PBXNativeTarget "BurrowUITests" */; + buildConfigurationList = D001173C2B30341C00D87C25 /* Build configuration list for PBXNativeTarget "Shared" */; buildPhases = ( - D110000A2F70000100112233 /* Sources */, - D11000062F70000100112233 /* Frameworks */, - D11000092F70000100112233 /* Resources */, + D00117342B30341C00D87C25 /* Sources */, + D00117352B30341C00D87C25 /* Frameworks */, ); buildRules = ( ); dependencies 
= ( - D110000B2F70000100112233 /* PBXTargetDependency */, + D082527D2B5DEB80005DA378 /* PBXTargetDependency */, ); - name = BurrowUITests; - productName = BurrowUITests; - productReference = D11000032F70000100112233 /* BurrowUITests.xctest */; - productType = "com.apple.product-type.bundle.ui-testing"; + name = Shared; + productName = Shared; + productReference = D00117382B30341C00D87C25 /* libBurrowShared.a */; + productType = "com.apple.product-type.library.static"; }; D020F65229E4A697002790F6 /* NetworkExtension */ = { isa = PBXNativeTarget; @@ -429,12 +277,13 @@ D0BCC60B2A09A0C100AD070D /* Compile Rust */, D020F64F29E4A697002790F6 /* Sources */, D020F65029E4A697002790F6 /* Frameworks */, + D020F65129E4A697002790F6 /* Resources */, ); buildRules = ( ); dependencies = ( - D0BF09512C8E66F1000D8DEC /* PBXTargetDependency */, - D0D4E5802C8D9C78007F820A /* PBXTargetDependency */, + D08252792B5DEB78005DA378 /* PBXTargetDependency */, + D00117492B30373500D87C25 /* PBXTargetDependency */, ); name = NetworkExtension; productName = BurrowNetworkExtension; @@ -448,15 +297,13 @@ D05B9F6E29E39EEC008CB1F9 /* Sources */, D05B9F6F29E39EEC008CB1F9 /* Frameworks */, D05B9F7029E39EEC008CB1F9 /* Resources */, - D0D4E53F2C8D996F007F820A /* Embed Frameworks */, D020F66129E4A697002790F6 /* Embed Foundation Extensions */, ); buildRules = ( ); dependencies = ( - D0BF09542C8E66FA000D8DEC /* PBXTargetDependency */, - D0F4FAD22C8DC7960068730A /* PBXTargetDependency */, - D0D4E5882C8D9C88007F820A /* PBXTargetDependency */, + D082527B2B5DEB7D005DA378 /* PBXTargetDependency */, + D00117472B30373100D87C25 /* PBXTargetDependency */, D020F65C29E4A697002790F6 /* PBXTargetDependency */, ); name = App; @@ -464,69 +311,6 @@ productReference = D05B9F7229E39EEC008CB1F9 /* Burrow.app */; productType = "com.apple.product-type.application"; }; - D0D4E5302C8D996F007F820A /* Core */ = { - isa = PBXNativeTarget; - buildConfigurationList = D0D4E53C2C8D996F007F820A /* Build configuration list for 
PBXNativeTarget "Core" */; - buildPhases = ( - D0D4E52D2C8D996F007F820A /* Sources */, - D078F7CF2C8DA213008A8CEC /* Frameworks */, - ); - buildRules = ( - ); - dependencies = ( - D0F7598A2C8DB34200126CF3 /* PBXTargetDependency */, - ); - name = Core; - packageProductDependencies = ( - D078F7E02C8DA375008A8CEC /* GRPC */, - D078F7E22C8DA375008A8CEC /* SwiftProtobuf */, - D044EE902C8DAB2000778185 /* NIO */, - D044EE922C8DAB2000778185 /* NIOConcurrencyHelpers */, - D044EE952C8DAB2800778185 /* NIOTransportServices */, - D0F7597D2C8DB30500126CF3 /* CGRPCZlib */, - ); - productName = Core; - productReference = D0D4E5312C8D996F007F820A /* BurrowCore.framework */; - productType = "com.apple.product-type.framework"; - }; - D0D4E5502C8D9BF2007F820A /* UI */ = { - isa = PBXNativeTarget; - buildConfigurationList = D0D4E5552C8D9BF2007F820A /* Build configuration list for PBXNativeTarget "UI" */; - buildPhases = ( - D0D4E5522C8D9BF2007F820A /* Sources */, - D0D4E5532C8D9BF2007F820A /* Frameworks */, - D0D4E5542C8D9BF2007F820A /* Resources */, - ); - buildRules = ( - ); - dependencies = ( - D0D4E56F2C8D9C5D007F820A /* PBXTargetDependency */, - ); - name = UI; - packageProductDependencies = ( - ); - productName = Core; - productReference = D0D4E5582C8D9BF2007F820A /* BurrowUI.framework */; - productType = "com.apple.product-type.framework"; - }; - D0D4E55A2C8D9BF4007F820A /* Configuration */ = { - isa = PBXNativeTarget; - buildConfigurationList = D0D4E55F2C8D9BF4007F820A /* Build configuration list for PBXNativeTarget "Configuration" */; - buildPhases = ( - D0F759912C8DB49E00126CF3 /* Configure Version */, - D0D4E55C2C8D9BF4007F820A /* Sources */, - ); - buildRules = ( - ); - dependencies = ( - ); - name = Configuration; - packageProductDependencies = ( - ); - productName = Core; - productReference = D0D4E5622C8D9BF4007F820A /* BurrowConfiguration.framework */; - productType = "com.apple.product-type.framework"; - }; /* End PBXNativeTarget section */ /* Begin PBXProject section 
*/ @@ -534,12 +318,11 @@ isa = PBXProject; attributes = { BuildIndependentTargetsInParallel = 1; - LastSwiftUpdateCheck = 1600; - LastUpgradeCheck = 1520; + LastSwiftUpdateCheck = 1510; + LastUpgradeCheck = 1430; TargetAttributes = { - D11000082F70000100112233 = { - CreatedOnToolsVersion = 16.0; - TestTargetID = D05B9F7129E39EEC008CB1F9; + D00117372B30341C00D87C25 = { + CreatedOnToolsVersion = 15.1; }; D020F65229E4A697002790F6 = { CreatedOnToolsVersion = 14.3; @@ -547,9 +330,6 @@ D05B9F7129E39EEC008CB1F9 = { CreatedOnToolsVersion = 14.3; }; - D0D4E5302C8D996F007F820A = { - CreatedOnToolsVersion = 16.0; - }; }; }; buildConfigurationList = D05B9F6D29E39EEC008CB1F9 /* Build configuration list for PBXProject "Burrow" */; @@ -562,31 +342,25 @@ ); mainGroup = D05B9F6929E39EEC008CB1F9; packageReferences = ( - D0B1D10E2C436152004B7823 /* XCRemoteSwiftPackageReference "swift-async-algorithms" */, - D0D4E4822C8D8EF6007F820A /* XCRemoteSwiftPackageReference "grpc-swift" */, - D0D4E4852C8D8F29007F820A /* XCRemoteSwiftPackageReference "swift-protobuf" */, - D044EE8F2C8DAB2000778185 /* XCRemoteSwiftPackageReference "swift-nio" */, - D044EE942C8DAB2800778185 /* XCRemoteSwiftPackageReference "swift-nio-transport-services" */, + D08252772B5DEB6E005DA378 /* XCRemoteSwiftPackageReference "SwiftLint" */, ); productRefGroup = D05B9F7329E39EEC008CB1F9 /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( D05B9F7129E39EEC008CB1F9 /* App */, - D11000082F70000100112233 /* BurrowUITests */, D020F65229E4A697002790F6 /* NetworkExtension */, - D0D4E5502C8D9BF2007F820A /* UI */, - D0D4E5302C8D996F007F820A /* Core */, - D0D4E55A2C8D9BF4007F820A /* Configuration */, + D00117372B30341C00D87C25 /* Shared */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - D11000092F70000100112233 /* Resources */ = { + D020F65129E4A697002790F6 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + D0BCC60A2A09A0B800AD070D /* 
build-rust.sh in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -594,14 +368,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - D09150422B9D2AF700BE3CB0 /* MainMenu.xib in Resources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; - D0D4E5542C8D9BF2007F820A /* Resources */ = { - isa = PBXResourcesBuildPhase; - buildActionMask = 2147483647; - files = ( + D05B9F7A29E39EED008CB1F9 /* Assets.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -628,36 +395,15 @@ shellScript = "\"${PROJECT_DIR}/NetworkExtension/libburrow/build-rust.sh\"\n"; showEnvVarsInLog = 0; }; - D0F759912C8DB49E00126CF3 /* Configure Version */ = { - isa = PBXShellScriptBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - inputFileListPaths = ( - ); - inputPaths = ( - "$(PROJECT_DIR)/../Tools/version.sh", - "$(PROJECT_DIR)/../.git", - ); - name = "Configure Version"; - outputFileListPaths = ( - ); - outputPaths = ( - "$(PROJECT_DIR)/Configuration/Version.xcconfig", - ); - runOnlyForDeploymentPostprocessing = 0; - shellPath = /bin/sh; - shellScript = "\"$PROJECT_DIR/../Tools/version.sh\"\n"; - showEnvVarsInLog = 0; - }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ - D110000A2F70000100112233 /* Sources */ = { + D00117342B30341C00D87C25 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - D11000012F70000100112233 /* BurrowUITests.swift in Sources */, + D001173B2B30341C00D87C25 /* Logging.swift in Sources */, + D08252762B5C9FC4005DA378 /* Constants.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -665,6 +411,10 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + D00117332B3001A400D87C25 /* NewlineProtocolFramer.swift in Sources */, + 0B28F1562ABF463A000D44B0 /* DataTypes.swift in Sources */, + D00117312B2FFFC900D87C25 /* NWConnection+Async.swift in Sources */, + 0B46E8E02AC918CA00BA2A3C /* Client.swift in 
Sources */, D020F65829E4A697002790F6 /* PacketTunnelProvider.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -673,109 +423,59 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + D0BCC6082A0981FE00AD070D /* Tunnel.swift in Sources */, + 43AA26D82A10004900F14CE6 /* MenuView.swift in Sources */, + D05B9F7829E39EEC008CB1F9 /* TunnelView.swift in Sources */, + D0BCC5FF2A086E1C00AD070D /* Status.swift in Sources */, D00AA8972A4669BC005C8102 /* AppDelegate.swift in Sources */, D05B9F7629E39EEC008CB1F9 /* BurrowApp.swift in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; - D0D4E52D2C8D996F007F820A /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - D0FA10012D10200100112233 /* burrow.pb.swift in Sources */, - D0FA10022D10200100112233 /* burrow.grpc.swift in Sources */, - D0F7598D2C8DB3DA00126CF3 /* Client.swift in Sources */, - D0D4E56B2C8D9C2F007F820A /* Logging.swift in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; - D0D4E5522C8D9BF2007F820A /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - D0D4E5722C8D9C6F007F820A /* Network.swift in Sources */, - D0D4E5732C8D9C6F007F820A /* WireGuard.swift in Sources */, - D0D4E5742C8D9C6F007F820A /* BurrowView.swift in Sources */, - D0D4E5752C8D9C6F007F820A /* FloatingButtonStyle.swift in Sources */, - D0D4E5762C8D9C6F007F820A /* MenuItemToggleView.swift in Sources */, - D0D4E5772C8D9C6F007F820A /* NetworkCarouselView.swift in Sources */, - D0D4E5782C8D9C6F007F820A /* NetworkExtension+Async.swift in Sources */, - D0D4E5792C8D9C6F007F820A /* NetworkExtensionTunnel.swift in Sources */, - D0D4E57A2C8D9C6F007F820A /* NetworkView.swift in Sources */, - D0D4E57C2C8D9C6F007F820A /* Tunnel.swift in Sources */, - D0D4E57D2C8D9C6F007F820A /* TunnelButton.swift in Sources */, - D0D4E57E2C8D9C6F007F820A /* TunnelStatusView.swift in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; 
- }; - D0D4E55C2C8D9BF4007F820A /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - D0D4E5922C8D9D15007F820A /* Constants.swift in Sources */, + D0BCC5FD2A086D4700AD070D /* NetworkExtension+Async.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ - D110000B2F70000100112233 /* PBXTargetDependency */ = { + D00117472B30373100D87C25 /* PBXTargetDependency */ = { isa = PBXTargetDependency; - target = D05B9F7129E39EEC008CB1F9 /* App */; - targetProxy = D11000022F70000100112233 /* PBXContainerItemProxy */; + target = D00117372B30341C00D87C25 /* Shared */; + targetProxy = D00117462B30373100D87C25 /* PBXContainerItemProxy */; + }; + D00117492B30373500D87C25 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = D00117372B30341C00D87C25 /* Shared */; + targetProxy = D00117482B30373500D87C25 /* PBXContainerItemProxy */; }; D020F65C29E4A697002790F6 /* PBXTargetDependency */ = { isa = PBXTargetDependency; target = D020F65229E4A697002790F6 /* NetworkExtension */; targetProxy = D020F65B29E4A697002790F6 /* PBXContainerItemProxy */; }; - D0BF09512C8E66F1000D8DEC /* PBXTargetDependency */ = { + D08252792B5DEB78005DA378 /* PBXTargetDependency */ = { isa = PBXTargetDependency; - target = D0D4E55A2C8D9BF4007F820A /* Configuration */; - targetProxy = D0BF09502C8E66F1000D8DEC /* PBXContainerItemProxy */; + productRef = D08252782B5DEB78005DA378 /* SwiftLintPlugin */; }; - D0BF09542C8E66FA000D8DEC /* PBXTargetDependency */ = { + D082527B2B5DEB7D005DA378 /* PBXTargetDependency */ = { isa = PBXTargetDependency; - target = D0D4E55A2C8D9BF4007F820A /* Configuration */; - targetProxy = D0BF09532C8E66FA000D8DEC /* PBXContainerItemProxy */; + productRef = D082527A2B5DEB7D005DA378 /* SwiftLintPlugin */; }; - D0D4E56F2C8D9C5D007F820A /* PBXTargetDependency */ = { + D082527D2B5DEB80005DA378 /* PBXTargetDependency */ = { isa = PBXTargetDependency; - 
target = D0D4E5302C8D996F007F820A /* Core */; - targetProxy = D0D4E56E2C8D9C5D007F820A /* PBXContainerItemProxy */; - }; - D0D4E5802C8D9C78007F820A /* PBXTargetDependency */ = { - isa = PBXTargetDependency; - target = D0D4E5302C8D996F007F820A /* Core */; - targetProxy = D0D4E57F2C8D9C78007F820A /* PBXContainerItemProxy */; - }; - D0D4E5882C8D9C88007F820A /* PBXTargetDependency */ = { - isa = PBXTargetDependency; - target = D0D4E5502C8D9BF2007F820A /* UI */; - targetProxy = D0D4E5872C8D9C88007F820A /* PBXContainerItemProxy */; - }; - D0F4FAD22C8DC7960068730A /* PBXTargetDependency */ = { - isa = PBXTargetDependency; - target = D0D4E5302C8D996F007F820A /* Core */; - targetProxy = D0F4FAD12C8DC7960068730A /* PBXContainerItemProxy */; - }; - D0F7598A2C8DB34200126CF3 /* PBXTargetDependency */ = { - isa = PBXTargetDependency; - productRef = D0F759892C8DB34200126CF3 /* GRPC */; + productRef = D082527C2B5DEB80005DA378 /* SwiftLintPlugin */; }; /* End PBXTargetDependency section */ /* Begin XCBuildConfiguration section */ - D110000C2F70000100112233 /* Debug */ = { + D001173D2B30341C00D87C25 /* Debug */ = { isa = XCBuildConfiguration; - baseConfigurationReference = D11000052F70000100112233 /* UITests.xcconfig */; + baseConfigurationReference = D00117422B30348D00D87C25 /* Shared.xcconfig */; buildSettings = { }; name = Debug; }; - D110000D2F70000100112233 /* Release */ = { + D001173E2B30341C00D87C25 /* Release */ = { isa = XCBuildConfiguration; - baseConfigurationReference = D11000052F70000100112233 /* UITests.xcconfig */; + baseConfigurationReference = D00117422B30348D00D87C25 /* Shared.xcconfig */; buildSettings = { }; name = Release; @@ -822,56 +522,14 @@ }; name = Release; }; - D0D4E53D2C8D996F007F820A /* Debug */ = { - isa = XCBuildConfiguration; - baseConfigurationReference = D0D4E4F72C8D941D007F820A /* Framework.xcconfig */; - buildSettings = { - }; - name = Debug; - }; - D0D4E53E2C8D996F007F820A /* Release */ = { - isa = XCBuildConfiguration; - 
baseConfigurationReference = D0D4E4F72C8D941D007F820A /* Framework.xcconfig */; - buildSettings = { - }; - name = Release; - }; - D0D4E5562C8D9BF2007F820A /* Debug */ = { - isa = XCBuildConfiguration; - baseConfigurationReference = D0BF09582C8E6789000D8DEC /* UI.xcconfig */; - buildSettings = { - }; - name = Debug; - }; - D0D4E5572C8D9BF2007F820A /* Release */ = { - isa = XCBuildConfiguration; - baseConfigurationReference = D0BF09582C8E6789000D8DEC /* UI.xcconfig */; - buildSettings = { - }; - name = Release; - }; - D0D4E5602C8D9BF4007F820A /* Debug */ = { - isa = XCBuildConfiguration; - baseConfigurationReference = D00117422B30348D00D87C25 /* Configuration.xcconfig */; - buildSettings = { - }; - name = Debug; - }; - D0D4E5612C8D9BF4007F820A /* Release */ = { - isa = XCBuildConfiguration; - baseConfigurationReference = D00117422B30348D00D87C25 /* Configuration.xcconfig */; - buildSettings = { - }; - name = Release; - }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - D110000E2F70000100112233 /* Build configuration list for PBXNativeTarget "BurrowUITests" */ = { + D001173C2B30341C00D87C25 /* Build configuration list for PBXNativeTarget "Shared" */ = { isa = XCConfigurationList; buildConfigurations = ( - D110000C2F70000100112233 /* Debug */, - D110000D2F70000100112233 /* Release */, + D001173D2B30341C00D87C25 /* Debug */, + D001173E2B30341C00D87C25 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; @@ -903,118 +561,34 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; - D0D4E53C2C8D996F007F820A /* Build configuration list for PBXNativeTarget "Core" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - D0D4E53D2C8D996F007F820A /* Debug */, - D0D4E53E2C8D996F007F820A /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; - D0D4E5552C8D9BF2007F820A /* Build configuration list for PBXNativeTarget "UI" */ = { - isa = 
XCConfigurationList; - buildConfigurations = ( - D0D4E5562C8D9BF2007F820A /* Debug */, - D0D4E5572C8D9BF2007F820A /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; - D0D4E55F2C8D9BF4007F820A /* Build configuration list for PBXNativeTarget "Configuration" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - D0D4E5602C8D9BF4007F820A /* Debug */, - D0D4E5612C8D9BF4007F820A /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; /* End XCConfigurationList section */ /* Begin XCRemoteSwiftPackageReference section */ - D044EE8F2C8DAB2000778185 /* XCRemoteSwiftPackageReference "swift-nio" */ = { + D08252772B5DEB6E005DA378 /* XCRemoteSwiftPackageReference "SwiftLint" */ = { isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/apple/swift-nio.git"; + repositoryURL = "https://github.com/realm/SwiftLint.git"; requirement = { kind = upToNextMajorVersion; - minimumVersion = 2.72.0; - }; - }; - D044EE942C8DAB2800778185 /* XCRemoteSwiftPackageReference "swift-nio-transport-services" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/apple/swift-nio-transport-services.git"; - requirement = { - kind = upToNextMajorVersion; - minimumVersion = 1.21.0; - }; - }; - D0B1D10E2C436152004B7823 /* XCRemoteSwiftPackageReference "swift-async-algorithms" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/apple/swift-async-algorithms.git"; - requirement = { - kind = upToNextMajorVersion; - minimumVersion = 1.0.1; - }; - }; - D0D4E4822C8D8EF6007F820A /* XCRemoteSwiftPackageReference "grpc-swift" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/grpc/grpc-swift.git"; - requirement = { - kind = upToNextMajorVersion; - minimumVersion = 1.23.0; - }; - }; - D0D4E4852C8D8F29007F820A /* XCRemoteSwiftPackageReference "swift-protobuf" */ = { - isa = XCRemoteSwiftPackageReference; - 
repositoryURL = "https://github.com/apple/swift-protobuf.git"; - requirement = { - kind = upToNextMajorVersion; - minimumVersion = 1.28.1; + minimumVersion = 0.54.0; }; }; /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - D044EE902C8DAB2000778185 /* NIO */ = { + D08252782B5DEB78005DA378 /* SwiftLintPlugin */ = { isa = XCSwiftPackageProductDependency; - package = D044EE8F2C8DAB2000778185 /* XCRemoteSwiftPackageReference "swift-nio" */; - productName = NIO; + package = D08252772B5DEB6E005DA378 /* XCRemoteSwiftPackageReference "SwiftLint" */; + productName = "plugin:SwiftLintPlugin"; }; - D044EE922C8DAB2000778185 /* NIOConcurrencyHelpers */ = { + D082527A2B5DEB7D005DA378 /* SwiftLintPlugin */ = { isa = XCSwiftPackageProductDependency; - package = D044EE8F2C8DAB2000778185 /* XCRemoteSwiftPackageReference "swift-nio" */; - productName = NIOConcurrencyHelpers; + package = D08252772B5DEB6E005DA378 /* XCRemoteSwiftPackageReference "SwiftLint" */; + productName = "plugin:SwiftLintPlugin"; }; - D044EE952C8DAB2800778185 /* NIOTransportServices */ = { + D082527C2B5DEB80005DA378 /* SwiftLintPlugin */ = { isa = XCSwiftPackageProductDependency; - package = D044EE942C8DAB2800778185 /* XCRemoteSwiftPackageReference "swift-nio-transport-services" */; - productName = NIOTransportServices; - }; - D078F7E02C8DA375008A8CEC /* GRPC */ = { - isa = XCSwiftPackageProductDependency; - package = D0D4E4822C8D8EF6007F820A /* XCRemoteSwiftPackageReference "grpc-swift" */; - productName = GRPC; - }; - D078F7E22C8DA375008A8CEC /* SwiftProtobuf */ = { - isa = XCSwiftPackageProductDependency; - package = D0D4E4852C8D8F29007F820A /* XCRemoteSwiftPackageReference "swift-protobuf" */; - productName = SwiftProtobuf; - }; - D0B1D10F2C436152004B7823 /* AsyncAlgorithms */ = { - isa = XCSwiftPackageProductDependency; - package = D0B1D10E2C436152004B7823 /* XCRemoteSwiftPackageReference "swift-async-algorithms" */; - productName = AsyncAlgorithms; - }; - 
D0F7597D2C8DB30500126CF3 /* CGRPCZlib */ = { - isa = XCSwiftPackageProductDependency; - package = D0D4E4822C8D8EF6007F820A /* XCRemoteSwiftPackageReference "grpc-swift" */; - productName = CGRPCZlib; - }; - D0F759892C8DB34200126CF3 /* GRPC */ = { - isa = XCSwiftPackageProductDependency; - package = D0D4E4822C8D8EF6007F820A /* XCRemoteSwiftPackageReference "grpc-swift" */; - productName = GRPC; + package = D08252772B5DEB6E005DA378 /* XCRemoteSwiftPackageReference "SwiftLint" */; + productName = "plugin:SwiftLintPlugin"; }; /* End XCSwiftPackageProductDependency section */ }; diff --git a/Apple/Burrow.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Apple/Burrow.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index 739b77c..7522840 100644 --- a/Apple/Burrow.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/Apple/Burrow.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -1,123 +1,86 @@ { - "originHash" : "fa512b990383b7e309c5854a5279817052294a8191a6d3c55c49cfb38e88c0c3", "pins" : [ { - "identity" : "grpc-swift", + "identity" : "collectionconcurrencykit", "kind" : "remoteSourceControl", - "location" : "https://github.com/grpc/grpc-swift.git", + "location" : "https://github.com/JohnSundell/CollectionConcurrencyKit.git", "state" : { - "revision" : "6a90b7e77e29f9bda6c2b3a4165a40d6c02cfda1", - "version" : "1.23.0" + "revision" : "b4f23e24b5a1bff301efc5e70871083ca029ff95", + "version" : "0.2.0" } }, { - "identity" : "swift-async-algorithms", + "identity" : "cryptoswift", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-async-algorithms.git", + "location" : "https://github.com/krzyzanowskim/CryptoSwift.git", "state" : { - "revision" : "6ae9a051f76b81cc668305ceed5b0e0a7fd93d20", - "version" : "1.0.1" + "revision" : "7892a123f7e8d0fe62f9f03728b17bbd4f94df5c", + "version" : "1.8.1" } }, { - "identity" : "swift-atomics", + "identity" : "sourcekitten", "kind" : 
"remoteSourceControl", - "location" : "https://github.com/apple/swift-atomics.git", + "location" : "https://github.com/jpsim/SourceKitten.git", "state" : { - "revision" : "cd142fd2f64be2100422d658e7411e39489da985", - "version" : "1.2.0" + "revision" : "b6dc09ee51dfb0c66e042d2328c017483a1a5d56", + "version" : "0.34.1" } }, { - "identity" : "swift-collections", + "identity" : "swift-argument-parser", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-collections.git", + "location" : "https://github.com/apple/swift-argument-parser.git", "state" : { - "revision" : "9bf03ff58ce34478e66aaee630e491823326fd06", - "version" : "1.1.3" + "revision" : "fee6933f37fde9a5e12a1e4aeaa93fe60116ff2a", + "version" : "1.2.2" } }, { - "identity" : "swift-http-types", + "identity" : "swift-syntax", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-http-types", + "location" : "https://github.com/apple/swift-syntax.git", "state" : { - "revision" : "ae67c8178eb46944fd85e4dc6dd970e1f3ed6ccd", - "version" : "1.3.0" + "revision" : "6ad4ea24b01559dde0773e3d091f1b9e36175036", + "version" : "509.0.2" } }, { - "identity" : "swift-log", + "identity" : "swiftlint", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-log.git", + "location" : "https://github.com/realm/SwiftLint.git", "state" : { - "revision" : "9cb486020ebf03bfa5b5df985387a14a98744537", - "version" : "1.6.1" + "revision" : "f17a4f9dfb6a6afb0408426354e4180daaf49cee", + "version" : "0.54.0" } }, { - "identity" : "swift-nio", + "identity" : "swiftytexttable", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-nio.git", + "location" : "https://github.com/scottrhoyt/SwiftyTextTable.git", "state" : { - "revision" : "9746cf80e29edfef2a39924a66731249223f42a3", - "version" : "2.72.0" + "revision" : "c6df6cf533d120716bff38f8ff9885e1ce2a4ac3", + "version" : "0.9.0" } }, { - "identity" : "swift-nio-extras", + "identity" : "swxmlhash", 
"kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-nio-extras.git", + "location" : "https://github.com/drmohundro/SWXMLHash.git", "state" : { - "revision" : "d1ead62745cc3269e482f1c51f27608057174379", - "version" : "1.24.0" + "revision" : "4d0f62f561458cbe1f732171e625f03195151b60", + "version" : "7.0.1" } }, { - "identity" : "swift-nio-http2", + "identity" : "yams", "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-nio-http2.git", + "location" : "https://github.com/jpsim/Yams.git", "state" : { - "revision" : "b5f7062b60e4add1e8c343ba4eb8da2e324b3a94", - "version" : "1.34.0" - } - }, - { - "identity" : "swift-nio-ssl", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-nio-ssl.git", - "state" : { - "revision" : "7b84abbdcef69cc3be6573ac12440220789dcd69", - "version" : "2.27.2" - } - }, - { - "identity" : "swift-nio-transport-services", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-nio-transport-services.git", - "state" : { - "revision" : "38ac8221dd20674682148d6451367f89c2652980", - "version" : "1.21.0" - } - }, - { - "identity" : "swift-protobuf", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-protobuf.git", - "state" : { - "revision" : "edb6ed4919f7756157fe02f2552b7e3850a538e5", - "version" : "1.28.1" - } - }, - { - "identity" : "swift-system", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-system.git", - "state" : { - "revision" : "d2ba781702a1d8285419c15ee62fd734a9437ff5", - "version" : "1.3.2" + "revision" : "0d9ee7ea8c4ebd4a489ad7a73d5c6cad55d6fed3", + "version" : "5.0.6" } } ], - "version" : 3 + "version" : 2 } diff --git a/Apple/Burrow.xcodeproj/xcshareddata/xcschemes/App.xcscheme b/Apple/Burrow.xcodeproj/xcshareddata/xcschemes/App.xcscheme index f580ea7..c63f8e6 100644 --- a/Apple/Burrow.xcodeproj/xcshareddata/xcschemes/App.xcscheme +++ 
b/Apple/Burrow.xcodeproj/xcshareddata/xcschemes/App.xcscheme @@ -1,11 +1,10 @@ + buildImplicitDependencies = "YES"> - - - - - - + shouldAutocreateTestPlan = "YES"> + buildImplicitDependencies = "YES"> = { - switch FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGroupIdentifier) { - case .some(let url): .success(url) - case .none: - fallbackContainerURL().mapError { _ in .invalidAppGroupIdentifier } - } - }() - - private static func fallbackContainerURL() -> Result { -#if targetEnvironment(simulator) - Result { - // The simulator app's Application Support path lives inside its sandbox container, - // so the host daemon cannot reach it. Use a shared host temp location instead. - let url = URL(filePath: "/tmp", directoryHint: .isDirectory) - .appending(component: bundleIdentifier, directoryHint: .isDirectory) - .appending(component: "SimulatorFallback", directoryHint: .isDirectory) - try FileManager.default.createDirectory(at: url, withIntermediateDirectories: true) - return url - } -#else - .failure(Error.invalidAppGroupIdentifier) -#endif - } -} - -extension Logger { - @_dynamicReplacement(for: subsystem) - public static var subsystem: String { Constants.bundleIdentifier } -} diff --git a/Apple/Configuration/Debug.xcconfig b/Apple/Configuration/Debug.xcconfig deleted file mode 100644 index 9529dbd..0000000 --- a/Apple/Configuration/Debug.xcconfig +++ /dev/null @@ -1,26 +0,0 @@ -// Release -DEBUG_INFORMATION_FORMAT = dwarf-with-dsym -SWIFT_COMPILATION_MODE = wholemodule -SWIFT_OPTIMIZATION_LEVEL = -Osize -LLVM_LTO = YES -DEAD_CODE_STRIPPING = YES -STRIP_INSTALLED_PRODUCT = YES -STRIP_SWIFT_SYMBOLS = YES -COPY_PHASE_STRIP = NO -VALIDATE_PRODUCT = YES -ENABLE_MODULE_VERIFIER = YES - -// Debug -ONLY_ACTIVE_ARCH[config=Debug] = YES -DEBUG_INFORMATION_FORMAT[config=Debug] = dwarf -ENABLE_TESTABILITY[config=Debug] = YES -GCC_PREPROCESSOR_DEFINITIONS[config=Debug] = DEBUG=1 $(inherited) -SWIFT_OPTIMIZATION_LEVEL[config=Debug] = -Onone 
-SWIFT_ACTIVE_COMPILATION_CONDITIONS[config=Debug] = DEBUG -SWIFT_COMPILATION_MODE[config=Debug] = singlefile -LLVM_LTO[config=Debug] = NO -DEAD_CODE_STRIPPING[config=Debug] = NO -VALIDATE_PRODUCT[config=Debug] = NO -STRIP_INSTALLED_PRODUCT[config=Debug] = NO -STRIP_SWIFT_SYMBOLS[config=Debug] = NO -ENABLE_MODULE_VERIFIER[config=Debug] = NO diff --git a/Apple/Configuration/Extension.xcconfig b/Apple/Configuration/Extension.xcconfig index 5885c31..f8d90a3 100644 --- a/Apple/Configuration/Extension.xcconfig +++ b/Apple/Configuration/Extension.xcconfig @@ -1,6 +1,4 @@ -LD_EXPORT_SYMBOLS = NO - -OTHER_SWIFT_FLAGS = $(inherited) -Xfrontend -disable-autolink-framework -Xfrontend UIKit -Xfrontend -disable-autolink-framework -Xfrontend AppKit -Xfrontend -disable-autolink-framework -Xfrontend SwiftUI +MERGED_BINARY_TYPE = manual LD_RUNPATH_SEARCH_PATHS = $(inherited) @executable_path/Frameworks @executable_path/../../Frameworks -LD_RUNPATH_SEARCH_PATHS[sdk=macosx*] = $(inherited) @executable_path/../Frameworks @executable_path/../../../../Frameworks +LD_RUNPATH_SEARCH_PATHS[sdk=macos*] = $(inherited) @executable_path/../Frameworks @executable_path/../../../../Frameworks diff --git a/Apple/Configuration/Framework.xcconfig b/Apple/Configuration/Framework.xcconfig deleted file mode 100644 index 6fa4f19..0000000 --- a/Apple/Configuration/Framework.xcconfig +++ /dev/null @@ -1,14 +0,0 @@ -PRODUCT_NAME = Burrow$(TARGET_NAME:c99extidentifier) -PRODUCT_BUNDLE_IDENTIFIER = $(APP_BUNDLE_IDENTIFIER).$(TARGET_NAME:c99extidentifier) -APPLICATION_EXTENSION_API_ONLY = YES -SWIFT_INSTALL_OBJC_HEADER = NO -SWIFT_SKIP_AUTOLINKING_FRAMEWORKS = YES -SWIFT_SKIP_AUTOLINKING_LIBRARIES = YES - -LD_RUNPATH_SEARCH_PATHS = $(inherited) @executable_path/Frameworks @loader_path/Frameworks -LD_RUNPATH_SEARCH_PATHS[sdk=macosx*] = $(inherited) @executable_path/../Frameworks @loader_path/Frameworks - -DYLIB_INSTALL_NAME_BASE = @rpath -DYLIB_COMPATIBILITY_VERSION = 1 -DYLIB_CURRENT_VERSION = 1 
-VERSIONING_SYSTEM = diff --git a/Apple/Configuration/UITests.xcconfig b/Apple/Configuration/UITests.xcconfig deleted file mode 100644 index a97e290..0000000 --- a/Apple/Configuration/UITests.xcconfig +++ /dev/null @@ -1,14 +0,0 @@ -#include "Compiler.xcconfig" - -SUPPORTED_PLATFORMS = iphonesimulator iphoneos -TARGETED_DEVICE_FAMILY[sdk=iphone*] = 1,2 - -PRODUCT_NAME = $(TARGET_NAME) -PRODUCT_BUNDLE_IDENTIFIER = $(APP_BUNDLE_IDENTIFIER).uitests - -STRING_CATALOG_GENERATE_SYMBOLS = NO -SWIFT_EMIT_LOC_STRINGS = NO - -ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES -LD_RUNPATH_SEARCH_PATHS = $(inherited) @executable_path/Frameworks @loader_path/Frameworks -TEST_TARGET_NAME = App diff --git a/Apple/Configuration/Version.xcconfig b/Apple/Configuration/Version.xcconfig deleted file mode 100644 index e69de29..0000000 diff --git a/Apple/Core/Client.swift b/Apple/Core/Client.swift deleted file mode 100644 index 7d4cfc7..0000000 --- a/Apple/Core/Client.swift +++ /dev/null @@ -1,508 +0,0 @@ -import Foundation -import GRPC -import NIOTransportServices -import SwiftProtobuf - -public typealias TunnelClient = Burrow_TunnelAsyncClient -public typealias NetworksClient = Burrow_NetworksAsyncClient - -public protocol Client { - init(channel: GRPCChannel) -} - -extension Client { - public static func unix(socketURL: URL) -> Self { - let group = NIOTSEventLoopGroup() - let configuration = ClientConnection.Configuration.default( - target: .unixDomainSocket(socketURL.path), - eventLoopGroup: group - ) - return Self(channel: ClientConnection(configuration: configuration)) - } -} - -extension TunnelClient: Client { - public init(channel: any GRPCChannel) { - self.init(channel: channel, defaultCallOptions: .init(), interceptors: .none) - } -} - -extension NetworksClient: Client { - public init(channel: any GRPCChannel) { - self.init(channel: channel, defaultCallOptions: .init(), interceptors: .none) - } -} - -public struct Burrow_TailnetDiscoverRequest: Sendable { - public var email: String = 
"" - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetDiscoverResponse: Sendable { - public var domain: String = "" - public var authority: String = "" - public var oidcIssuer: String = "" - public var managed: Bool = false - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetProbeRequest: Sendable { - public var authority: String = "" - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetProbeResponse: Sendable { - public var authority: String = "" - public var statusCode: Int32 = 0 - public var summary: String = "" - public var detail: String = "" - public var reachable: Bool = false - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetLoginStartRequest: Sendable { - public var accountName: String = "" - public var identityName: String = "" - public var hostname: String = "" - public var authority: String = "" - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetLoginStatusRequest: Sendable { - public var sessionID: String = "" - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetLoginCancelRequest: Sendable { - public var sessionID: String = "" - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TailnetLoginStatusResponse: Sendable { - public var sessionID: String = "" - public var backendState: String = "" - public var authURL: String = "" - public var running: Bool = false - public var needsLogin: Bool = false - public var tailnetName: String = "" - public var magicDNSSuffix: String = "" - public var selfDNSName: String = "" - public var tailnetIPs: [String] = [] - public var health: [String] = [] - public var unknownFields = 
SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TunnelPacket: Sendable { - public var payload = Data() - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -extension Burrow_TailnetDiscoverRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetDiscoverRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "email") - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.email) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.email.isEmpty { - try visitor.visitSingularStringField(value: self.email, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetDiscoverResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetDiscoverResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "domain"), - 2: .same(proto: "authority"), - 3: .same(proto: "oidc_issuer"), - 4: .same(proto: "managed"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.domain) - case 2: try decoder.decodeSingularStringField(value: &self.authority) - case 3: try decoder.decodeSingularStringField(value: &self.oidcIssuer) - case 4: try decoder.decodeSingularBoolField(value: &self.managed) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.domain.isEmpty { - try visitor.visitSingularStringField(value: 
self.domain, fieldNumber: 1) - } - if !self.authority.isEmpty { - try visitor.visitSingularStringField(value: self.authority, fieldNumber: 2) - } - if !self.oidcIssuer.isEmpty { - try visitor.visitSingularStringField(value: self.oidcIssuer, fieldNumber: 3) - } - if self.managed { - try visitor.visitSingularBoolField(value: self.managed, fieldNumber: 4) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetProbeRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetProbeRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "authority") - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.authority) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.authority.isEmpty { - try visitor.visitSingularStringField(value: self.authority, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetProbeResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetProbeResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "authority"), - 2: .same(proto: "status_code"), - 3: .same(proto: "summary"), - 4: .same(proto: "detail"), - 5: .same(proto: "reachable"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.authority) - case 2: try decoder.decodeSingularInt32Field(value: &self.statusCode) - case 3: try decoder.decodeSingularStringField(value: 
&self.summary) - case 4: try decoder.decodeSingularStringField(value: &self.detail) - case 5: try decoder.decodeSingularBoolField(value: &self.reachable) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.authority.isEmpty { - try visitor.visitSingularStringField(value: self.authority, fieldNumber: 1) - } - if self.statusCode != 0 { - try visitor.visitSingularInt32Field(value: self.statusCode, fieldNumber: 2) - } - if !self.summary.isEmpty { - try visitor.visitSingularStringField(value: self.summary, fieldNumber: 3) - } - if !self.detail.isEmpty { - try visitor.visitSingularStringField(value: self.detail, fieldNumber: 4) - } - if self.reachable { - try visitor.visitSingularBoolField(value: self.reachable, fieldNumber: 5) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetLoginStartRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetLoginStartRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "account_name"), - 2: .standard(proto: "identity_name"), - 3: .same(proto: "hostname"), - 4: .same(proto: "authority"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.accountName) - case 2: try decoder.decodeSingularStringField(value: &self.identityName) - case 3: try decoder.decodeSingularStringField(value: &self.hostname) - case 4: try decoder.decodeSingularStringField(value: &self.authority) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.accountName.isEmpty { - try visitor.visitSingularStringField(value: self.accountName, fieldNumber: 1) - } - if !self.identityName.isEmpty { - try visitor.visitSingularStringField(value: 
self.identityName, fieldNumber: 2) - } - if !self.hostname.isEmpty { - try visitor.visitSingularStringField(value: self.hostname, fieldNumber: 3) - } - if !self.authority.isEmpty { - try visitor.visitSingularStringField(value: self.authority, fieldNumber: 4) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetLoginStatusRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetLoginStatusRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "session_id") - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.sessionID) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.sessionID.isEmpty { - try visitor.visitSingularStringField(value: self.sessionID, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetLoginCancelRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetLoginCancelRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "session_id") - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.sessionID) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.sessionID.isEmpty { - try visitor.visitSingularStringField(value: self.sessionID, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TailnetLoginStatusResponse: SwiftProtobuf.Message, 
SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TailnetLoginStatusResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "session_id"), - 2: .standard(proto: "backend_state"), - 3: .standard(proto: "auth_url"), - 4: .same(proto: "running"), - 5: .standard(proto: "needs_login"), - 6: .standard(proto: "tailnet_name"), - 7: .standard(proto: "magic_dns_suffix"), - 8: .standard(proto: "self_dns_name"), - 9: .standard(proto: "tailnet_ips"), - 10: .same(proto: "health"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularStringField(value: &self.sessionID) - case 2: try decoder.decodeSingularStringField(value: &self.backendState) - case 3: try decoder.decodeSingularStringField(value: &self.authURL) - case 4: try decoder.decodeSingularBoolField(value: &self.running) - case 5: try decoder.decodeSingularBoolField(value: &self.needsLogin) - case 6: try decoder.decodeSingularStringField(value: &self.tailnetName) - case 7: try decoder.decodeSingularStringField(value: &self.magicDNSSuffix) - case 8: try decoder.decodeSingularStringField(value: &self.selfDNSName) - case 9: try decoder.decodeRepeatedStringField(value: &self.tailnetIPs) - case 10: try decoder.decodeRepeatedStringField(value: &self.health) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.sessionID.isEmpty { - try visitor.visitSingularStringField(value: self.sessionID, fieldNumber: 1) - } - if !self.backendState.isEmpty { - try visitor.visitSingularStringField(value: self.backendState, fieldNumber: 2) - } - if !self.authURL.isEmpty { - try visitor.visitSingularStringField(value: self.authURL, fieldNumber: 3) - } - if self.running { - try visitor.visitSingularBoolField(value: self.running, fieldNumber: 4) - } - if 
self.needsLogin { - try visitor.visitSingularBoolField(value: self.needsLogin, fieldNumber: 5) - } - if !self.tailnetName.isEmpty { - try visitor.visitSingularStringField(value: self.tailnetName, fieldNumber: 6) - } - if !self.magicDNSSuffix.isEmpty { - try visitor.visitSingularStringField(value: self.magicDNSSuffix, fieldNumber: 7) - } - if !self.selfDNSName.isEmpty { - try visitor.visitSingularStringField(value: self.selfDNSName, fieldNumber: 8) - } - if !self.tailnetIPs.isEmpty { - try visitor.visitRepeatedStringField(value: self.tailnetIPs, fieldNumber: 9) - } - if !self.health.isEmpty { - try visitor.visitRepeatedStringField(value: self.health, fieldNumber: 10) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -extension Burrow_TunnelPacket: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = "burrow.TunnelPacket" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "payload") - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - switch fieldNumber { - case 1: try decoder.decodeSingularBytesField(value: &self.payload) - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.payload.isEmpty { - try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } -} - -public struct TailnetClient: Client, GRPCClient { - public let channel: GRPCChannel - public var defaultCallOptions: CallOptions - - public init(channel: any GRPCChannel) { - self.channel = channel - self.defaultCallOptions = .init() - } - - public func discover( - _ request: Burrow_TailnetDiscoverRequest, - callOptions: CallOptions? 
= nil - ) async throws -> Burrow_TailnetDiscoverResponse { - try await self.performAsyncUnaryCall( - path: "/burrow.TailnetControl/Discover", - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } - - public func probe( - _ request: Burrow_TailnetProbeRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_TailnetProbeResponse { - try await self.performAsyncUnaryCall( - path: "/burrow.TailnetControl/Probe", - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } - - public func loginStart( - _ request: Burrow_TailnetLoginStartRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_TailnetLoginStatusResponse { - try await self.performAsyncUnaryCall( - path: "/burrow.TailnetControl/LoginStart", - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } - - public func loginStatus( - _ request: Burrow_TailnetLoginStatusRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_TailnetLoginStatusResponse { - try await self.performAsyncUnaryCall( - path: "/burrow.TailnetControl/LoginStatus", - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } - - public func loginCancel( - _ request: Burrow_TailnetLoginCancelRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - try await self.performAsyncUnaryCall( - path: "/burrow.TailnetControl/LoginCancel", - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } -} - -public struct TunnelPacketClient: Client, GRPCClient { - public let channel: GRPCChannel - public var defaultCallOptions: CallOptions - - public init(channel: any GRPCChannel) { - self.channel = channel - self.defaultCallOptions = .init() - } - - public func makeTunnelPacketsCall( - callOptions: CallOptions? 
= nil - ) -> GRPCAsyncBidirectionalStreamingCall { - self.makeAsyncBidirectionalStreamingCall( - path: "/burrow.Tunnel/TunnelPackets", - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: [] - ) - } -} diff --git a/Apple/Core/Client/Generated/burrow.grpc.swift b/Apple/Core/Client/Generated/burrow.grpc.swift deleted file mode 100644 index d1f848c..0000000 --- a/Apple/Core/Client/Generated/burrow.grpc.swift +++ /dev/null @@ -1,761 +0,0 @@ -// -// DO NOT EDIT. -// swift-format-ignore-file -// -// Generated by the protocol buffer compiler. -// Source: burrow.proto -// -import GRPC -import NIO -import NIOConcurrencyHelpers -import SwiftProtobuf - - -/// Usage: instantiate `Burrow_TunnelClient`, then call methods of this protocol to make API calls. -public protocol Burrow_TunnelClientProtocol: GRPCClient { - var serviceName: String { get } - var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? { get } - - func tunnelConfiguration( - _ request: Burrow_Empty, - callOptions: CallOptions?, - handler: @escaping (Burrow_TunnelConfigurationResponse) -> Void - ) -> ServerStreamingCall - - func tunnelStart( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> UnaryCall - - func tunnelStop( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> UnaryCall - - func tunnelStatus( - _ request: Burrow_Empty, - callOptions: CallOptions?, - handler: @escaping (Burrow_TunnelStatusResponse) -> Void - ) -> ServerStreamingCall -} - -extension Burrow_TunnelClientProtocol { - public var serviceName: String { - return "burrow.Tunnel" - } - - /// Server streaming call to TunnelConfiguration - /// - /// - Parameters: - /// - request: Request to send to TunnelConfiguration. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. 
- public func tunnelConfiguration( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil, - handler: @escaping (Burrow_TunnelConfigurationResponse) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelConfiguration.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelConfigurationInterceptors() ?? [], - handler: handler - ) - } - - /// Unary call to TunnelStart - /// - /// - Parameters: - /// - request: Request to send to TunnelStart. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - public func tunnelStart( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStart.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStartInterceptors() ?? [] - ) - } - - /// Unary call to TunnelStop - /// - /// - Parameters: - /// - request: Request to send to TunnelStop. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - public func tunnelStop( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStop.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStopInterceptors() ?? [] - ) - } - - /// Server streaming call to TunnelStatus - /// - /// - Parameters: - /// - request: Request to send to TunnelStatus. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. 
- public func tunnelStatus( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil, - handler: @escaping (Burrow_TunnelStatusResponse) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStatus.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStatusInterceptors() ?? [], - handler: handler - ) - } -} - -@available(*, deprecated) -extension Burrow_TunnelClient: @unchecked Sendable {} - -@available(*, deprecated, renamed: "Burrow_TunnelNIOClient") -public final class Burrow_TunnelClient: Burrow_TunnelClientProtocol { - private let lock = Lock() - private var _defaultCallOptions: CallOptions - private var _interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? - public let channel: GRPCChannel - public var defaultCallOptions: CallOptions { - get { self.lock.withLock { return self._defaultCallOptions } } - set { self.lock.withLockVoid { self._defaultCallOptions = newValue } } - } - public var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? { - get { self.lock.withLock { return self._interceptors } } - set { self.lock.withLockVoid { self._interceptors = newValue } } - } - - /// Creates a client for the burrow.Tunnel service. - /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? 
= nil - ) { - self.channel = channel - self._defaultCallOptions = defaultCallOptions - self._interceptors = interceptors - } -} - -public struct Burrow_TunnelNIOClient: Burrow_TunnelClientProtocol { - public var channel: GRPCChannel - public var defaultCallOptions: CallOptions - public var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? - - /// Creates a client for the burrow.Tunnel service. - /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -public protocol Burrow_TunnelAsyncClientProtocol: GRPCClient { - static var serviceDescriptor: GRPCServiceDescriptor { get } - var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? { get } - - func makeTunnelConfigurationCall( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall - - func makeTunnelStartCall( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall - - func makeTunnelStopCall( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall - - func makeTunnelStatusCall( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Burrow_TunnelAsyncClientProtocol { - public static var serviceDescriptor: GRPCServiceDescriptor { - return Burrow_TunnelClientMetadata.serviceDescriptor - } - - public var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? 
{ - return nil - } - - public func makeTunnelConfigurationCall( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelConfiguration.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelConfigurationInterceptors() ?? [] - ) - } - - public func makeTunnelStartCall( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStart.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStartInterceptors() ?? [] - ) - } - - public func makeTunnelStopCall( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStop.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStopInterceptors() ?? [] - ) - } - - public func makeTunnelStatusCall( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStatus.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStatusInterceptors() ?? [] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Burrow_TunnelAsyncClientProtocol { - public func tunnelConfiguration( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelConfiguration.path, - request: request, - callOptions: callOptions ?? 
self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelConfigurationInterceptors() ?? [] - ) - } - - public func tunnelStart( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - return try await self.performAsyncUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStart.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStartInterceptors() ?? [] - ) - } - - public func tunnelStop( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - return try await self.performAsyncUnaryCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStop.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStopInterceptors() ?? [] - ) - } - - public func tunnelStatus( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Burrow_TunnelClientMetadata.Methods.tunnelStatus.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeTunnelStatusInterceptors() ?? [] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -public struct Burrow_TunnelAsyncClient: Burrow_TunnelAsyncClientProtocol { - public var channel: GRPCChannel - public var defaultCallOptions: CallOptions - public var interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? - - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_TunnelClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -public protocol Burrow_TunnelClientInterceptorFactoryProtocol: Sendable { - - /// - Returns: Interceptors to use when invoking 'tunnelConfiguration'. 
- func makeTunnelConfigurationInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'tunnelStart'. - func makeTunnelStartInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'tunnelStop'. - func makeTunnelStopInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'tunnelStatus'. - func makeTunnelStatusInterceptors() -> [ClientInterceptor] -} - -public enum Burrow_TunnelClientMetadata { - public static let serviceDescriptor = GRPCServiceDescriptor( - name: "Tunnel", - fullName: "burrow.Tunnel", - methods: [ - Burrow_TunnelClientMetadata.Methods.tunnelConfiguration, - Burrow_TunnelClientMetadata.Methods.tunnelStart, - Burrow_TunnelClientMetadata.Methods.tunnelStop, - Burrow_TunnelClientMetadata.Methods.tunnelStatus, - ] - ) - - public enum Methods { - public static let tunnelConfiguration = GRPCMethodDescriptor( - name: "TunnelConfiguration", - path: "/burrow.Tunnel/TunnelConfiguration", - type: GRPCCallType.serverStreaming - ) - - public static let tunnelStart = GRPCMethodDescriptor( - name: "TunnelStart", - path: "/burrow.Tunnel/TunnelStart", - type: GRPCCallType.unary - ) - - public static let tunnelStop = GRPCMethodDescriptor( - name: "TunnelStop", - path: "/burrow.Tunnel/TunnelStop", - type: GRPCCallType.unary - ) - - public static let tunnelStatus = GRPCMethodDescriptor( - name: "TunnelStatus", - path: "/burrow.Tunnel/TunnelStatus", - type: GRPCCallType.serverStreaming - ) - } -} - -/// Usage: instantiate `Burrow_NetworksClient`, then call methods of this protocol to make API calls. -public protocol Burrow_NetworksClientProtocol: GRPCClient { - var serviceName: String { get } - var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? { get } - - func networkAdd( - _ request: Burrow_Network, - callOptions: CallOptions? 
- ) -> UnaryCall - - func networkList( - _ request: Burrow_Empty, - callOptions: CallOptions?, - handler: @escaping (Burrow_NetworkListResponse) -> Void - ) -> ServerStreamingCall - - func networkReorder( - _ request: Burrow_NetworkReorderRequest, - callOptions: CallOptions? - ) -> UnaryCall - - func networkDelete( - _ request: Burrow_NetworkDeleteRequest, - callOptions: CallOptions? - ) -> UnaryCall -} - -extension Burrow_NetworksClientProtocol { - public var serviceName: String { - return "burrow.Networks" - } - - /// Unary call to NetworkAdd - /// - /// - Parameters: - /// - request: Request to send to NetworkAdd. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - public func networkAdd( - _ request: Burrow_Network, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkAdd.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkAddInterceptors() ?? [] - ) - } - - /// Server streaming call to NetworkList - /// - /// - Parameters: - /// - request: Request to send to NetworkList. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. - public func networkList( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil, - handler: @escaping (Burrow_NetworkListResponse) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Burrow_NetworksClientMetadata.Methods.networkList.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkListInterceptors() ?? [], - handler: handler - ) - } - - /// Unary call to NetworkReorder - /// - /// - Parameters: - /// - request: Request to send to NetworkReorder. 
- /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - public func networkReorder( - _ request: Burrow_NetworkReorderRequest, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkReorder.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkReorderInterceptors() ?? [] - ) - } - - /// Unary call to NetworkDelete - /// - /// - Parameters: - /// - request: Request to send to NetworkDelete. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - public func networkDelete( - _ request: Burrow_NetworkDeleteRequest, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkDelete.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkDeleteInterceptors() ?? [] - ) - } -} - -@available(*, deprecated) -extension Burrow_NetworksClient: @unchecked Sendable {} - -@available(*, deprecated, renamed: "Burrow_NetworksNIOClient") -public final class Burrow_NetworksClient: Burrow_NetworksClientProtocol { - private let lock = Lock() - private var _defaultCallOptions: CallOptions - private var _interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? - public let channel: GRPCChannel - public var defaultCallOptions: CallOptions { - get { self.lock.withLock { return self._defaultCallOptions } } - set { self.lock.withLockVoid { self._defaultCallOptions = newValue } } - } - public var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? { - get { self.lock.withLock { return self._interceptors } } - set { self.lock.withLockVoid { self._interceptors = newValue } } - } - - /// Creates a client for the burrow.Networks service. 
- /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self._defaultCallOptions = defaultCallOptions - self._interceptors = interceptors - } -} - -public struct Burrow_NetworksNIOClient: Burrow_NetworksClientProtocol { - public var channel: GRPCChannel - public var defaultCallOptions: CallOptions - public var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? - - /// Creates a client for the burrow.Networks service. - /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -public protocol Burrow_NetworksAsyncClientProtocol: GRPCClient { - static var serviceDescriptor: GRPCServiceDescriptor { get } - var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? { get } - - func makeNetworkAddCall( - _ request: Burrow_Network, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall - - func makeNetworkListCall( - _ request: Burrow_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall - - func makeNetworkReorderCall( - _ request: Burrow_NetworkReorderRequest, - callOptions: CallOptions? 
- ) -> GRPCAsyncUnaryCall - - func makeNetworkDeleteCall( - _ request: Burrow_NetworkDeleteRequest, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Burrow_NetworksAsyncClientProtocol { - public static var serviceDescriptor: GRPCServiceDescriptor { - return Burrow_NetworksClientMetadata.serviceDescriptor - } - - public var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? { - return nil - } - - public func makeNetworkAddCall( - _ request: Burrow_Network, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkAdd.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkAddInterceptors() ?? [] - ) - } - - public func makeNetworkListCall( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Burrow_NetworksClientMetadata.Methods.networkList.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkListInterceptors() ?? [] - ) - } - - public func makeNetworkReorderCall( - _ request: Burrow_NetworkReorderRequest, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkReorder.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkReorderInterceptors() ?? [] - ) - } - - public func makeNetworkDeleteCall( - _ request: Burrow_NetworkDeleteRequest, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkDelete.path, - request: request, - callOptions: callOptions ?? 
self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkDeleteInterceptors() ?? [] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Burrow_NetworksAsyncClientProtocol { - public func networkAdd( - _ request: Burrow_Network, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - return try await self.performAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkAdd.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkAddInterceptors() ?? [] - ) - } - - public func networkList( - _ request: Burrow_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Burrow_NetworksClientMetadata.Methods.networkList.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkListInterceptors() ?? [] - ) - } - - public func networkReorder( - _ request: Burrow_NetworkReorderRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - return try await self.performAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkReorder.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkReorderInterceptors() ?? [] - ) - } - - public func networkDelete( - _ request: Burrow_NetworkDeleteRequest, - callOptions: CallOptions? = nil - ) async throws -> Burrow_Empty { - return try await self.performAsyncUnaryCall( - path: Burrow_NetworksClientMetadata.Methods.networkDelete.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeNetworkDeleteInterceptors() ?? 
[] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -public struct Burrow_NetworksAsyncClient: Burrow_NetworksAsyncClientProtocol { - public var channel: GRPCChannel - public var defaultCallOptions: CallOptions - public var interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? - - public init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Burrow_NetworksClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -public protocol Burrow_NetworksClientInterceptorFactoryProtocol: Sendable { - - /// - Returns: Interceptors to use when invoking 'networkAdd'. - func makeNetworkAddInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'networkList'. - func makeNetworkListInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'networkReorder'. - func makeNetworkReorderInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'networkDelete'. 
- func makeNetworkDeleteInterceptors() -> [ClientInterceptor] -} - -public enum Burrow_NetworksClientMetadata { - public static let serviceDescriptor = GRPCServiceDescriptor( - name: "Networks", - fullName: "burrow.Networks", - methods: [ - Burrow_NetworksClientMetadata.Methods.networkAdd, - Burrow_NetworksClientMetadata.Methods.networkList, - Burrow_NetworksClientMetadata.Methods.networkReorder, - Burrow_NetworksClientMetadata.Methods.networkDelete, - ] - ) - - public enum Methods { - public static let networkAdd = GRPCMethodDescriptor( - name: "NetworkAdd", - path: "/burrow.Networks/NetworkAdd", - type: GRPCCallType.unary - ) - - public static let networkList = GRPCMethodDescriptor( - name: "NetworkList", - path: "/burrow.Networks/NetworkList", - type: GRPCCallType.serverStreaming - ) - - public static let networkReorder = GRPCMethodDescriptor( - name: "NetworkReorder", - path: "/burrow.Networks/NetworkReorder", - type: GRPCCallType.unary - ) - - public static let networkDelete = GRPCMethodDescriptor( - name: "NetworkDelete", - path: "/burrow.Networks/NetworkDelete", - type: GRPCCallType.unary - ) - } -} - diff --git a/Apple/Core/Client/Generated/burrow.pb.swift b/Apple/Core/Client/Generated/burrow.pb.swift deleted file mode 100644 index fccd769..0000000 --- a/Apple/Core/Client/Generated/burrow.pb.swift +++ /dev/null @@ -1,598 +0,0 @@ -// DO NOT EDIT. -// swift-format-ignore-file -// swiftlint:disable all -// -// Generated by the Swift generator plugin for the protocol buffer compiler. -// Source: burrow.proto -// -// For information on using the generated types, please see the documentation: -// https://github.com/apple/swift-protobuf/ - -import Foundation -import SwiftProtobuf - -// If the compiler emits an error on this type, it is because this file -// was generated by a version of the `protoc` Swift plug-in that is -// incompatible with the version of SwiftProtobuf to which you are linking. 
-// Please ensure that you are building against the same version of the API -// that was used to generate this file. -fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { - struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} - typealias Version = _2 -} - -public enum Burrow_NetworkType: SwiftProtobuf.Enum, Swift.CaseIterable { - public typealias RawValue = Int - case wireGuard // = 0 - case tailnet // = 1 - case UNRECOGNIZED(Int) - - public init() { - self = .wireGuard - } - - public init?(rawValue: Int) { - switch rawValue { - case 0: self = .wireGuard - case 1: self = .tailnet - default: self = .UNRECOGNIZED(rawValue) - } - } - - public var rawValue: Int { - switch self { - case .wireGuard: return 0 - case .tailnet: return 1 - case .UNRECOGNIZED(let i): return i - } - } - - // The compiler won't synthesize support with the UNRECOGNIZED case. - public static let allCases: [Burrow_NetworkType] = [ - .wireGuard, - .tailnet, - ] - -} - -public enum Burrow_State: SwiftProtobuf.Enum, Swift.CaseIterable { - public typealias RawValue = Int - case stopped // = 0 - case running // = 1 - case UNRECOGNIZED(Int) - - public init() { - self = .stopped - } - - public init?(rawValue: Int) { - switch rawValue { - case 0: self = .stopped - case 1: self = .running - default: self = .UNRECOGNIZED(rawValue) - } - } - - public var rawValue: Int { - switch self { - case .stopped: return 0 - case .running: return 1 - case .UNRECOGNIZED(let i): return i - } - } - - // The compiler won't synthesize support with the UNRECOGNIZED case. - public static let allCases: [Burrow_State] = [ - .stopped, - .running, - ] - -} - -public struct Burrow_NetworkReorderRequest: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. 
- - public var id: Int32 = 0 - - public var index: Int32 = 0 - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_WireGuardPeer: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var endpoint: String = String() - - public var subnet: [String] = [] - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_WireGuardNetwork: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var address: String = String() - - public var dns: String = String() - - public var peer: [Burrow_WireGuardPeer] = [] - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_NetworkDeleteRequest: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var id: Int32 = 0 - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_Network: @unchecked Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var id: Int32 = 0 - - public var type: Burrow_NetworkType = .wireGuard - - public var payload: Data = Data() - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_NetworkListResponse: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. 
See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var network: [Burrow_Network] = [] - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_Empty: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -public struct Burrow_TunnelStatusResponse: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - public var state: Burrow_State = .stopped - - public var start: SwiftProtobuf.Google_Protobuf_Timestamp { - get {return _start ?? SwiftProtobuf.Google_Protobuf_Timestamp()} - set {_start = newValue} - } - /// Returns true if `start` has been explicitly set. - public var hasStart: Bool {return self._start != nil} - /// Clears the value of `start`. Subsequent reads from it will return its default value. - public mutating func clearStart() {self._start = nil} - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} - - fileprivate var _start: SwiftProtobuf.Google_Protobuf_Timestamp? = nil -} - -public struct Burrow_TunnelConfigurationResponse: Sendable { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. 
- - public var addresses: [String] = [] - - public var mtu: Int32 = 0 - - public var routes: [String] = [] - - public var dnsServers: [String] = [] - - public var searchDomains: [String] = [] - - public var includeDefaultRoute: Bool = false - - public var unknownFields = SwiftProtobuf.UnknownStorage() - - public init() {} -} - -// MARK: - Code below here is support for the SwiftProtobuf runtime. - -fileprivate let _protobuf_package = "burrow" - -extension Burrow_NetworkType: SwiftProtobuf._ProtoNameProviding { - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "WireGuard"), - 1: .same(proto: "Tailnet"), - ] -} - -extension Burrow_State: SwiftProtobuf._ProtoNameProviding { - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "Stopped"), - 1: .same(proto: "Running"), - ] -} - -extension Burrow_NetworkReorderRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".NetworkReorderRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "id"), - 2: .same(proto: "index"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularInt32Field(value: &self.id) }() - case 2: try { try decoder.decodeSingularInt32Field(value: &self.index) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if self.id != 0 { - try visitor.visitSingularInt32Field(value: self.id, fieldNumber: 1) - } - if self.index != 0 { - try visitor.visitSingularInt32Field(value: self.index, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_NetworkReorderRequest, rhs: Burrow_NetworkReorderRequest) -> Bool { - if lhs.id != rhs.id {return false} - if lhs.index != rhs.index {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_WireGuardPeer: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".WireGuardPeer" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "endpoint"), - 2: .same(proto: "subnet"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.endpoint) }() - case 2: try { try decoder.decodeRepeatedStringField(value: &self.subnet) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.endpoint.isEmpty { - try visitor.visitSingularStringField(value: self.endpoint, fieldNumber: 1) - } - if !self.subnet.isEmpty { - try visitor.visitRepeatedStringField(value: self.subnet, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_WireGuardPeer, rhs: Burrow_WireGuardPeer) -> Bool { - if lhs.endpoint != rhs.endpoint {return false} - if lhs.subnet != rhs.subnet {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_WireGuardNetwork: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".WireGuardNetwork" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "address"), - 2: .same(proto: "dns"), - 3: .same(proto: "peer"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.address) }() - case 2: try { try decoder.decodeSingularStringField(value: &self.dns) }() - case 3: try { try decoder.decodeRepeatedMessageField(value: &self.peer) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.address.isEmpty { - try visitor.visitSingularStringField(value: self.address, fieldNumber: 1) - } - if !self.dns.isEmpty { - try visitor.visitSingularStringField(value: self.dns, fieldNumber: 2) - } - if !self.peer.isEmpty { - try visitor.visitRepeatedMessageField(value: self.peer, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_WireGuardNetwork, rhs: Burrow_WireGuardNetwork) -> Bool { - if lhs.address != rhs.address {return false} - if lhs.dns != rhs.dns {return false} - if lhs.peer != rhs.peer {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_NetworkDeleteRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".NetworkDeleteRequest" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "id"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularInt32Field(value: &self.id) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if self.id != 0 { - try visitor.visitSingularInt32Field(value: self.id, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_NetworkDeleteRequest, rhs: Burrow_NetworkDeleteRequest) -> Bool { - if lhs.id != rhs.id {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_Network: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".Network" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "id"), - 2: .same(proto: "type"), - 3: .same(proto: "payload"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularInt32Field(value: &self.id) }() - case 2: try { try decoder.decodeSingularEnumField(value: &self.type) }() - case 3: try { try decoder.decodeSingularBytesField(value: &self.payload) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if self.id != 0 { - try visitor.visitSingularInt32Field(value: self.id, fieldNumber: 1) - } - if self.type != .wireGuard { - try visitor.visitSingularEnumField(value: self.type, fieldNumber: 2) - } - if !self.payload.isEmpty { - try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_Network, rhs: Burrow_Network) -> Bool { - if lhs.id != rhs.id {return false} - if lhs.type != rhs.type {return false} - if lhs.payload != rhs.payload {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_NetworkListResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".NetworkListResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "network"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeRepeatedMessageField(value: &self.network) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.network.isEmpty { - try visitor.visitRepeatedMessageField(value: self.network, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_NetworkListResponse, rhs: Burrow_NetworkListResponse) -> Bool { - if lhs.network != rhs.network {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_Empty: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".Empty" - public static let _protobuf_nameMap = SwiftProtobuf._NameMap() - - public mutating func decodeMessage(decoder: inout D) throws { - // Load everything into unknown fields - while try decoder.nextFieldNumber() != nil {} - } - - public func traverse(visitor: inout V) throws { - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_Empty, rhs: Burrow_Empty) -> Bool { - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_TunnelStatusResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".TunnelStatusResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "state"), - 2: .same(proto: "start"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularEnumField(value: &self.state) }() - case 2: try { try decoder.decodeSingularMessageField(value: &self._start) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - if self.state != .stopped { - try visitor.visitSingularEnumField(value: self.state, fieldNumber: 1) - } - try { if let v = self._start { - try visitor.visitSingularMessageField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_TunnelStatusResponse, rhs: Burrow_TunnelStatusResponse) -> Bool { - if lhs.state != rhs.state {return false} - if lhs._start != rhs._start {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Burrow_TunnelConfigurationResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - public static let protoMessageName: String = _protobuf_package + ".TunnelConfigurationResponse" - public static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "addresses"), - 2: .same(proto: "mtu"), - 3: .same(proto: "routes"), - 4: .standard(proto: "dns_servers"), - 5: .standard(proto: "search_domains"), - 6: .standard(proto: "include_default_route"), - ] - - public mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeRepeatedStringField(value: &self.addresses) }() - case 2: try { try decoder.decodeSingularInt32Field(value: &self.mtu) }() - case 3: try { try decoder.decodeRepeatedStringField(value: &self.routes) }() - case 4: try { try decoder.decodeRepeatedStringField(value: &self.dnsServers) }() - case 5: try { try decoder.decodeRepeatedStringField(value: &self.searchDomains) }() - case 6: try { try decoder.decodeSingularBoolField(value: &self.includeDefaultRoute) }() - default: break - } - } - } - - public func traverse(visitor: inout V) throws { - if !self.addresses.isEmpty { - try visitor.visitRepeatedStringField(value: self.addresses, fieldNumber: 1) - } - if self.mtu != 0 { - try visitor.visitSingularInt32Field(value: self.mtu, fieldNumber: 2) - } - if !self.routes.isEmpty { - try visitor.visitRepeatedStringField(value: self.routes, fieldNumber: 3) - } - if !self.dnsServers.isEmpty { - try visitor.visitRepeatedStringField(value: self.dnsServers, fieldNumber: 4) - } - if !self.searchDomains.isEmpty { - try visitor.visitRepeatedStringField(value: self.searchDomains, fieldNumber: 5) - } - if self.includeDefaultRoute { - try visitor.visitSingularBoolField(value: self.includeDefaultRoute, fieldNumber: 6) - } - try unknownFields.traverse(visitor: &visitor) - } - - public static func ==(lhs: Burrow_TunnelConfigurationResponse, rhs: Burrow_TunnelConfigurationResponse) -> Bool { - if lhs.addresses != rhs.addresses {return false} - if lhs.mtu != rhs.mtu {return false} - if lhs.routes != rhs.routes {return false} - if lhs.dnsServers != rhs.dnsServers {return false} - if lhs.searchDomains != rhs.searchDomains {return false} - if lhs.includeDefaultRoute != rhs.includeDefaultRoute {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} diff --git a/Apple/Core/Client/burrow.proto b/Apple/Core/Client/burrow.proto deleted file mode 120000 
index 03e86a5..0000000 --- a/Apple/Core/Client/burrow.proto +++ /dev/null @@ -1 +0,0 @@ -../../../proto/burrow.proto \ No newline at end of file diff --git a/Apple/Core/Client/google/protobuf/timestamp.proto b/Apple/Core/Client/google/protobuf/timestamp.proto deleted file mode 100644 index 7db2f6a..0000000 --- a/Apple/Core/Client/google/protobuf/timestamp.proto +++ /dev/null @@ -1,64 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto3"; - -package google.protobuf; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/protobuf/types/known/timestamppb"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "TimestampProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; - -// A Timestamp represents a point in time independent of any time zone or local -// calendar, encoded as a count of seconds and fractions of seconds at -// nanosecond resolution. The count is relative to an epoch at UTC midnight on -// January 1, 1970, in the proleptic Gregorian calendar which extends the -// Gregorian calendar backwards to year one. -// -// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -// second table is needed for interpretation, using a 24-hour linear smear. -// -// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -// restricting to that range, we ensure that we can convert to and from RFC -// 3339 date strings. -message Timestamp { - // Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. - // Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. - int64 seconds = 1; - - // Non-negative fractions of a second at nanosecond resolution. 
Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 inclusive. - int32 nanos = 2; -} diff --git a/Apple/NetworkExtension/Client.swift b/Apple/NetworkExtension/Client.swift new file mode 100644 index 0000000..a924c29 --- /dev/null +++ b/Apple/NetworkExtension/Client.swift @@ -0,0 +1,60 @@ +import BurrowShared +import Foundation +import Network + +final class Client { + let connection: NWConnection + + private let logger: Logger = Logger.logger(for: Client.self) + private var generator = SystemRandomNumberGenerator() + + convenience init() throws { + self.init(url: try Constants.socketURL) + } + + init(url: URL) { + let endpoint: NWEndpoint + if url.isFileURL { + endpoint = .unix(path: url.path(percentEncoded: false)) + } else { + endpoint = .url(url) + } + + let parameters = NWParameters.tcp + parameters.defaultProtocolStack + .applicationProtocols + .insert(NWProtocolFramer.Options(definition: NewlineProtocolFramer.definition), at: 0) + connection = NWConnection(to: endpoint, using: parameters) + connection.start(queue: .global()) + } + + func request(_ request: any Request, type: U.Type = U.self) async throws -> U { + do { + var copy = request + copy.id = generator.next(upperBound: UInt.max) + let content = try JSONEncoder().encode(copy) + logger.debug("> \(String(decoding: content, as: UTF8.self))") + + try await self.connection.send(content: content) + let (response, _, _) = try await connection.receiveMessage() + + logger.debug("< \(String(decoding: response, as: UTF8.self))") + return try JSONDecoder().decode(U.self, from: response) + } catch { + logger.error("\(error, privacy: .public)") + throw error + } + } + + deinit { + connection.cancel() + } +} + +extension Constants { + static var socketURL: URL { + get throws { + try groupContainerURL.appending(component: "burrow.sock", directoryHint: .notDirectory) + } + } +} diff --git 
a/Apple/NetworkExtension/DataTypes.swift b/Apple/NetworkExtension/DataTypes.swift new file mode 100644 index 0000000..1409fde --- /dev/null +++ b/Apple/NetworkExtension/DataTypes.swift @@ -0,0 +1,61 @@ +import Foundation + +// swiftlint:disable identifier_name +enum BurrowError: Error { + case addrDoesntExist + case resultIsError + case cantParseResult + case resultIsNone +} + +protocol Request: Codable where Command: Codable { + associatedtype Command + + var id: UInt { get set } + var command: Command { get set } +} + +struct BurrowSingleCommand: Request { + var id: UInt + var command: String +} + +struct BurrowRequest: Request where T: Codable { + var id: UInt + var command: T +} + +struct BurrowStartRequest: Codable { + struct TunOptions: Codable { + let name: String? + let no_pi: Bool + let tun_excl: Bool + let tun_retrieve: Bool + let address: [String] + } + struct StartOptions: Codable { + let tun: TunOptions + } + let Start: StartOptions +} + +struct Response: Decodable where T: Decodable { + var id: UInt + var result: T +} + +struct BurrowResult: Codable where T: Codable { + var Ok: T? + var Err: String? +} + +struct ServerConfigData: Codable { + struct InternalConfig: Codable { + let address: [String] + let name: String? + let mtu: Int32? 
+ } + let ServerConfig: InternalConfig +} + +// swiftlint:enable identifier_name diff --git a/Apple/NetworkExtension/NWConnection+Async.swift b/Apple/NetworkExtension/NWConnection+Async.swift new file mode 100644 index 0000000..c21fdc0 --- /dev/null +++ b/Apple/NetworkExtension/NWConnection+Async.swift @@ -0,0 +1,32 @@ +import Foundation +import Network + +extension NWConnection { + // swiftlint:disable:next large_tuple + func receiveMessage() async throws -> (Data, NWConnection.ContentContext?, Bool) { + try await withUnsafeThrowingContinuation { continuation in + receiveMessage { completeContent, contentContext, isComplete, error in + if let error { + continuation.resume(throwing: error) + } else { + guard let completeContent = completeContent else { + fatalError("Both error and completeContent were nil") + } + continuation.resume(returning: (completeContent, contentContext, isComplete)) + } + } + } + } + + func send(content: Data) async throws { + try await withCheckedThrowingContinuation { (continuation: CheckedContinuation) in + send(content: content, completion: .contentProcessed { error in + if let error { + continuation.resume(throwing: error) + } else { + continuation.resume(returning: ()) + } + }) + } + } +} diff --git a/Apple/NetworkExtension/NewlineProtocolFramer.swift b/Apple/NetworkExtension/NewlineProtocolFramer.swift new file mode 100644 index 0000000..d2f71e5 --- /dev/null +++ b/Apple/NetworkExtension/NewlineProtocolFramer.swift @@ -0,0 +1,54 @@ +import Foundation +import Network + +final class NewlineProtocolFramer: NWProtocolFramerImplementation { + private static let delimeter: UInt8 = 10 // `\n` + + static let definition = NWProtocolFramer.Definition(implementation: NewlineProtocolFramer.self) + static let label = "Lines" + + init(framer: NWProtocolFramer.Instance) { } + + func start(framer: NWProtocolFramer.Instance) -> NWProtocolFramer.StartResult { .ready } + func stop(framer: NWProtocolFramer.Instance) -> Bool { true } + + func 
wakeup(framer: NWProtocolFramer.Instance) { } + func cleanup(framer: NWProtocolFramer.Instance) { } + + func handleInput(framer: NWProtocolFramer.Instance) -> Int { + while true { + var result: [Data] = [] + let parsed = framer.parseInput(minimumIncompleteLength: 1, maximumLength: 16_000) { buffer, _ in + guard let buffer else { return 0 } + var lines = buffer + .split(separator: Self.delimeter, omittingEmptySubsequences: false) + .map { Data($0) } + guard lines.count > 1 else { return 0 } + _ = lines.popLast() + + result = lines + return lines.reduce(lines.count) { $0 + $1.count } + } + + guard parsed && !result.isEmpty else { break } + + for line in result { + framer.deliverInput(data: line, message: .init(instance: framer), isComplete: true) + } + } + return 0 + } + + func handleOutput( + framer: NWProtocolFramer.Instance, + message: NWProtocolFramer.Message, + messageLength: Int, + isComplete: Bool + ) { + do { + try framer.writeOutputNoCopy(length: messageLength) + framer.writeOutput(data: [Self.delimeter]) + } catch { + } + } +} diff --git a/Apple/NetworkExtension/PacketTunnelProvider.swift b/Apple/NetworkExtension/PacketTunnelProvider.swift index 3f3d8b4..bfdb34a 100644 --- a/Apple/NetworkExtension/PacketTunnelProvider.swift +++ b/Apple/NetworkExtension/PacketTunnelProvider.swift @@ -1,326 +1,67 @@ -import AsyncAlgorithms -import BurrowConfiguration -import BurrowCore -import GRPC +import BurrowShared import libburrow import NetworkExtension import os -private final class SendableCallbackBox: @unchecked Sendable { - let callback: Callback - - init(_ callback: Callback) { - self.callback = callback - } -} - -final class PacketTunnelProvider: NEPacketTunnelProvider, @unchecked Sendable { - enum Error: Swift.Error { - case missingTunnelConfiguration - } - +class PacketTunnelProvider: NEPacketTunnelProvider { private let logger = Logger.logger(for: PacketTunnelProvider.self) - private var packetCall: GRPCAsyncBidirectionalStreamingCall? 
- private var inboundPacketTask: Task? - private var outboundPacketTask: Task? - private var client: TunnelClient { - get throws { try _client.get() } - } - private let _client: Result = Result { - try TunnelClient.unix(socketURL: Constants.socketURL) - } - - override init() { + override func startTunnel(options: [String: NSObject]? = nil) async throws { do { - libburrow.spawnInProcess( - socketPath: try Constants.socketURL.path(percentEncoded: false), - databasePath: try Constants.databaseURL.path(percentEncoded: false) - ) - } catch { - logger.error("Failed to spawn networking thread: \(error)") - } - } + libburrow.spawnInProcess(socketPath: try Constants.socketURL.path) - override func startTunnel( - options: [String: NSObject]?, - completionHandler: @escaping (Swift.Error?) -> Void - ) { - let completion = SendableCallbackBox(completionHandler) - Task { - do { - _ = try await client.tunnelStart(.init()) - let configuration = try await Array(client.tunnelConfiguration(.init()).prefix(1)).first - guard let settings = configuration?.settings else { - throw Error.missingTunnelConfiguration - } - try await setTunnelNetworkSettings(settings) - try startPacketBridge() - logger.log("Started tunnel with network settings: \(settings)") - completion.callback(nil) - } catch { - logger.error("Failed to start tunnel: \(error)") - stopPacketBridge() - completion.callback(error) + let client = try Client() + + let command = BurrowRequest(id: 0, command: "ServerConfig") + let data = try await client.request(command, type: Response>.self) + + let encoded = try JSONEncoder().encode(data.result) + self.logger.log("Received final data: \(String(decoding: encoded, as: UTF8.self))") + guard let serverconfig = data.result.Ok else { + throw BurrowError.resultIsError } - } - } - - override func stopTunnel( - with reason: NEProviderStopReason, - completionHandler: @escaping () -> Void - ) { - let completion = SendableCallbackBox(completionHandler) - Task { - stopPacketBridge() - do { - _ 
= try await client.tunnelStop(.init()) - logger.log("Stopped client") - } catch { - logger.error("Failed to stop tunnel: \(error)") + guard let tunNs = generateTunSettings(from: serverconfig) else { + throw BurrowError.addrDoesntExist } - completion.callback() - } - } -} + try await self.setTunnelNetworkSettings(tunNs) + self.logger.info("Set remote tunnel address to \(tunNs.tunnelRemoteAddress)") -extension PacketTunnelProvider { - private func startPacketBridge() throws { - stopPacketBridge() - - let packetClient = TunnelPacketClient.unix(socketURL: try Constants.socketURL) - let call = packetClient.makeTunnelPacketsCall() - self.packetCall = call - - inboundPacketTask = Task { [weak self] in - guard let self else { return } - do { - for try await packet in call.responseStream { - let payload = packet.payload - self.packetFlow.writePackets( - [payload], - withProtocols: [Self.protocolNumber(for: payload)] + let startRequest = BurrowRequest( + id: .random(in: (.min)..<(.max)), + command: BurrowStartRequest( + Start: BurrowStartRequest.StartOptions( + tun: BurrowStartRequest.TunOptions( + name: nil, no_pi: false, tun_excl: false, tun_retrieve: true, address: [] + ) ) - } - } catch { - guard !Task.isCancelled else { return } - self.logger.error("Tunnel packet receive loop failed: \(error)") - } - } - - outboundPacketTask = Task { [weak self] in - guard let self else { return } - defer { call.requestStream.finish() } - do { - while !Task.isCancelled { - let packets = await self.readPacketsBatch() - for (payload, _) in packets { - var packet = Burrow_TunnelPacket() - packet.payload = payload - try await call.requestStream.send(packet) - } - } - } catch { - guard !Task.isCancelled else { return } - self.logger.error("Tunnel packet send loop failed: \(error)") - } + ) + ) + let response = try await client.request(startRequest, type: Response>.self) + self.logger.log("Received start server response: \(String(describing: response.result))") + } catch { + 
self.logger.error("Failed to start tunnel: \(error)") + throw error } } - private func stopPacketBridge() { - inboundPacketTask?.cancel() - inboundPacketTask = nil - outboundPacketTask?.cancel() - outboundPacketTask = nil - packetCall?.cancel() - packetCall = nil - } - - private func readPacketsBatch() async -> [(Data, NSNumber)] { - await withCheckedContinuation { continuation in - packetFlow.readPackets { packets, protocols in - continuation.resume(returning: Array(zip(packets, protocols))) + private func generateTunSettings(from: ServerConfigData) -> NETunnelNetworkSettings? { + let cfig = from.ServerConfig + let nst = NEPacketTunnelNetworkSettings(tunnelRemoteAddress: "1.1.1.1") + var v4Addresses = [String]() + var v6Addresses = [String]() + for addr in cfig.address { + if IPv4Address(addr) != nil { + v6Addresses.append(addr) + } + if IPv6Address(addr) != nil { + v4Addresses.append(addr) } } - } - - private static func protocolNumber(for payload: Data) -> NSNumber { - guard let version = payload.first.map({ $0 >> 4 }) else { - return NSNumber(value: AF_INET) - } - switch version { - case 6: - return NSNumber(value: AF_INET6) - default: - return NSNumber(value: AF_INET) - } - } -} - -extension Burrow_TunnelConfigurationResponse { - fileprivate var settings: NEPacketTunnelNetworkSettings { - let parsedAddresses = addresses.compactMap(ParsedTunnelAddress.init(rawValue:)) - let ipv4Addresses = parsedAddresses.compactMap(\.ipv4Address) - let ipv6Addresses = parsedAddresses.compactMap(\.ipv6Address) - let parsedRoutes = routes.compactMap(ParsedTunnelRoute.init(rawValue:)) - var ipv4Routes = parsedRoutes.compactMap(\.ipv4Route) - var ipv6Routes = parsedRoutes.compactMap(\.ipv6Route) - if includeDefaultRoute { - ipv4Routes.append(.default()) - ipv6Routes.append(.default()) - } - - let settings = NEPacketTunnelNetworkSettings(tunnelRemoteAddress: "1.1.1.1") - settings.mtu = NSNumber(value: mtu) - if !ipv4Addresses.isEmpty { - let ipv4Settings = NEIPv4Settings( - 
addresses: ipv4Addresses.map(\.address), - subnetMasks: ipv4Addresses.map(\.subnetMask) - ) - if !ipv4Routes.isEmpty { - ipv4Settings.includedRoutes = ipv4Routes - } - settings.ipv4Settings = ipv4Settings - } - if !ipv6Addresses.isEmpty { - let ipv6Settings = NEIPv6Settings( - addresses: ipv6Addresses.map(\.address), - networkPrefixLengths: ipv6Addresses.map(\.prefixLength) - ) - if !ipv6Routes.isEmpty { - ipv6Settings.includedRoutes = ipv6Routes - } - settings.ipv6Settings = ipv6Settings - } - if !dnsServers.isEmpty { - let dnsSettings = NEDNSSettings(servers: dnsServers) - if !searchDomains.isEmpty { - dnsSettings.matchDomains = searchDomains - } - settings.dnsSettings = dnsSettings - } - return settings - } -} - -private struct ParsedTunnelAddress { - struct IPv4AddressSetting { - let address: String - let subnetMask: String - } - - struct IPv6AddressSetting { - let address: String - let prefixLength: NSNumber - } - - let ipv4Address: IPv4AddressSetting? - let ipv6Address: IPv6AddressSetting? - - init?(rawValue: String) { - let components = rawValue.split(separator: "/", maxSplits: 1).map(String.init) - let address = components.first?.trimmingCharacters(in: .whitespacesAndNewlines) ?? "" - guard !address.isEmpty else { - return nil - } - - let prefix = components.count == 2 ? Int(components[1]) : nil - if IPv4Address(address) != nil { - let prefixLength = prefix ?? 32 - guard (0 ... 32).contains(prefixLength) else { - return nil - } - ipv4Address = IPv4AddressSetting( - address: address, - subnetMask: Self.ipv4SubnetMask(prefixLength: prefixLength) - ) - ipv6Address = nil - return - } - - if IPv6Address(address) != nil { - let prefixLength = prefix ?? 128 - guard (0 ... 
128).contains(prefixLength) else { - return nil - } - ipv4Address = nil - ipv6Address = IPv6AddressSetting( - address: address, - prefixLength: NSNumber(value: prefixLength) - ) - return - } - - return nil - } - - private static func ipv4SubnetMask(prefixLength: Int) -> String { - guard prefixLength > 0 else { - return "0.0.0.0" - } - let mask = UInt32.max << (32 - prefixLength) - let octets = [ - (mask >> 24) & 0xff, - (mask >> 16) & 0xff, - (mask >> 8) & 0xff, - mask & 0xff, - ] - return octets.map(String.init).joined(separator: ".") - } -} - -private struct ParsedTunnelRoute { - let ipv4Route: NEIPv4Route? - let ipv6Route: NEIPv6Route? - - init?(rawValue: String) { - let components = rawValue.split(separator: "/", maxSplits: 1).map(String.init) - let address = components.first?.trimmingCharacters(in: .whitespacesAndNewlines) ?? "" - guard !address.isEmpty else { - return nil - } - - let prefix = components.count == 2 ? Int(components[1]) : nil - if IPv4Address(address) != nil { - let prefixLength = prefix ?? 32 - guard (0 ... 32).contains(prefixLength) else { - return nil - } - ipv4Route = NEIPv4Route( - destinationAddress: address, - subnetMask: Self.ipv4SubnetMask(prefixLength: prefixLength) - ) - ipv6Route = nil - return - } - - if IPv6Address(address) != nil { - let prefixLength = prefix ?? 128 - guard (0 ... 
128).contains(prefixLength) else { - return nil - } - ipv4Route = nil - ipv6Route = NEIPv6Route( - destinationAddress: address, - networkPrefixLength: NSNumber(value: prefixLength) - ) - return - } - - return nil - } - - private static func ipv4SubnetMask(prefixLength: Int) -> String { - var mask = UInt32.max << (32 - prefixLength) - if prefixLength == 0 { - mask = 0 - } - let octets = [ - String((mask >> 24) & 0xff), - String((mask >> 16) & 0xff), - String((mask >> 8) & 0xff), - String(mask & 0xff), - ] - return octets.joined(separator: ".") + nst.ipv4Settings = NEIPv4Settings(addresses: v4Addresses, subnetMasks: v4Addresses.map { _ in + "255.255.255.0" + }) + nst.ipv6Settings = NEIPv6Settings(addresses: v6Addresses, networkPrefixLengths: v6Addresses.map { _ in 64 }) + logger.log("Initialized ipv4 settings: \(nst.ipv4Settings)") + return nst } } diff --git a/Apple/NetworkExtension/libburrow/build-rust.sh b/Apple/NetworkExtension/libburrow/build-rust.sh index 5db2a2b..1ac73fb 100755 --- a/Apple/NetworkExtension/libburrow/build-rust.sh +++ b/Apple/NetworkExtension/libburrow/build-rust.sh @@ -56,10 +56,10 @@ CARGO_ARGS+=("--lib") # Pass the configuration (Debug or Release) through to cargo if [[ $SWIFT_ACTIVE_COMPILATION_CONDITIONS == *DEBUG* ]]; then - CARGO_TARGET_SUBDIR="debug" + CARGO_DIR="debug" else CARGO_ARGS+=("--release") - CARGO_TARGET_SUBDIR="release" + CARGO_DIR="release" fi if [[ -x "$(command -v rustup)" ]]; then @@ -68,30 +68,13 @@ else CARGO_PATH="$(dirname $(readlink -f $(which cargo))):/usr/bin" fi -PROTOC=$(readlink -f $(which protoc)) -CARGO_PATH="$(dirname $PROTOC):$CARGO_PATH" - # Run cargo without the various environment variables set by Xcode. # Those variables can confuse cargo and the build scripts it runs. 
-CARGO_ENV=( - "PATH=$CARGO_PATH" - "PROTOC=$PROTOC" - "CARGO_TARGET_DIR=${CONFIGURATION_TEMP_DIR}/target" -) - -if [[ -n "$IPHONEOS_DEPLOYMENT_TARGET" ]]; then - CARGO_ENV+=("IPHONEOS_DEPLOYMENT_TARGET=$IPHONEOS_DEPLOYMENT_TARGET") -fi - -if [[ -n "$MACOSX_DEPLOYMENT_TARGET" ]]; then - CARGO_ENV+=("MACOSX_DEPLOYMENT_TARGET=$MACOSX_DEPLOYMENT_TARGET") -fi - -env -i "${CARGO_ENV[@]}" cargo build "${CARGO_ARGS[@]}" +env -i PATH="$CARGO_PATH" cargo build "${CARGO_ARGS[@]}" mkdir -p "${BUILT_PRODUCTS_DIR}" # Use `lipo` to merge the architectures together into BUILT_PRODUCTS_DIR /usr/bin/xcrun --sdk $PLATFORM_NAME lipo \ - -create $(printf "${CONFIGURATION_TEMP_DIR}/target/%q/${CARGO_TARGET_SUBDIR}/libburrow.a " "${RUST_TARGETS[@]}") \ + -create $(printf "${PROJECT_DIR}/../target/%q/${CARGO_DIR}/libburrow.a " "${RUST_TARGETS[@]}") \ -output "${BUILT_PRODUCTS_DIR}/libburrow.a" diff --git a/Apple/NetworkExtension/libburrow/libburrow.h b/Apple/NetworkExtension/libburrow/libburrow.h index 59b4734..e500de4 100644 --- a/Apple/NetworkExtension/libburrow/libburrow.h +++ b/Apple/NetworkExtension/libburrow/libburrow.h @@ -1,2 +1,2 @@ -__attribute__((__swift_name__("spawnInProcess(socketPath:databasePath:)"))) -extern void spawn_in_process(const char * __nullable socket_path, const char * __nullable db_path); +__attribute__((__swift_name__("spawnInProcess(socketPath:)"))) +extern void spawn_in_process(const char * __nullable path); diff --git a/Apple/Profiles/Burrow_Developer_ID.provisionprofile b/Apple/Profiles/Burrow_Developer_ID.provisionprofile deleted file mode 100644 index 3ecd831..0000000 Binary files a/Apple/Profiles/Burrow_Developer_ID.provisionprofile and /dev/null differ diff --git a/Apple/Profiles/Burrow_Network_Developer_ID.provisionprofile b/Apple/Profiles/Burrow_Network_Developer_ID.provisionprofile deleted file mode 100644 index 3ce7e37..0000000 Binary files a/Apple/Profiles/Burrow_Network_Developer_ID.provisionprofile and /dev/null differ diff --git 
a/Apple/Shared/Constants.swift b/Apple/Shared/Constants.swift new file mode 100644 index 0000000..cb56cb3 --- /dev/null +++ b/Apple/Shared/Constants.swift @@ -0,0 +1,22 @@ +@_implementationOnly import Constants + +public enum Constants { + enum Error: Swift.Error { + case invalidAppGroupIdentifier + } + + public static let bundleIdentifier = AppBundleIdentifier + public static let appGroupIdentifier = AppGroupIdentifier + + public static var groupContainerURL: URL { + get throws { try _groupContainerURL.get() } + } + + private static let _groupContainerURL: Result = { + guard let groupContainerURL = FileManager.default + .containerURL(forSecurityApplicationGroupIdentifier: appGroupIdentifier) else { + return .failure(.invalidAppGroupIdentifier) + } + return .success(groupContainerURL) + }() +} diff --git a/Apple/Configuration/Constants/Constants.h b/Apple/Shared/Constants/Constants.h similarity index 74% rename from Apple/Configuration/Constants/Constants.h rename to Apple/Shared/Constants/Constants.h index 5278b61..09806c5 100644 --- a/Apple/Configuration/Constants/Constants.h +++ b/Apple/Shared/Constants/Constants.h @@ -7,6 +7,5 @@ NS_ASSUME_NONNULL_BEGIN static NSString * const AppBundleIdentifier = MACRO_STRING(APP_BUNDLE_IDENTIFIER); static NSString * const AppGroupIdentifier = MACRO_STRING(APP_GROUP_IDENTIFIER); -static NSString * const NetworkExtensionBundleIdentifier = MACRO_STRING(NETWORK_EXTENSION_BUNDLE_IDENTIFIER); NS_ASSUME_NONNULL_END diff --git a/Apple/Configuration/Constants/module.modulemap b/Apple/Shared/Constants/module.modulemap similarity index 66% rename from Apple/Configuration/Constants/module.modulemap rename to Apple/Shared/Constants/module.modulemap index 0e60f32..7ee21fc 100644 --- a/Apple/Configuration/Constants/module.modulemap +++ b/Apple/Shared/Constants/module.modulemap @@ -1,4 +1,4 @@ -module CConstants { +module Constants { header "Constants.h" export * } diff --git a/Apple/Core/Logging.swift b/Apple/Shared/Logging.swift 
similarity index 88% rename from Apple/Core/Logging.swift rename to Apple/Shared/Logging.swift index ba40888..36f024c 100644 --- a/Apple/Core/Logging.swift +++ b/Apple/Shared/Logging.swift @@ -4,7 +4,7 @@ import os extension Logger { private static let loggers: OSAllocatedUnfairLock<[String: Logger]> = OSAllocatedUnfairLock(initialState: [:]) - public dynamic static var subsystem: String { "com.hackclub.burrow" } + public static let subsystem = Constants.bundleIdentifier public static func logger(for type: Any.Type) -> Logger { let category = String(describing: type) diff --git a/Apple/Shared/Shared.xcconfig b/Apple/Shared/Shared.xcconfig new file mode 100644 index 0000000..50718bd --- /dev/null +++ b/Apple/Shared/Shared.xcconfig @@ -0,0 +1,5 @@ +PRODUCT_NAME = BurrowShared +MERGEABLE_LIBRARY = YES + +SWIFT_INCLUDE_PATHS = $(PROJECT_DIR)/Shared/Constants +GCC_PREPROCESSOR_DEFINITIONS = APP_BUNDLE_IDENTIFIER=$(APP_BUNDLE_IDENTIFIER) APP_GROUP_IDENTIFIER=$(APP_GROUP_IDENTIFIER) diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/100.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/100.png deleted file mode 100644 index f86c139..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/100.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/1024.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/1024.png deleted file mode 100644 index 872c9ce..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/1024.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/114.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/114.png deleted file mode 100644 index 3bb278d..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/114.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/120.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/120.png deleted file mode 100644 index 185615e..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/120.png and 
/dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/128.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/128.png deleted file mode 100644 index 51bd97c..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/128.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/144.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/144.png deleted file mode 100644 index b05e371..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/144.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/152.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/152.png deleted file mode 100644 index c95ea8a..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/152.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/16.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/16.png deleted file mode 100644 index 3cb15a5..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/16.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/167.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/167.png deleted file mode 100644 index a3ad6a2..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/167.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/172.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/172.png deleted file mode 100644 index 9f3bdb4..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/172.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/180.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/180.png deleted file mode 100644 index 53c1237..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/180.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/196.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/196.png deleted file mode 100644 index ea95961..0000000 Binary files 
a/Apple/UI/Assets.xcassets/AppIcon.appiconset/196.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/20.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/20.png deleted file mode 100644 index aec8236..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/20.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/216.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/216.png deleted file mode 100644 index 9f0e3ce..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/216.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/256.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/256.png deleted file mode 100644 index a82ce93..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/256.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/29.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/29.png deleted file mode 100644 index 8dc25c1..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/29.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/32.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/32.png deleted file mode 100644 index 655a424..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/32.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/40.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/40.png deleted file mode 100644 index 1f7f5e9..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/40.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/48.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/48.png deleted file mode 100644 index 4a67ebf..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/48.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/50.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/50.png deleted file mode 100644 index 
88985d8..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/50.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/512.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/512.png deleted file mode 100644 index e5cbf6a..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/512.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/55.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/55.png deleted file mode 100644 index dc079ea..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/55.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/57.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/57.png deleted file mode 100644 index de4fddc..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/57.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/58.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/58.png deleted file mode 100644 index 961adad..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/58.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/60.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/60.png deleted file mode 100644 index 2a9e939..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/60.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/64.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/64.png deleted file mode 100644 index c67e407..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/64.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/72.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/72.png deleted file mode 100644 index d09aebe..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/72.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/76.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/76.png deleted 
file mode 100644 index 3e649b6..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/76.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/80.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/80.png deleted file mode 100644 index 6dad29f..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/80.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/87.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/87.png deleted file mode 100644 index a8ccb38..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/87.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/88.png b/Apple/UI/Assets.xcassets/AppIcon.appiconset/88.png deleted file mode 100644 index b1a478a..0000000 Binary files a/Apple/UI/Assets.xcassets/AppIcon.appiconset/88.png and /dev/null differ diff --git a/Apple/UI/Assets.xcassets/AppIcon.appiconset/Contents.json b/Apple/UI/Assets.xcassets/AppIcon.appiconset/Contents.json deleted file mode 100644 index f78687a..0000000 --- a/Apple/UI/Assets.xcassets/AppIcon.appiconset/Contents.json +++ /dev/null @@ -1,344 +0,0 @@ -{ - "images" : [ - { - "filename" : "40.png", - "idiom" : "iphone", - "scale" : "2x", - "size" : "20x20" - }, - { - "filename" : "60.png", - "idiom" : "iphone", - "scale" : "3x", - "size" : "20x20" - }, - { - "filename" : "29.png", - "idiom" : "iphone", - "scale" : "1x", - "size" : "29x29" - }, - { - "filename" : "58.png", - "idiom" : "iphone", - "scale" : "2x", - "size" : "29x29" - }, - { - "filename" : "87.png", - "idiom" : "iphone", - "scale" : "3x", - "size" : "29x29" - }, - { - "filename" : "80.png", - "idiom" : "iphone", - "scale" : "2x", - "size" : "40x40" - }, - { - "filename" : "120.png", - "idiom" : "iphone", - "scale" : "3x", - "size" : "40x40" - }, - { - "filename" : "57.png", - "idiom" : "iphone", - "scale" : "1x", - "size" : "57x57" - }, - { - "filename" : "114.png", - "idiom" : "iphone", - "scale" : "2x", - "size" 
: "57x57" - }, - { - "filename" : "120.png", - "idiom" : "iphone", - "scale" : "2x", - "size" : "60x60" - }, - { - "filename" : "180.png", - "idiom" : "iphone", - "scale" : "3x", - "size" : "60x60" - }, - { - "filename" : "20.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "20x20" - }, - { - "filename" : "40.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "20x20" - }, - { - "filename" : "29.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "29x29" - }, - { - "filename" : "58.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "29x29" - }, - { - "filename" : "40.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "40x40" - }, - { - "filename" : "80.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "40x40" - }, - { - "filename" : "50.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "50x50" - }, - { - "filename" : "100.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "50x50" - }, - { - "filename" : "72.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "72x72" - }, - { - "filename" : "144.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "72x72" - }, - { - "filename" : "76.png", - "idiom" : "ipad", - "scale" : "1x", - "size" : "76x76" - }, - { - "filename" : "152.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "76x76" - }, - { - "filename" : "167.png", - "idiom" : "ipad", - "scale" : "2x", - "size" : "83.5x83.5" - }, - { - "filename" : "1024.png", - "idiom" : "ios-marketing", - "scale" : "1x", - "size" : "1024x1024" - }, - { - "filename" : "16.png", - "idiom" : "mac", - "scale" : "1x", - "size" : "16x16" - }, - { - "filename" : "32.png", - "idiom" : "mac", - "scale" : "2x", - "size" : "16x16" - }, - { - "filename" : "32.png", - "idiom" : "mac", - "scale" : "1x", - "size" : "32x32" - }, - { - "filename" : "64.png", - "idiom" : "mac", - "scale" : "2x", - "size" : "32x32" - }, - { - "filename" : "128.png", - "idiom" : "mac", - "scale" : "1x", - "size" : "128x128" - }, - { - "filename" : "256.png", - "idiom" : 
"mac", - "scale" : "2x", - "size" : "128x128" - }, - { - "filename" : "256.png", - "idiom" : "mac", - "scale" : "1x", - "size" : "256x256" - }, - { - "filename" : "512.png", - "idiom" : "mac", - "scale" : "2x", - "size" : "256x256" - }, - { - "filename" : "512.png", - "idiom" : "mac", - "scale" : "1x", - "size" : "512x512" - }, - { - "filename" : "1024.png", - "idiom" : "mac", - "scale" : "2x", - "size" : "512x512" - }, - { - "filename" : "48.png", - "idiom" : "watch", - "role" : "notificationCenter", - "scale" : "2x", - "size" : "24x24", - "subtype" : "38mm" - }, - { - "filename" : "55.png", - "idiom" : "watch", - "role" : "notificationCenter", - "scale" : "2x", - "size" : "27.5x27.5", - "subtype" : "42mm" - }, - { - "filename" : "58.png", - "idiom" : "watch", - "role" : "companionSettings", - "scale" : "2x", - "size" : "29x29" - }, - { - "filename" : "87.png", - "idiom" : "watch", - "role" : "companionSettings", - "scale" : "3x", - "size" : "29x29" - }, - { - "idiom" : "watch", - "role" : "notificationCenter", - "scale" : "2x", - "size" : "33x33", - "subtype" : "45mm" - }, - { - "filename" : "80.png", - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "40x40", - "subtype" : "38mm" - }, - { - "filename" : "88.png", - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "44x44", - "subtype" : "40mm" - }, - { - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "46x46", - "subtype" : "41mm" - }, - { - "filename" : "100.png", - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "50x50", - "subtype" : "44mm" - }, - { - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "51x51", - "subtype" : "45mm" - }, - { - "idiom" : "watch", - "role" : "appLauncher", - "scale" : "2x", - "size" : "54x54", - "subtype" : "49mm" - }, - { - "filename" : "172.png", - "idiom" : "watch", - "role" : "quickLook", - "scale" : "2x", - "size" : "86x86", - "subtype" : "38mm" - }, - 
{ - "filename" : "196.png", - "idiom" : "watch", - "role" : "quickLook", - "scale" : "2x", - "size" : "98x98", - "subtype" : "42mm" - }, - { - "filename" : "216.png", - "idiom" : "watch", - "role" : "quickLook", - "scale" : "2x", - "size" : "108x108", - "subtype" : "44mm" - }, - { - "idiom" : "watch", - "role" : "quickLook", - "scale" : "2x", - "size" : "117x117", - "subtype" : "45mm" - }, - { - "idiom" : "watch", - "role" : "quickLook", - "scale" : "2x", - "size" : "129x129", - "subtype" : "49mm" - }, - { - "filename" : "1024.png", - "idiom" : "watch-marketing", - "scale" : "1x", - "size" : "1024x1024" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Apple/UI/Assets.xcassets/WireGuard.colorset/Contents.json b/Apple/UI/Assets.xcassets/WireGuard.colorset/Contents.json deleted file mode 100644 index 092ec69..0000000 --- a/Apple/UI/Assets.xcassets/WireGuard.colorset/Contents.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "colors" : [ - { - "color" : { - "color-space" : "srgb", - "components" : { - "alpha" : "1.000", - "blue" : "0x1A", - "green" : "0x17", - "red" : "0x88" - } - }, - "idiom" : "universal" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Apple/UI/Assets.xcassets/WireGuard.imageset/Contents.json b/Apple/UI/Assets.xcassets/WireGuard.imageset/Contents.json deleted file mode 100644 index e7fe15a..0000000 --- a/Apple/UI/Assets.xcassets/WireGuard.imageset/Contents.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "images" : [ - { - "filename" : "WireGuard.svg", - "idiom" : "universal" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - }, - "properties" : { - "preserves-vector-representation" : true - } -} diff --git a/Apple/UI/Assets.xcassets/WireGuard.imageset/WireGuard.svg b/Apple/UI/Assets.xcassets/WireGuard.imageset/WireGuard.svg deleted file mode 100644 index 9520f89..0000000 --- a/Apple/UI/Assets.xcassets/WireGuard.imageset/WireGuard.svg +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at 
end of file diff --git a/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/Contents.json b/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/Contents.json deleted file mode 100644 index 782dd12..0000000 --- a/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/Contents.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "images" : [ - { - "filename" : "WireGuardTitle.svg", - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "scale" : "2x" - }, - { - "idiom" : "universal", - "scale" : "3x" - } - ], - "info" : { - "author" : "xcode", - "version" : 1 - } -} diff --git a/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/WireGuardTitle.svg b/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/WireGuardTitle.svg deleted file mode 100644 index 64946da..0000000 --- a/Apple/UI/Assets.xcassets/WireGuardTitle.imageset/WireGuardTitle.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/Apple/UI/BurrowView.swift b/Apple/UI/BurrowView.swift deleted file mode 100644 index e15d3f7..0000000 --- a/Apple/UI/BurrowView.swift +++ /dev/null @@ -1,1716 +0,0 @@ -import BurrowConfiguration -import Foundation -import SwiftUI -#if canImport(AuthenticationServices) -import AuthenticationServices -#endif -#if canImport(UIKit) -import UIKit -#elseif canImport(AppKit) -import AppKit -#endif - -public struct BurrowView: View { - @State private var networkViewModel: NetworkViewModel - @State private var accountStore = NetworkAccountStore() - @State private var activeSheet: ConfigurationSheet? 
- @State private var didRunAutomation = false - - public var body: some View { - NavigationStack { - ScrollView { - VStack(alignment: .leading, spacing: 24) { - HStack(alignment: .top) { - VStack(alignment: .leading, spacing: 6) { - Text("Burrow") - .font(.largeTitle) - .fontWeight(.bold) - if showsHeaderSubtitle { - Text("Networks and accounts") - .font(.headline) - .foregroundStyle(.secondary) - } - } - if showsToolbarAddMenu { - Spacer() - Menu { - Button("Add WireGuard Network") { - activeSheet = .wireGuard - } - Button("Save Tor Account") { - activeSheet = .tor - } - Button("Add Tailnet Account") { - activeSheet = .tailnet - } - } label: { - Image(systemName: "plus.circle.fill") - .font(.title) - .accessibilityLabel("Add") - } - } - } - .padding(.top) - - if showsInlineQuickActions { - quickAddSection - } - - VStack(alignment: .leading, spacing: 12) { - sectionHeader( - title: "Networks", - detail: showsInlineQuickActions - ? nil - : "Stored daemon networks and their active account selectors" - ) - if let connectionError = networkViewModel.connectionError { - Text(connectionError) - .font(.footnote) - .foregroundStyle(.secondary) - } - NetworkCarouselView(networks: networkViewModel.cards) - } - - if showsAccountsSection { - VStack(alignment: .leading, spacing: 12) { - sectionHeader( - title: "Accounts", - detail: showsInlineQuickActions - ? 
nil - : "Per-network identities and sign-in state" - ) - if accountStore.accounts.isEmpty { - ContentUnavailableView( - "No Accounts Yet", - systemImage: "person.crop.circle.badge.plus", - description: Text("Save a Tor account or sign in to Tailnet to keep network identities ready on this device.") - ) - .frame(maxWidth: .infinity, minHeight: 180) - } else { - LazyVStack(spacing: 12) { - ForEach(accountStore.accounts) { account in - AccountRowView( - account: account, - hasSecret: accountStore.hasStoredSecret(for: account) - ) - } - } - } - } - } - - VStack(alignment: .leading, spacing: 8) { - sectionHeader( - title: "Tunnel", - detail: showsInlineQuickActions ? nil : "Current system extension state" - ) - TunnelStatusView() - TunnelButton() - .padding(.bottom) - } - } - .padding() - } - } - .sheet(item: $activeSheet) { sheet in - ConfigurationSheetView( - sheet: sheet, - networkViewModel: networkViewModel, - accountStore: accountStore - ) - } - .onAppear { - runAutomationIfNeeded() - } - } - - public init() { - _networkViewModel = State( - initialValue: NetworkViewModel( - socketURLResult: Result { try Constants.socketURL } - ) - ) - } - - private func runAutomationIfNeeded() { - guard !didRunAutomation, - let automation = BurrowAutomationConfig.current, - automation.action == .tailnetLogin || automation.action == .tailnetProbe - else { - return - } - didRunAutomation = true - activeSheet = .tailnet - } - - @ViewBuilder - private var quickAddSection: some View { - VStack(alignment: .leading, spacing: 12) { - sectionHeader(title: "Add", detail: nil) - VStack(spacing: 12) { - ForEach(ConfigurationSheet.allCases) { sheet in - QuickAddButton(sheet: sheet) { - activeSheet = sheet - } - } - } - } - } - - @ViewBuilder - private func sectionHeader(title: String, detail: String?) 
-> some View { - VStack(alignment: .leading, spacing: 4) { - Text(title) - .font(.title2.weight(.semibold)) - if let detail, !detail.isEmpty { - Text(detail) - .font(.subheadline) - .foregroundStyle(.secondary) - } - } - } - - private var showsInlineQuickActions: Bool { - #if os(iOS) - true - #else - false - #endif - } - - private var showsToolbarAddMenu: Bool { - !showsInlineQuickActions - } - - private var showsHeaderSubtitle: Bool { - !showsInlineQuickActions - } - - private var showsAccountsSection: Bool { - #if os(iOS) - !accountStore.accounts.isEmpty - #else - true - #endif - } -} - -private enum ConfigurationSheet: String, CaseIterable, Identifiable { - case wireGuard - case tor - case tailnet - - var id: String { rawValue } - - var kind: AccountNetworkKind { - switch self { - case .wireGuard: .wireGuard - case .tor: .tor - case .tailnet: .tailnet - } - } - - var iconName: String { - switch self { - case .wireGuard: - "wave.3.right" - case .tor: - "shield.lefthalf.filled.badge.checkmark" - case .tailnet: - "network.badge.shield.half.filled" - } - } - - var quickActionTitle: String { - switch self { - case .wireGuard: - "WireGuard" - case .tor: - "Tor" - case .tailnet: - "Tailnet" - } - } - - var quickActionSubtitle: String { - switch self { - case .wireGuard: - "Import a tunnel" - case .tor: - "Save an Arti profile" - case .tailnet: - "Sign in or save a control plane" - } - } - - var quickActionColor: Color { - switch self { - case .wireGuard: - .blue - case .tor, .tailnet: - kind.accentColor - } - } -} - -private struct QuickAddButton: View { - let sheet: ConfigurationSheet - let action: () -> Void - - var body: some View { - Button(action: action) { - HStack(spacing: 14) { - Image(systemName: sheet.iconName) - .font(.title3.weight(.semibold)) - .frame(width: 24) - - VStack(alignment: .leading, spacing: 4) { - Text(sheet.quickActionTitle) - .font(.headline) - Text(sheet.quickActionSubtitle) - .font(.caption) - .opacity(0.88) - } - - Spacer() - } - 
.frame(maxWidth: .infinity, minHeight: 64, alignment: .leading) - } - .accessibilityIdentifier("quick-add-\(sheet.rawValue)") - .buttonStyle(.floating(color: sheet.quickActionColor, cornerRadius: 18)) - } -} - -private struct AccountDraft { - var title = "" - var accountName = "" - var identityName = "" - var wireGuardConfig = "" - - var discoveryEmail = "" - var authority = "" - var tailnet = "" - var hostname = ProcessInfo.processInfo.hostName - var username = "" - var secret = "" - var authMode: AccountAuthMode = .none - - var torAddresses = "100.64.0.2/32" - var torDNS = "1.1.1.1, 1.0.0.1" - var torMTU = "1400" - var torListen = "127.0.0.1:9040" - - init(sheet: ConfigurationSheet) { - switch sheet { - case .wireGuard: - break - case .tor: - title = "Default Tor" - accountName = "default" - identityName = "apple" - case .tailnet: - title = "Tailnet" - accountName = "default" - identityName = "apple" - authority = TailnetProvider.tailscale.defaultAuthority ?? "" - authMode = .web - } - } -} - -private struct ConfigurationSheetView: View { - @Environment(\.dismiss) private var dismiss - - let sheet: ConfigurationSheet - let networkViewModel: NetworkViewModel - let accountStore: NetworkAccountStore - - @State private var draft: AccountDraft - @State private var isSubmitting = false - @State private var errorMessage: String? - @State private var discoveryStatus: TailnetDiscoveryResponse? - @State private var discoveryError: String? - @State private var isDiscoveringTailnet = false - @State private var authorityProbeStatus: TailnetAuthorityProbeStatus? - @State private var authorityProbeError: String? - @State private var isProbingAuthority = false - @State private var tailnetLoginStatus: TailnetLoginStatus? - @State private var tailnetLoginError: String? - @State private var tailnetLoginSessionID: String? - @State private var isStartingTailnetLogin = false - @State private var tailnetPresentedAuthURL: URL? 
- @State private var preserveTailnetLoginSession = false - @State private var usesCustomTailnetAuthority = false - @State private var showsAdvancedTailnetSettings = false - @State private var browserAuthenticator = TailnetBrowserAuthenticator() - @State private var tailnetLoginPollTask: Task? - @State private var tailnetDiscoveryTask: Task? - @State private var tailnetProbeTask: Task? - @State private var didRunAutomation = false - - init( - sheet: ConfigurationSheet, - networkViewModel: NetworkViewModel, - accountStore: NetworkAccountStore - ) { - self.sheet = sheet - self.networkViewModel = networkViewModel - self.accountStore = accountStore - _draft = State(initialValue: AccountDraft(sheet: sheet)) - } - - var body: some View { - NavigationStack { - Form { - Section { - sheetSummaryCard - } - .listRowInsets(.init(top: 4, leading: 0, bottom: 4, trailing: 0)) - .listRowBackground(Color.clear) - - if showsIdentitySection { - Section("Identity") { - identityFields - } - } - - switch sheet { - case .wireGuard: - Section("WireGuard Configuration") { - TextEditor(text: $draft.wireGuardConfig) - .font(.body.monospaced()) - .frame(minHeight: wireGuardEditorHeight) - .contextMenu { - wireGuardContextActions - } - } - case .tor: - Section("Tor Preferences") { - TextField("Virtual Addresses", text: $draft.torAddresses) - TextField("DNS Resolvers", text: $draft.torDNS) - TextField("MTU", text: $draft.torMTU) - TextField("Transparent Listener", text: $draft.torListen) - } - case .tailnet: - tailnetSections - } - - if let errorMessage { - Section { - Text(errorMessage) - .foregroundStyle(.red) - } - } - } - .navigationTitle(sheet.kind.title) - #if os(iOS) - .navigationBarTitleDisplayMode(.inline) - #endif - .toolbar { - ToolbarItem(placement: .cancellationAction) { - Button("Cancel") { - Task { @MainActor in - await cancelTailnetLoginIfNeeded() - dismiss() - } - } - } - #if os(iOS) - ToolbarItem(placement: .topBarTrailing) { - Menu { - sheetMenuActions - } label: { - 
Image(systemName: "ellipsis.circle") - } - .accessibilityLabel("More") - } - #else - ToolbarItem(placement: .primaryAction) { - Menu { - sheetMenuActions - } label: { - Image(systemName: "ellipsis.circle") - } - .accessibilityLabel("More") - } - #endif - if !showsBottomActionButton { - ToolbarItem(placement: .confirmationAction) { - Button(confirmationTitle) { - submit() - } - .disabled(isSubmitting || submissionDisabled) - } - } - } - } - #if os(macOS) - .frame(minWidth: 520, minHeight: 620) - #endif - .safeAreaInset(edge: .bottom) { - if showsBottomActionButton { - bottomActionBar - } - } - .onAppear { - runAutomationIfNeeded() - } - .onChange(of: draft.authority) { _, _ in - resetAuthorityProbe() - if sheet == .tailnet, usesCustomTailnetAuthority { - scheduleTailnetAuthorityProbe() - } - } - .onChange(of: draft.discoveryEmail) { _, _ in - resetTailnetDiscoveryFeedback() - if sheet == .tailnet, !usesCustomTailnetAuthority { - scheduleTailnetDiscovery() - } - } - .onChange(of: draft.authMode) { _, newMode in - guard newMode != .web else { return } - Task { @MainActor in - await cancelTailnetLoginIfNeeded() - } - } - .onDisappear { - tailnetLoginPollTask?.cancel() - tailnetDiscoveryTask?.cancel() - tailnetProbeTask?.cancel() - browserAuthenticator.cancel() - if !preserveTailnetLoginSession { - Task { @MainActor in - await cancelTailnetLoginIfNeeded() - } - } - } - } - - @ViewBuilder - private var identityFields: some View { - TextField("Title", text: $draft.title) - TextField("Account", text: $draft.accountName) - TextField("Identity", text: $draft.identityName) - if sheet == .tailnet { - TextField("Hostname", text: $draft.hostname) - .burrowLoginField() - .autocorrectionDisabled() - } - } - - @ViewBuilder - private var tailnetSections: some View { - Section("Connection") { - TextField("Email address", text: $draft.discoveryEmail) - .burrowEmailField() - .burrowLoginField() - .autocorrectionDisabled() - .accessibilityIdentifier("tailnet-discovery-email") - 
.submitLabel(.continue) - .onSubmit { - if !usesCustomTailnetAuthority { - scheduleTailnetDiscovery(immediate: true) - } - } - - tailnetServerCard - - if showsAdvancedTailnetSettings { - if usesCustomTailnetAuthority { - TextField("Server URL", text: $draft.authority) - .burrowLoginField() - .autocorrectionDisabled() - .accessibilityIdentifier("tailnet-authority") - } else { - TextField("Tailnet", text: $draft.tailnet) - .burrowLoginField() - .autocorrectionDisabled() - .accessibilityIdentifier("tailnet-name") - } - } - } - - Section("Authentication") { - if showsAdvancedTailnetSettings { - Picker("Authentication", selection: $draft.authMode) { - ForEach(availableTailnetAuthModes) { mode in - Text(mode.title).tag(mode) - } - } - .pickerStyle(.menu) - } - - if draft.authMode == .web { - Button { - startTailnetLogin() - } label: { - Label { - Text(isStartingTailnetLogin ? "Starting Sign-In" : tailnetSignInActionTitle) - } icon: { - Image(systemName: isStartingTailnetLogin ? "hourglass" : "person.badge.key") - } - } - .buttonStyle(.borderless) - .disabled(isStartingTailnetLogin || tailnetLoginActionDisabled) - .accessibilityIdentifier("tailnet-start-sign-in") - - if let tailnetLoginStatus { - tailnetLoginCard(status: tailnetLoginStatus, failure: nil) - } else if let tailnetLoginError { - tailnetLoginCard(status: nil, failure: tailnetLoginError) - } - } else { - TextField("Username", text: $draft.username) - .burrowLoginField() - .autocorrectionDisabled() - if draft.authMode != .none { - SecureField( - draft.authMode == .password ? 
"Password" : "Preauth Key", - text: $draft.secret - ) - } - } - - Text(tailnetAuthenticationFootnote) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - - private var sheetSummaryCard: some View { - VStack(alignment: .leading, spacing: 10) { - HStack(spacing: 12) { - Image(systemName: sheet.iconName) - .font(.title3.weight(.semibold)) - .foregroundStyle(sheetAccentColor) - .frame(width: 28, height: 28) - .background( - Circle() - .fill(sheetAccentColor.opacity(0.14)) - ) - - VStack(alignment: .leading, spacing: 3) { - Text(summaryTitle) - .font(.headline) - Text(sheet.kind.subtitle) - .font(.footnote) - .foregroundStyle(.secondary) - } - - Spacer() - } - - if let availabilityNote = sheet.kind.availabilityNote { - Text(availabilityNote) - .font(.footnote) - .foregroundStyle(.secondary) - } - - if sheet == .tailnet { - labeledValue("Server", tailnetServerDisplayLabel) - if let connectionSummary = tailnetConnectionSummary { - Text(connectionSummary) - .font(.footnote.weight(.medium)) - .foregroundStyle(tailnetConnectionSummaryColor) - } - if tailnetLoginStatus?.running == true { - HStack(spacing: 8) { - summaryBadge("Signed In") - } - } - } - } - .padding(14) - .background( - RoundedRectangle(cornerRadius: 18) - .fill(.thinMaterial) - ) - } - - private var tailnetServerCard: some View { - VStack(alignment: .leading, spacing: 8) { - HStack(alignment: .top, spacing: 12) { - VStack(alignment: .leading, spacing: 4) { - Text(usesCustomTailnetAuthority ? 
"Custom Server" : "Server") - .font(.subheadline.weight(.medium)) - Text(tailnetServerDisplayLabel) - .font(.footnote.monospaced()) - .foregroundStyle(.secondary) - .textSelection(.enabled) - } - - Spacer() - - if isDiscoveringTailnet || isProbingAuthority { - ProgressView() - .controlSize(.small) - } else if let summary = tailnetConnectionSummary { - Text(summary) - .font(.caption.weight(.medium)) - .foregroundStyle(tailnetConnectionSummaryColor) - } - } - - if let detail = tailnetServerDetail { - Text(detail) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - .padding(12) - .background( - RoundedRectangle(cornerRadius: 16) - .fill(.thinMaterial) - ) - .accessibilityIdentifier("tailnet-server-card") - } - - private func tailnetAuthorityProbeCard( - status: TailnetAuthorityProbeStatus?, - failure: String? - ) -> some View { - VStack(alignment: .leading, spacing: 6) { - if let status { - Text(status.summary) - .font(.subheadline.weight(.medium)) - Text(status.detail ?? "HTTP \(status.statusCode) from \(status.authority)") - .font(.footnote) - .foregroundStyle(.secondary) - .textSelection(.enabled) - } else if let failure { - Text("Connection failed") - .font(.subheadline.weight(.medium)) - .foregroundStyle(.red) - Text(failure) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - .padding(12) - .background( - RoundedRectangle(cornerRadius: 16) - .fill(.thinMaterial) - ) - .accessibilityIdentifier("tailnet-authority-probe-card") - } - - private func tailnetDiscoveryCard( - status: TailnetDiscoveryResponse?, - failure: String? - ) -> some View { - VStack(alignment: .leading, spacing: 6) { - if let status { - Text("Discovered Tailnet Server") - .font(.subheadline.weight(.medium)) - Text(status.authority) - .font(.footnote.monospaced()) - .foregroundStyle(.secondary) - .textSelection(.enabled) - Text(status.provider == .tailscale ? 
"Managed authority" : "Custom authority") - .font(.footnote) - .foregroundStyle(.secondary) - if let oidcIssuer = status.oidcIssuer { - Text("OIDC: \(oidcIssuer)") - .font(.footnote) - .foregroundStyle(.secondary) - .lineLimit(3) - .textSelection(.enabled) - } - } else if let failure { - Text("Discovery failed") - .font(.subheadline.weight(.medium)) - .foregroundStyle(.red) - Text(failure) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - .padding(12) - .background( - RoundedRectangle(cornerRadius: 16) - .fill(.thinMaterial) - ) - .accessibilityIdentifier("tailnet-discovery-card") - } - - private func tailnetLoginCard( - status: TailnetLoginStatus?, - failure: String? - ) -> some View { - VStack(alignment: .leading, spacing: 6) { - if let status { - Text(status.running ? "Signed In" : status.needsLogin ? "Browser Sign-In Required" : "Checking Sign-In") - .font(.subheadline.weight(.medium)) - if let tailnetName = status.tailnetName, !tailnetName.isEmpty { - Text("Tailnet: \(tailnetName)") - .font(.footnote) - .foregroundStyle(.secondary) - } - if let selfDNSName = status.selfDNSName, !selfDNSName.isEmpty { - Text(selfDNSName) - .font(.footnote.monospaced()) - .foregroundStyle(.secondary) - .textSelection(.enabled) - } - if !status.tailnetIPs.isEmpty { - Text(status.tailnetIPs.joined(separator: ", ")) - .font(.footnote.monospaced()) - .foregroundStyle(.secondary) - .textSelection(.enabled) - } - if !status.health.isEmpty { - Text(status.health.joined(separator: " • ")) - .font(.footnote) - .foregroundStyle(.secondary) - } - } else if let failure { - Text("Sign-In failed") - .font(.subheadline.weight(.medium)) - .foregroundStyle(.red) - Text(failure) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - .padding(12) - .background( - RoundedRectangle(cornerRadius: 16) - .fill(.thinMaterial) - ) - .accessibilityIdentifier("tailnet-login-card") - } - - private func summaryBadge(_ label: String) -> some View { - Text(label) - 
.font(.caption.weight(.medium)) - .foregroundStyle(.secondary) - .padding(.horizontal, 10) - .padding(.vertical, 5) - .background( - Capsule() - .fill(.white.opacity(0.5)) - ) - } - - @ViewBuilder - private var bottomActionBar: some View { - VStack(spacing: 0) { - Divider() - .overlay(.white.opacity(0.3)) - Button(confirmationTitle) { - submit() - } - .buttonStyle(.floating(color: sheetAccentColor, cornerRadius: 18)) - .disabled(isSubmitting || submissionDisabled) - .padding(.horizontal) - .padding(.top, 12) - .padding(.bottom, 8) - } - .background(.ultraThinMaterial) - } - - @ViewBuilder - private var sheetMenuActions: some View { - Button("Use Suggested Identity") { - applySuggestedIdentity() - } - - switch sheet { - case .wireGuard: - Button("Paste Configuration") { - pasteWireGuardConfiguration() - } - .disabled(clipboardString?.isEmpty ?? true) - - Button("Clear Configuration", role: .destructive) { - draft.wireGuardConfig = "" - } - .disabled(draft.wireGuardConfig.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) - - case .tor: - Menu("Presets") { - Button("Recommended Tor Defaults") { - applyTorDefaults() - } - Button("Restore Suggested Identity") { - applySuggestedIdentity() - } - } - - case .tailnet: - Button(usesCustomTailnetAuthority ? "Use Automatic Server" : "Edit Custom Server") { - toggleTailnetAuthorityMode() - } - - Button(showsAdvancedTailnetSettings ? 
"Hide Advanced Settings" : "Show Advanced Settings") { - showsAdvancedTailnetSettings.toggle() - } - - if showsAdvancedTailnetSettings, availableTailnetAuthModes.count > 1 { - Menu("Authentication") { - ForEach(availableTailnetAuthModes) { mode in - Button(mode.title) { - draft.authMode = mode - if mode == .none { - draft.secret = "" - } - } - } - } - } - - Button("Refresh Server Lookup") { - scheduleTailnetDiscovery(immediate: true) - } - .disabled(usesCustomTailnetAuthority || normalizedOptional(draft.discoveryEmail) == nil) - } - } - - @ViewBuilder - private var wireGuardContextActions: some View { - Button("Paste Configuration") { - pasteWireGuardConfiguration() - } - .disabled(clipboardString?.isEmpty ?? true) - - Button("Clear", role: .destructive) { - draft.wireGuardConfig = "" - } - .disabled(draft.wireGuardConfig.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty) - } - - private var sheetAccentColor: Color { - switch sheet { - case .wireGuard: - .blue - case .tor, .tailnet: - sheet.kind.accentColor - } - } - - private var summaryTitle: String { - switch sheet { - case .wireGuard: - "Import WireGuard" - case .tor: - "Configure Tor" - case .tailnet: - "Connect Tailnet" - } - } - - private var showsBottomActionButton: Bool { - #if os(iOS) - return true - #else - return false - #endif - } - - private var showsIdentitySection: Bool { - switch sheet { - case .wireGuard, .tor: - return true - case .tailnet: - return showsAdvancedTailnetSettings - } - } - - private var wireGuardEditorHeight: CGFloat { - #if os(iOS) - 180 - #else - 220 - #endif - } - - private var confirmationTitle: String { - switch sheet { - case .wireGuard: - return "Add Network" - case .tor: - return "Save Account" - case .tailnet: - return "Save Account" - } - } - - private var tailnetLoginActionDisabled: Bool { - switch sheet { - case .tailnet: - if usesCustomTailnetAuthority { - return normalizedOptional(draft.authority) == nil - } - return false - case .wireGuard, .tor: - return true 
- } - } - - private var submissionDisabled: Bool { - switch sheet { - case .wireGuard: - return draft.wireGuardConfig.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - case .tor: - return normalizedOptional(draft.accountName) == nil || normalizedOptional(draft.identityName) == nil - case .tailnet: - if normalizedOptional(draft.accountName) == nil || normalizedOptional(draft.identityName) == nil { - return true - } - if normalizedOptional(draft.authority) == nil { - return true - } - if draft.authMode == .web { - return tailnetLoginStatus?.running != true - } - if draft.authMode != .none && normalizedOptional(draft.secret) == nil { - return true - } - return false - } - } - - private var tailnetServerDisplayLabel: String { - if usesCustomTailnetAuthority { - return normalizedOptional(draft.authority) - ?? "Enter a custom Tailnet server" - } - return TailnetProvider.tailscale.defaultAuthority ?? "Tailscale managed" - } - - private var tailnetServerDetail: String? { - if usesCustomTailnetAuthority { - if let discovery = discoveryStatus { - return "Discovered from \(discovery.domain)." - } - if let discoveryError { - return discoveryError - } - return "Use a custom Tailnet authority when your domain does not advertise one." - } - return "Continue with Tailscale, or open advanced settings to use a custom server." - } - - private var tailnetConnectionSummary: String? 
{ - if isDiscoveringTailnet { - return "Finding server" - } - if isProbingAuthority { - return "Checking" - } - if let authorityProbeStatus { - return authorityProbeStatus.summary - } - if authorityProbeError != nil { - return "Unavailable" - } - return nil - } - - private var tailnetConnectionSummaryColor: Color { - if authorityProbeError != nil { - return .red - } - return .secondary - } - - private func submit() { - isSubmitting = true - errorMessage = nil - - Task { @MainActor in - defer { isSubmitting = false } - do { - switch sheet { - case .wireGuard: - try await submitWireGuard() - dismiss() - case .tor: - try submitTor() - dismiss() - case .tailnet: - try await submitTailnet() - } - } catch { - errorMessage = error.localizedDescription - } - } - } - - private func submitWireGuard() async throws { - let networkID = try await networkViewModel.addWireGuardNetwork( - configText: draft.wireGuardConfig - ) - - let title = titleOrFallback("WireGuard \(networkID)") - let record = NetworkAccountRecord( - id: UUID(), - kind: .wireGuard, - title: title, - authority: nil, - provider: nil, - accountName: normalized(draft.accountName, fallback: "default"), - identityName: normalized(draft.identityName, fallback: "network-\(networkID)"), - hostname: nil, - username: nil, - tailnet: nil, - authMode: .none, - note: "Linked to daemon network #\(networkID).", - createdAt: .now, - updatedAt: .now - ) - try accountStore.upsert(record, secret: nil) - } - - private func submitTor() throws { - let title = titleOrFallback("Tor \(normalized(draft.identityName, fallback: "apple"))") - let note = [ - "Addresses: \(csvSummary(draft.torAddresses))", - "DNS: \(csvSummary(draft.torDNS))", - "MTU: \(normalized(draft.torMTU, fallback: "1400"))", - "Listen: \(normalized(draft.torListen, fallback: "127.0.0.1:9040"))", - ].joined(separator: " • ") - - let record = NetworkAccountRecord( - id: UUID(), - kind: .tor, - title: title, - authority: "arti://local", - provider: nil, - accountName: 
normalized(draft.accountName, fallback: "default"), - identityName: normalized(draft.identityName, fallback: "apple"), - hostname: nil, - username: nil, - tailnet: nil, - authMode: .none, - note: note, - createdAt: .now, - updatedAt: .now - ) - try accountStore.upsert(record, secret: nil) - } - - private func submitTailnet() async throws { - let secret = (draft.authMode == .none || draft.authMode == .web) ? nil : draft.secret - let username = normalizedOptional(draft.username) - preserveTailnetLoginSession = draft.authMode == .web && tailnetLoginStatus?.running == true - try await saveTailnetAccount(secret: secret, username: username) - dismiss() - } - - private func runAutomationIfNeeded() { - guard !didRunAutomation, - sheet == .tailnet, - let automation = BurrowAutomationConfig.current, - automation.action == .tailnetLogin || automation.action == .tailnetProbe - else { - return - } - - didRunAutomation = true - draft.title = automation.title ?? draft.title - draft.accountName = automation.accountName ?? draft.accountName - draft.identityName = automation.identityName ?? draft.identityName - draft.hostname = automation.hostname ?? draft.hostname - - Task { @MainActor in - switch automation.action { - case .tailnetLogin: - applyTailnetDefaults(for: .tailscale) - startTailnetLogin() - case .tailnetProbe: - usesCustomTailnetAuthority = true - showsAdvancedTailnetSettings = true - draft.authority = automation.authority ?? TailnetProvider.headscale.defaultAuthority ?? draft.authority - probeTailnetAuthority() - } - } - } - - private func saveTailnetAccount(secret: String?, username: String?) async throws { - let provider = inferredTailnetProvider - let title = titleOrFallback( - hostnameFallback(from: draft.authority, fallback: "Tailnet") - ) - - let payload = TailnetNetworkPayload( - provider: provider, - authority: normalizedOptional(draft.authority) ?? normalizedOptional(provider.defaultAuthority ?? 
""), - account: normalized(draft.accountName, fallback: "default"), - identity: normalized(draft.identityName, fallback: "apple"), - tailnet: normalizedOptional(draft.tailnet), - hostname: normalizedOptional(draft.hostname) - ) - - var noteParts: [String] = [ - "Server: \(hostnameFallback(from: payload.authority ?? "", fallback: "tailnet"))", - ] - - if showsAdvancedTailnetSettings || draft.authMode != .web { - noteParts.append("Auth: \(draft.authMode.title)") - } - - if draft.authMode == .web, tailnetLoginStatus?.running == true { - noteParts.append("Browser sign-in complete") - } - - do { - let networkID = try await networkViewModel.addTailnetNetwork(payload: payload) - noteParts.append("Linked to daemon network #\(networkID)") - } catch { - noteParts.append("Daemon network add pending") - } - - let record = NetworkAccountRecord( - id: UUID(), - kind: .tailnet, - title: title, - authority: payload.authority, - provider: provider, - accountName: payload.account, - identityName: payload.identity, - hostname: payload.hostname, - username: username, - tailnet: payload.tailnet, - authMode: draft.authMode, - note: noteParts.joined(separator: " • "), - createdAt: .now, - updatedAt: .now - ) - try accountStore.upsert(record, secret: secret) - } - - private func applySuggestedIdentity() { - let defaults = AccountDraft(sheet: sheet) - if draft.title.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty { - draft.title = defaults.title - } - draft.accountName = defaults.accountName - draft.identityName = defaults.identityName - if sheet == .tailnet { - draft.hostname = defaults.hostname - } - } - - private func applyTorDefaults() { - let defaults = AccountDraft(sheet: .tor) - draft.title = defaults.title - draft.accountName = defaults.accountName - draft.identityName = defaults.identityName - draft.torAddresses = defaults.torAddresses - draft.torDNS = defaults.torDNS - draft.torMTU = defaults.torMTU - draft.torListen = defaults.torListen - } - - private func 
applyTailnetDefaults(for provider: TailnetProvider) { - resetTailnetDiscoveryFeedback() - usesCustomTailnetAuthority = provider != .tailscale - draft.authority = provider.defaultAuthority ?? "" - if !availableTailnetAuthModes.contains(draft.authMode) { - draft.authMode = .web - } - } - - private func startTailnetLogin() { - isStartingTailnetLogin = true - tailnetLoginError = nil - preserveTailnetLoginSession = false - - Task { @MainActor in - defer { isStartingTailnetLogin = false } - do { - let authority = try await resolveTailnetAuthorityForLogin() - let status = try await networkViewModel.startTailnetLogin( - accountName: normalized(draft.accountName, fallback: "default"), - identityName: normalized(draft.identityName, fallback: "apple"), - hostname: normalizedOptional(draft.hostname), - authority: authority - ) - tailnetLoginSessionID = status.sessionID - updateTailnetLoginStatus(status) - beginTailnetLoginPolling(sessionID: status.sessionID) - } catch { - tailnetLoginError = error.localizedDescription - } - } - } - - private func probeTailnetAuthority() { - guard let authority = normalizedOptional(draft.authority) else { - authorityProbeStatus = nil - authorityProbeError = "Enter a server URL first." 
- return - } - - isProbingAuthority = true - authorityProbeStatus = nil - authorityProbeError = nil - - Task { @MainActor in - defer { isProbingAuthority = false } - do { - authorityProbeStatus = try await networkViewModel.probeTailnetAuthority(authority) - } catch { - authorityProbeError = error.localizedDescription - } - } - } - - private func resetAuthorityProbe() { - tailnetProbeTask?.cancel() - authorityProbeStatus = nil - authorityProbeError = nil - tailnetLoginError = nil - } - - private func resetTailnetDiscoveryFeedback() { - tailnetDiscoveryTask?.cancel() - discoveryStatus = nil - discoveryError = nil - } - - private func discoverTailnetAuthority() { - guard let email = normalizedOptional(draft.discoveryEmail) else { - discoveryStatus = nil - discoveryError = "Enter an email address first." - return - } - - isDiscoveringTailnet = true - discoveryStatus = nil - discoveryError = nil - - Task { @MainActor in - defer { isDiscoveringTailnet = false } - do { - let discovery = try await networkViewModel.discoverTailnet(email: email) - discoveryStatus = discovery - draft.authority = discovery.authority - probeTailnetAuthority() - } catch { - discoveryError = error.localizedDescription - } - } - } - - private func scheduleTailnetDiscovery(immediate: Bool = false) { - guard sheet == .tailnet else { return } - tailnetDiscoveryTask?.cancel() - - guard !usesCustomTailnetAuthority else { - discoveryStatus = nil - discoveryError = nil - return - } - - guard normalizedOptional(draft.discoveryEmail) != nil else { - discoveryStatus = nil - discoveryError = nil - draft.authority = TailnetProvider.tailscale.defaultAuthority ?? "" - return - } - - tailnetDiscoveryTask = Task { @MainActor in - if !immediate { - try? 
await Task.sleep(for: .milliseconds(450)) - } - guard !Task.isCancelled else { return } - discoverTailnetAuthority() - } - } - - private func scheduleTailnetAuthorityProbe() { - guard sheet == .tailnet else { return } - tailnetProbeTask?.cancel() - guard normalizedOptional(draft.authority) != nil else { return } - - tailnetProbeTask = Task { @MainActor in - try? await Task.sleep(for: .milliseconds(300)) - guard !Task.isCancelled else { return } - probeTailnetAuthority() - } - } - - private func toggleTailnetAuthorityMode() { - let discoveredAuthority = discoveryStatus?.authority - usesCustomTailnetAuthority.toggle() - resetTailnetDiscoveryFeedback() - resetAuthorityProbe() - if usesCustomTailnetAuthority { - draft.authority = discoveredAuthority ?? draft.authority - } else { - draft.authority = TailnetProvider.tailscale.defaultAuthority ?? "" - scheduleTailnetDiscovery(immediate: normalizedOptional(draft.discoveryEmail) != nil) - } - } - - private func resolveTailnetAuthorityForLogin() async throws -> String { - if !usesCustomTailnetAuthority { - let authority = TailnetProvider.tailscale.defaultAuthority ?? "" - draft.authority = authority - scheduleTailnetAuthorityProbe() - return authority - } - - if let authority = normalizedOptional(draft.authority) { - return authority - } - - if let email = normalizedOptional(draft.discoveryEmail) { - let discovery = try await networkViewModel.discoverTailnet(email: email) - discoveryStatus = discovery - discoveryError = nil - draft.authority = discovery.authority - scheduleTailnetAuthorityProbe() - return discovery.authority - } - - throw NSError(domain: "BurrowTailnet", code: 1, userInfo: [ - NSLocalizedDescriptionKey: "Enter an email address or a custom server URL first." 
- ]) - } - - private func beginTailnetLoginPolling(sessionID: String) { - tailnetLoginPollTask?.cancel() - tailnetLoginPollTask = Task { @MainActor in - while !Task.isCancelled { - do { - let status = try await networkViewModel.tailnetLoginStatus(sessionID: sessionID) - updateTailnetLoginStatus(status) - if status.running { - tailnetLoginPollTask = nil - return - } - } catch { - tailnetLoginError = error.localizedDescription - tailnetLoginPollTask = nil - return - } - try? await Task.sleep(for: .seconds(1)) - } - } - } - - private func updateTailnetLoginStatus(_ status: TailnetLoginStatus) { - tailnetLoginStatus = status - tailnetLoginError = nil - tailnetLoginSessionID = status.sessionID - - if status.running { - browserAuthenticator.cancel() - tailnetPresentedAuthURL = nil - return - } - - guard let authURL = status.authURL else { - return - } - - if tailnetPresentedAuthURL != authURL { - tailnetPresentedAuthURL = authURL - browserAuthenticator.start(url: authURL) { [sessionID = status.sessionID] in - Task { @MainActor in - if tailnetLoginStatus?.running != true { - tailnetLoginSessionID = sessionID - } - } - } - } - } - - private func cancelTailnetLoginIfNeeded() async { - tailnetLoginPollTask?.cancel() - tailnetLoginPollTask = nil - browserAuthenticator.cancel() - tailnetPresentedAuthURL = nil - - guard tailnetLoginStatus?.running != true, - let sessionID = tailnetLoginSessionID - else { - return - } - - do { - try await networkViewModel.cancelTailnetLogin(sessionID: sessionID) - } catch { - tailnetLoginError = error.localizedDescription - } - - tailnetLoginStatus = nil - tailnetLoginSessionID = nil - } - - private func pasteWireGuardConfiguration() { - guard let clipboardString else { return } - draft.wireGuardConfig = clipboardString - } - - private var clipboardString: String? 
{ - #if canImport(UIKit) - UIPasteboard.general.string - #elseif canImport(AppKit) - NSPasteboard.general.string(forType: .string) - #else - nil - #endif - } - - private func normalized(_ value: String, fallback: String) -> String { - let trimmed = value.trimmingCharacters(in: .whitespacesAndNewlines) - return trimmed.isEmpty ? fallback : trimmed - } - - private func normalizedOptional(_ value: String) -> String? { - let trimmed = value.trimmingCharacters(in: .whitespacesAndNewlines) - return trimmed.isEmpty ? nil : trimmed - } - - private func titleOrFallback(_ fallback: String) -> String { - normalized(draft.title, fallback: fallback) - } - - private func csvSummary(_ value: String) -> String { - value - .split(separator: ",") - .map { $0.trimmingCharacters(in: .whitespacesAndNewlines) } - .filter { !$0.isEmpty } - .joined(separator: ", ") - } - - private func hostnameFallback(from value: String, fallback: String) -> String { - guard let url = URL(string: value), let host = url.host, !host.isEmpty else { - let trimmed = value.trimmingCharacters(in: .whitespacesAndNewlines) - return trimmed.isEmpty ? fallback : trimmed - } - return host - } - - private var availableTailnetAuthModes: [AccountAuthMode] { - [.web, .none, .password, .preauthKey] - } - - private var tailnetSignInActionTitle: String { - if tailnetLoginStatus?.running == true { - return "Signed In" - } - if tailnetLoginSessionID != nil { - return "Resume Sign-In" - } - return "Continue with Tailscale" - } - - private var tailnetAuthenticationFootnote: String { - switch draft.authMode { - case .web: - if usesCustomTailnetAuthority { - return "Burrow signs in through the daemon using your custom Tailnet server." - } - return "Burrow signs in through the daemon using Tailscale's managed browser flow." - case .none: - return "Save the authority only. Useful when the control plane handles authentication elsewhere." - case .password, .preauthKey: - return "Tailnet account material stays on-device. 
Burrow stores the authority and credentials for daemon-managed registration and refresh." - } - } - - private var inferredTailnetProvider: TailnetProvider { - TailnetProvider.inferred( - authority: normalizedOptional(draft.authority), - explicit: discoveryStatus?.provider - ) - } - - @ViewBuilder - private func labeledValue(_ label: String, _ value: String) -> some View { - VStack(alignment: .leading, spacing: 2) { - Text(label) - .font(.caption) - .foregroundStyle(.secondary) - Text(value) - .font(.body.monospaced()) - } - } -} - -private struct AccountRowView: View { - let account: NetworkAccountRecord - let hasSecret: Bool - - var body: some View { - VStack(alignment: .leading, spacing: 10) { - HStack(alignment: .top) { - VStack(alignment: .leading, spacing: 4) { - Text(account.title) - .font(.headline) - Text(account.kind.title) - .font(.subheadline) - .foregroundStyle(account.kind.accentColor) - } - Spacer() - if hasSecret { - Label("Credential stored", systemImage: "key.fill") - .font(.caption) - .foregroundStyle(.secondary) - } - } - - if let authority = account.authority { - labeledValue("Authority", authority) - } - - labeledValue("Account", account.accountName) - labeledValue("Identity", account.identityName) - - if let hostname = account.hostname { - labeledValue("Hostname", hostname) - } - - if let username = account.username { - labeledValue("Username", username) - } - - if let tailnet = account.tailnet { - labeledValue("Tailnet", tailnet) - } - - if let note = account.note { - Text(note) - .font(.footnote) - .foregroundStyle(.secondary) - } - } - .padding() - .frame(maxWidth: .infinity, alignment: .leading) - .background( - RoundedRectangle(cornerRadius: 16) - .fill(.thinMaterial) - ) - } - - @ViewBuilder - private func labeledValue(_ label: String, _ value: String) -> some View { - VStack(alignment: .leading, spacing: 2) { - Text(label) - .font(.caption) - .foregroundStyle(.secondary) - Text(value) - .font(.body.monospaced()) - } - } -} - -private 
extension View { - @ViewBuilder - func burrowLoginField() -> some View { - #if os(iOS) - textInputAutocapitalization(.never) - #else - self - #endif - } - - @ViewBuilder - func burrowEmailField() -> some View { - #if os(iOS) - textInputAutocapitalization(.never) - .keyboardType(.emailAddress) - #else - self - #endif - } -} - -#if canImport(AuthenticationServices) -@MainActor -private final class TailnetBrowserAuthenticator: NSObject { - private var session: ASWebAuthenticationSession? - private static var prefersEphemeralSessionForCurrentProcess: Bool { - let rawValue = ProcessInfo.processInfo.environment["BURROW_UI_TEST_EPHEMERAL_AUTH"]? - .trimmingCharacters(in: .whitespacesAndNewlines) - .lowercased() - return rawValue == "1" || rawValue == "true" || rawValue == "yes" - } - - func start(url: URL, onDismiss: @escaping @Sendable () -> Void) { - cancel() - let session = ASWebAuthenticationSession(url: url, callbackURLScheme: nil) { _, _ in - onDismiss() - } - session.presentationContextProvider = self - session.prefersEphemeralWebBrowserSession = Self.prefersEphemeralSessionForCurrentProcess - self.session = session - _ = session.start() - } - - func cancel() { - session?.cancel() - session = nil - } -} - -extension TailnetBrowserAuthenticator: ASWebAuthenticationPresentationContextProviding { - func presentationAnchor(for session: ASWebAuthenticationSession) -> ASPresentationAnchor { - #if canImport(AppKit) - return NSApplication.shared.keyWindow - ?? NSApplication.shared.windows.first - ?? 
ASPresentationAnchor() - #elseif canImport(UIKit) - return ASPresentationAnchor() - #else - return ASPresentationAnchor() - #endif - } -} -#else -@MainActor -private final class TailnetBrowserAuthenticator { - func start(url: URL, onDismiss: @escaping @Sendable () -> Void) { - _ = url - onDismiss() - } - - func cancel() {} -} -#endif - -private struct BurrowAutomationConfig { - enum Action: String { - case tailnetLogin = "tailnet-login" - case tailnetProbe = "tailnet-probe" - } - - let action: Action - let title: String? - let accountName: String? - let identityName: String? - let hostname: String? - let authority: String? - - static let current: BurrowAutomationConfig? = { - let environment = ProcessInfo.processInfo.environment - guard let rawAction = environment["BURROW_UI_AUTOMATION"], - let action = Action(rawValue: rawAction) - else { - return nil - } - - return BurrowAutomationConfig( - action: action, - title: environment["BURROW_UI_AUTOMATION_TITLE"], - accountName: environment["BURROW_UI_AUTOMATION_ACCOUNT"], - identityName: environment["BURROW_UI_AUTOMATION_IDENTITY"], - hostname: environment["BURROW_UI_AUTOMATION_HOSTNAME"], - authority: environment["BURROW_UI_AUTOMATION_AUTHORITY"] - ) - }() -} - -#if DEBUG -struct NetworkView_Previews: PreviewProvider { - static var previews: some View { - BurrowView() - .environment(\.tunnel, PreviewTunnel()) - } -} -#endif diff --git a/Apple/UI/FloatingButtonStyle.swift b/Apple/UI/FloatingButtonStyle.swift deleted file mode 100644 index 53ab5ed..0000000 --- a/Apple/UI/FloatingButtonStyle.swift +++ /dev/null @@ -1,50 +0,0 @@ -import SwiftUI - -struct FloatingButtonStyle: ButtonStyle { - static let duration = 0.08 - - var color: Color - var cornerRadius: CGFloat - - func makeBody(configuration: Configuration) -> some View { - configuration.label - .font(.headline) - .foregroundColor(.white) - .frame(minHeight: 48) - .padding(.horizontal) - .background( - RoundedRectangle(cornerRadius: cornerRadius) - .fill( - 
LinearGradient( - colors: [ - configuration.isPressed ? color.opacity(0.9) : color.opacity(0.9), - configuration.isPressed ? color.opacity(0.9) : color - ], - startPoint: .init(x: 0.2, y: 0), - endPoint: .init(x: 0.8, y: 1) - ) - ) - .background( - RoundedRectangle(cornerRadius: cornerRadius) - .fill(configuration.isPressed ? .black : .white) - ) - ) - .shadow(color: .black.opacity(configuration.isPressed ? 0.0 : 0.1), radius: 2.5, x: 0, y: 2) - .scaleEffect(configuration.isPressed ? 0.975 : 1.0) - .padding(.bottom, 2) - .animation( - configuration.isPressed ? .easeOut(duration: Self.duration) : .easeIn(duration: Self.duration), - value: configuration.isPressed - ) - } -} - -extension ButtonStyle where Self == FloatingButtonStyle { - static var floating: FloatingButtonStyle { - floating() - } - - static func floating(color: Color = .accentColor, cornerRadius: CGFloat = 10) -> FloatingButtonStyle { - FloatingButtonStyle(color: color, cornerRadius: cornerRadius) - } -} diff --git a/Apple/UI/MenuItemToggleView.swift b/Apple/UI/MenuItemToggleView.swift deleted file mode 100644 index ef5e8ee..0000000 --- a/Apple/UI/MenuItemToggleView.swift +++ /dev/null @@ -1,67 +0,0 @@ -// -// MenuItemToggleView.swift -// App -// -// Created by Thomas Stubblefield on 5/13/23. 
-// - -import SwiftUI - -public struct MenuItemToggleView: View { - @Environment(\.tunnel) - var tunnel: Tunnel - - public var body: some View { - HStack { - VStack(alignment: .leading) { - Text("Burrow") - .font(.headline) - Text(tunnel.status.description) - .font(.subheadline) - } - Spacer() - Toggle(isOn: tunnel.toggleIsOn) { - } - .disabled(tunnel.toggleDisabled) - .toggleStyle(.switch) - } - .accessibilityElement(children: .combine) - .padding(.horizontal, 4) - .padding(10) - .frame(minWidth: 300, minHeight: 32, maxHeight: 32) - } - - public init() { - } -} - -extension Tunnel { - @MainActor fileprivate var toggleDisabled: Bool { - switch status { - case .disconnected, .permissionRequired, .connected, .disconnecting: - false - case .unknown, .disabled, .connecting, .reasserting, .invalid, .configurationReadWriteFailed: - true - } - } - - @MainActor var toggleIsOn: Binding { - Binding { - switch status { - case .connecting, .reasserting, .connected: - true - default: - false - } - } set: { newValue in - switch (status, newValue) { - case (.permissionRequired, true): - enable() - case (_, true): - start() - case (_, false): - stop() - } - } - } -} diff --git a/Apple/UI/NetworkCarouselView.swift b/Apple/UI/NetworkCarouselView.swift deleted file mode 100644 index e7368db..0000000 --- a/Apple/UI/NetworkCarouselView.swift +++ /dev/null @@ -1,61 +0,0 @@ -import SwiftUI - -struct NetworkCarouselView: View { - var networks: [NetworkCardModel] - - var body: some View { - Group { - if networks.isEmpty { - #if os(iOS) - VStack(alignment: .leading, spacing: 6) { - Text("No stored networks yet") - .font(.headline) - Text("WireGuard and Tailnet networks show up here as soon as you add one.") - .font(.footnote) - .foregroundStyle(.secondary) - } - .frame(maxWidth: .infinity, alignment: .leading) - .padding() - .background( - RoundedRectangle(cornerRadius: 18) - .fill(.thinMaterial) - ) - #else - ContentUnavailableView( - "No Networks Yet", - systemImage: "network.slash", - 
description: Text("Add a WireGuard network, or save a Tailnet account so Burrow can store a managed network when the daemon is reachable.") - ) - .frame(maxWidth: .infinity, minHeight: 175) - #endif - } else { - ScrollView(.horizontal) { - LazyHStack { - ForEach(networks) { network in - NetworkView(network: network) - .containerRelativeFrame(.horizontal, count: 10, span: 7, spacing: 0, alignment: .center) - .scrollTransition(.interactive, axis: .horizontal) { content, phase in - content - .scaleEffect(1.0 - abs(phase.value) * 0.1) - } - } - } - } - .scrollTargetLayout() - .scrollClipDisabled() - .scrollIndicators(.hidden) - .defaultScrollAnchor(.center) - .scrollTargetBehavior(.viewAligned) - .containerRelativeFrame(.horizontal) - } - } - } -} - -#if DEBUG -struct NetworkCarouselView_Previews: PreviewProvider { - static var previews: some View { - NetworkCarouselView(networks: [WireGuardCard(id: 1, detail: "10.13.13.2/24 · wg.burrow.rs:51820").card]) - } -} -#endif diff --git a/Apple/UI/NetworkExtensionTunnel.swift b/Apple/UI/NetworkExtensionTunnel.swift deleted file mode 100644 index 23559f3..0000000 --- a/Apple/UI/NetworkExtensionTunnel.swift +++ /dev/null @@ -1,171 +0,0 @@ -import BurrowCore -import NetworkExtension - -@Observable -public final class NetworkExtensionTunnel: Tunnel { - @MainActor public private(set) var status: TunnelStatus = .unknown - @MainActor private var error: NEVPNError? - - private let logger = Logger.logger(for: Tunnel.self) - private let bundleIdentifier: String - private let configurationChanged: Task - private let statusChanged: Task - - // Each manager corresponds to one entry in the Settings app. - // Our goal is to maintain a single manager, so we create one if none exist and delete any extra. - @MainActor private var managers: [NEVPNManager]? 
{ - didSet { Task { await updateStatus() } } - } - - @MainActor private var currentStatus: TunnelStatus { - guard let managers = managers else { - guard let error = error else { - return .unknown - } - - switch error.code { - case .configurationReadWriteFailed: - return .configurationReadWriteFailed - default: - return .unknown - } - } - - guard let manager = managers.first else { - return .permissionRequired - } - - guard manager.isEnabled else { - return .disabled - } - - return manager.connection.tunnelStatus - } - - public init(bundleIdentifier: String) { - self.bundleIdentifier = bundleIdentifier - - let center = NotificationCenter.default - let tunnel: OSAllocatedUnfairLock = .init(initialState: .none) - configurationChanged = Task { - for try await _ in center.notifications(named: .NEVPNConfigurationChange) { - try Task.checkCancellation() - await tunnel.withLock { $0 }?.update() - } - } - statusChanged = Task { - for try await _ in center.notifications(named: .NEVPNStatusDidChange) { - try Task.checkCancellation() - await tunnel.withLock { $0 }?.updateStatus() - } - } - tunnel.withLock { $0 = self } - - Task { await update() } - } - - private func update() async { - do { - let result = try await NETunnelProviderManager.managers - await MainActor.run { - managers = result - status = currentStatus - } - await self.updateStatus() - } catch let vpnError as NEVPNError { - await MainActor.run { - error = vpnError - } - } catch { - logger.error("Failed to update VPN configurations: \(error)") - } - } - - private func updateStatus() async { - await MainActor.run { - status = currentStatus - } - } - - func configure() async throws { - let managers = try await NETunnelProviderManager.managers - if managers.count > 1 { - try await withThrowingTaskGroup(of: Void.self, returning: Void.self) { group in - for manager in managers.suffix(from: 1) { - group.addTask { try await manager.remove() } - } - try await group.waitForAll() - } - } - - guard managers.isEmpty else { 
return } - - let manager = NETunnelProviderManager() - manager.localizedDescription = "Burrow" - - let proto = NETunnelProviderProtocol() - proto.providerBundleIdentifier = bundleIdentifier - proto.serverAddress = "burrow.rs" - - manager.protocolConfiguration = proto - try await manager.save() - } - - public func start() { - Task { - guard let manager = try await NETunnelProviderManager.managers.first else { return } - do { - if !manager.isEnabled { - manager.isEnabled = true - try await manager.save() - } - try manager.connection.startVPNTunnel() - } catch { - logger.error("Failed to start: \(error)") - } - } - } - - public func stop() { - Task { - guard let manager = try await NETunnelProviderManager.managers.first else { return } - manager.connection.stopVPNTunnel() - } - } - - public func enable() { - Task { - do { - try await configure() - } catch { - logger.error("Failed to enable: \(error)") - } - } - } - - deinit { - configurationChanged.cancel() - statusChanged.cancel() - } -} - -extension NEVPNConnection { - fileprivate var tunnelStatus: TunnelStatus { - switch status { - case .connected: - .connected(connectedDate!) 
- case .connecting: - .connecting - case .disconnecting: - .disconnecting - case .disconnected: - .disconnected - case .reasserting: - .reasserting - case .invalid: - .invalid - @unknown default: - .unknown - } - } -} diff --git a/Apple/UI/NetworkView.swift b/Apple/UI/NetworkView.swift deleted file mode 100644 index 437adce..0000000 --- a/Apple/UI/NetworkView.swift +++ /dev/null @@ -1,38 +0,0 @@ -import SwiftUI - -struct NetworkView: View { - var color: Color - var content: () -> Content - - private var gradient: LinearGradient { - LinearGradient( - colors: [ - color.opacity(0.8), - color - ], - startPoint: .init(x: 0.2, y: 0), - endPoint: .init(x: 0.8, y: 1) - ) - } - - var body: some View { - content() - .frame(maxWidth: .infinity, minHeight: 175, maxHeight: 175) - .background( - RoundedRectangle(cornerRadius: 10) - .fill(gradient) - .background( - RoundedRectangle(cornerRadius: 10) - .fill(.white) - ) - ) - .shadow(color: .black.opacity(0.1), radius: 3.0, x: 0, y: 2) - } -} - -extension NetworkView where Content == AnyView { - init(network: NetworkCardModel) { - color = network.backgroundColor - content = { network.label } - } -} diff --git a/Apple/UI/Networks/Network.swift b/Apple/UI/Networks/Network.swift deleted file mode 100644 index 35bd0e1..0000000 --- a/Apple/UI/Networks/Network.swift +++ /dev/null @@ -1,623 +0,0 @@ -import BurrowConfiguration -import BurrowCore -import Foundation -import Security -import SwiftProtobuf -import SwiftUI - -struct NetworkCardModel: Identifiable { - let id: Int32 - let backgroundColor: Color - let label: AnyView -} - -struct TailnetNetworkPayload: Codable, Sendable { - var provider: TailnetProvider - var authority: String? - var account: String - var identity: String - var tailnet: String? - var hostname: String? 
- - func encoded() throws -> Data { - let encoder = JSONEncoder() - encoder.outputFormatting = [.prettyPrinted, .sortedKeys] - return try encoder.encode(self) - } -} - -struct TailnetDiscoveryResponse: Codable, Sendable { - var domain: String - var provider: TailnetProvider - var authority: String - var oidcIssuer: String? -} - -struct TailnetAuthorityProbeStatus: Sendable { - var authority: String - var statusCode: Int - var summary: String - var detail: String? -} - -struct TailnetLoginStatus: Sendable { - var sessionID: String - var backendState: String - var authURL: URL? - var running: Bool - var needsLogin: Bool - var tailnetName: String? - var magicDNSSuffix: String? - var selfDNSName: String? - var tailnetIPs: [String] - var health: [String] -} - -enum TailnetDiscoveryClient { - static func discover(email: String, socketURL: URL) async throws -> TailnetDiscoveryResponse { - var request = Burrow_TailnetDiscoverRequest() - request.email = email - - let response = try await TailnetClient.unix(socketURL: socketURL).discover(request) - return TailnetDiscoveryResponse( - domain: response.domain, - provider: response.managed ? .tailscale : .headscale, - authority: response.authority, - oidcIssuer: response.oidcIssuer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - ? nil - : response.oidcIssuer - ) - } -} - -enum TailnetAuthorityProbeClient { - static func probe(authority: String, socketURL: URL) async throws -> TailnetAuthorityProbeStatus { - var request = Burrow_TailnetProbeRequest() - request.authority = authority - - let response = try await TailnetClient.unix(socketURL: socketURL).probe(request) - return TailnetAuthorityProbeStatus( - authority: response.authority, - statusCode: Int(response.statusCode), - summary: response.summary, - detail: response.detail.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - ? 
nil - : response.detail - ) - } -} - -enum TailnetLoginClient { - static func start( - accountName: String, - identityName: String, - hostname: String?, - authority: String, - socketURL: URL - ) async throws -> TailnetLoginStatus { - var request = Burrow_TailnetLoginStartRequest() - request.accountName = accountName - request.identityName = identityName - request.hostname = hostname ?? "" - request.authority = authority - let response = try await TailnetClient.unix(socketURL: socketURL).loginStart(request) - return decode(response) - } - - static func status(sessionID: String, socketURL: URL) async throws -> TailnetLoginStatus { - var request = Burrow_TailnetLoginStatusRequest() - request.sessionID = sessionID - let response = try await TailnetClient.unix(socketURL: socketURL).loginStatus(request) - return decode(response) - } - - static func cancel(sessionID: String, socketURL: URL) async throws { - var request = Burrow_TailnetLoginCancelRequest() - request.sessionID = sessionID - _ = try await TailnetClient.unix(socketURL: socketURL).loginCancel(request) - } - - private static func decode(_ response: Burrow_TailnetLoginStatusResponse) -> TailnetLoginStatus { - TailnetLoginStatus( - sessionID: response.sessionID, - backendState: response.backendState, - authURL: URL(string: response.authURL.trimmingCharacters(in: .whitespacesAndNewlines)), - running: response.running, - needsLogin: response.needsLogin, - tailnetName: response.tailnetName.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - ? nil - : response.tailnetName, - magicDNSSuffix: response.magicDNSSuffix.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - ? nil - : response.magicDNSSuffix, - selfDNSName: response.selfDNSName.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty - ? 
nil - : response.selfDNSName, - tailnetIPs: response.tailnetIPs, - health: response.health - ) - } -} - -@Observable -@MainActor -final class NetworkViewModel: Sendable { - private(set) var networks: [Burrow_Network] = [] - private(set) var connectionError: String? - private let socketURLResult: Result - - @ObservationIgnored private var task: Task? - - init(socketURLResult: Result) { - self.socketURLResult = socketURLResult - startStreaming() - } - - deinit { - task?.cancel() - } - - var cards: [NetworkCardModel] { - networks.map(Self.makeCard(for:)) - } - - var nextNetworkID: Int32 { - (networks.map(\.id).max() ?? 0) + 1 - } - - func addWireGuardNetwork(configText: String) async throws -> Int32 { - try await addNetwork(type: .wireGuard, payload: Data(configText.utf8)) - } - - func addTailnetNetwork(payload: TailnetNetworkPayload) async throws -> Int32 { - try await addNetwork(type: .tailnet, payload: payload.encoded()) - } - - func discoverTailnet(email: String) async throws -> TailnetDiscoveryResponse { - let socketURL = try socketURLResult.get() - return try await TailnetDiscoveryClient.discover(email: email, socketURL: socketURL) - } - - func probeTailnetAuthority(_ authority: String) async throws -> TailnetAuthorityProbeStatus { - let socketURL = try socketURLResult.get() - return try await TailnetAuthorityProbeClient.probe(authority: authority, socketURL: socketURL) - } - - func startTailnetLogin( - accountName: String, - identityName: String, - hostname: String?, - authority: String - ) async throws -> TailnetLoginStatus { - let socketURL = try socketURLResult.get() - return try await TailnetLoginClient.start( - accountName: accountName, - identityName: identityName, - hostname: hostname, - authority: authority, - socketURL: socketURL - ) - } - - func tailnetLoginStatus(sessionID: String) async throws -> TailnetLoginStatus { - let socketURL = try socketURLResult.get() - return try await TailnetLoginClient.status(sessionID: sessionID, socketURL: socketURL) - 
} - - func cancelTailnetLogin(sessionID: String) async throws { - let socketURL = try socketURLResult.get() - try await TailnetLoginClient.cancel(sessionID: sessionID, socketURL: socketURL) - } - - private func addNetwork(type: Burrow_NetworkType, payload: Data) async throws -> Int32 { - let socketURL = try socketURLResult.get() - let networkID = nextNetworkID - let request = Burrow_Network.with { - $0.id = networkID - $0.type = type - $0.payload = payload - } - - let client = NetworksClient.unix(socketURL: socketURL) - _ = try await client.networkAdd(request) - return networkID - } - - private func startStreaming() { - task?.cancel() - let socketURLResult = self.socketURLResult - task = Task { [weak self] in - do { - let socketURL = try socketURLResult.get() - let client = NetworksClient.unix(socketURL: socketURL) - for try await response in client.networkList(.init()) { - guard !Task.isCancelled else { return } - await MainActor.run { - guard let self else { return } - self.networks = response.network - self.connectionError = nil - } - } - } catch { - guard !Task.isCancelled else { return } - await MainActor.run { - guard let self else { return } - self.connectionError = error.localizedDescription - } - } - } - } - - private static func makeCard(for network: Burrow_Network) -> NetworkCardModel { - switch network.type { - case .wireGuard: - WireGuardCard(network: network).card - case .tailnet: - TailnetCard(network: network).card - case .UNRECOGNIZED(let rawValue): - unsupportedCard( - id: network.id, - title: "Unknown Network", - detail: "Type \(rawValue) is not recognized by this build." - ) - @unknown default: - unsupportedCard( - id: network.id, - title: "Unsupported Network", - detail: "Update Burrow to view this network." 
- ) - } - } - - private static func unsupportedCard(id: Int32, title: String, detail: String) -> NetworkCardModel { - NetworkCardModel( - id: id, - backgroundColor: .gray.opacity(0.85), - label: AnyView( - VStack(alignment: .leading, spacing: 12) { - Text(title) - .font(.title3.weight(.semibold)) - .foregroundStyle(.white) - Text(detail) - .font(.body) - .foregroundStyle(.white.opacity(0.9)) - Spacer() - Text("Network #\(id)") - .font(.footnote.monospaced()) - .foregroundStyle(.white.opacity(0.8)) - } - .padding() - .frame(maxWidth: .infinity, alignment: .leading) - ) - ) - } -} - -enum TailnetProvider: String, CaseIterable, Codable, Identifiable, Sendable { - case tailscale - case headscale - case burrow - - var id: String { rawValue } - - var title: String { - switch self { - case .tailscale: "Tailscale" - case .headscale: "Custom Tailnet" - case .burrow: "Burrow" - } - } - - var defaultAuthority: String? { - switch self { - case .tailscale: - "https://controlplane.tailscale.com" - case .headscale: - "https://ts.burrow.net" - case .burrow: - nil - } - } - - var subtitle: String { - switch self { - case .tailscale: - "Managed Tailnet authority." - case .headscale: - "Custom Tailnet control server." - case .burrow: - "Burrow-native Tailnet authority." - } - } - - static func inferred(authority: String?, explicit: TailnetProvider?) -> TailnetProvider { - if explicit == .burrow { - return .burrow - } - if isManagedTailscaleAuthority(authority) { - return .tailscale - } - return .headscale - } - - static func isManagedTailscaleAuthority(_ authority: String?) -> Bool { - guard let normalized = authority? 
- .trimmingCharacters(in: .whitespacesAndNewlines) - .lowercased() - .trimmingCharacters(in: CharacterSet(charactersIn: "/")), - !normalized.isEmpty - else { - return false - } - - return normalized == "https://controlplane.tailscale.com" - || normalized == "http://controlplane.tailscale.com" - || normalized == "controlplane.tailscale.com" - } -} - -enum AccountNetworkKind: String, CaseIterable, Codable, Identifiable, Sendable { - case wireGuard - case tor - case tailnet - - var id: String { rawValue } - - var title: String { - switch self { - case .wireGuard: "WireGuard" - case .tor: "Tor" - case .tailnet: "Tailnet" - } - } - - var subtitle: String { - switch self { - case .wireGuard: "Import a tunnel and optional account metadata." - case .tor: "Store Arti account and identity preferences." - case .tailnet: "Save Tailnet authority, identity defaults, and login material." - } - } - - var accentColor: Color { - switch self { - case .wireGuard: .init("WireGuard") - case .tor: .orange - case .tailnet: .mint - } - } - - var actionTitle: String { - switch self { - case .wireGuard: "Add Network" - case .tor: "Save Account" - case .tailnet: "Save Account" - } - } - - var availabilityNote: String? { - switch self { - case .wireGuard: - nil - case .tor: - "Tor account preferences are stored on Apple now. The managed Tor runtime is not wired on Apple in this branch yet." - case .tailnet: - "Tailnet accounts can sign in from Apple now. The managed Apple runtime is still pending, but Tailnet networks can already be stored in the daemon." 
- } - } -} - -enum AccountAuthMode: String, CaseIterable, Codable, Identifiable, Sendable { - case web - case none - case password - case preauthKey - - var id: String { rawValue } - - var title: String { - switch self { - case .web: "Browser Sign-In" - case .none: "None" - case .password: "Password" - case .preauthKey: "Preauth Key" - } - } -} - -struct NetworkAccountRecord: Codable, Identifiable, Hashable, Sendable { - let id: UUID - var kind: AccountNetworkKind - var title: String - var authority: String? - var provider: TailnetProvider? - var accountName: String - var identityName: String - var hostname: String? - var username: String? - var tailnet: String? - var authMode: AccountAuthMode - var note: String? - var createdAt: Date - var updatedAt: Date -} - -struct TailnetCard { - var id: Int32 - var title: String - var detail: String - - init(network: Burrow_Network) { - let payload = (try? JSONDecoder().decode(TailnetNetworkPayload.self, from: network.payload)) - id = network.id - title = payload?.tailnet ?? payload?.hostname ?? "Tailnet" - detail = [ - payload?.authority.flatMap { URL(string: $0)?.host } ?? 
payload?.authority, - payload?.authority, - payload.map { "Account: \($0.account)" }, - ] - .compactMap { $0 } - .joined(separator: " · ") - .ifEmpty("Stored Tailnet configuration") - } - - var card: NetworkCardModel { - NetworkCardModel( - id: id, - backgroundColor: .mint, - label: AnyView( - VStack(alignment: .leading, spacing: 12) { - HStack { - VStack(alignment: .leading, spacing: 4) { - Text("Tailnet") - .font(.headline) - .foregroundStyle(.white.opacity(0.85)) - Text(title) - .font(.title3.weight(.semibold)) - .foregroundStyle(.white) - } - Spacer() - } - Spacer() - Text(detail) - .font(.body.monospaced()) - .foregroundStyle(.white.opacity(0.92)) - .lineLimit(4) - } - .padding() - .frame(maxWidth: .infinity, alignment: .leading) - ) - ) - } -} - -@Observable -@MainActor -final class NetworkAccountStore { - private static let storageKey = "burrow.network-accounts" - - private let defaults: UserDefaults - private(set) var accounts: [NetworkAccountRecord] = [] - - init(defaults: UserDefaults = UserDefaults(suiteName: Constants.appGroupIdentifier) ?? .standard) { - self.defaults = defaults - load() - } - - func upsert(_ record: NetworkAccountRecord, secret: String?) 
throws { - if let index = accounts.firstIndex(where: { $0.id == record.id }) { - accounts[index] = record - } else { - accounts.append(record) - } - accounts.sort { - if $0.kind == $1.kind { - return $0.title.localizedCaseInsensitiveCompare($1.title) == .orderedAscending - } - return $0.kind.rawValue < $1.kind.rawValue - } - try persist() - if let secret, !secret.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty { - try AccountSecretStore.store(secret, for: record.id) - } else { - try AccountSecretStore.removeSecret(for: record.id) - } - } - - func delete(_ record: NetworkAccountRecord) throws { - accounts.removeAll { $0.id == record.id } - try persist() - try AccountSecretStore.removeSecret(for: record.id) - } - - func hasStoredSecret(for record: NetworkAccountRecord) -> Bool { - AccountSecretStore.hasSecret(for: record.id) - } - - private func load() { - guard let data = defaults.data(forKey: Self.storageKey) else { - accounts = [] - return - } - - do { - accounts = try JSONDecoder().decode([NetworkAccountRecord].self, from: data) - } catch { - accounts = [] - } - } - - private func persist() throws { - let data = try JSONEncoder().encode(accounts) - defaults.set(data, forKey: Self.storageKey) - } -} - -private enum AccountSecretStore { - private static let service = "\(Constants.bundleIdentifier).accounts" - - static func hasSecret(for accountID: UUID) -> Bool { - let query = baseQuery(for: accountID) - return SecItemCopyMatching(query as CFDictionary, nil) == errSecSuccess - } - - static func store(_ secret: String, for accountID: UUID) throws { - let data = Data(secret.utf8) - let query = baseQuery(for: accountID) - let status = SecItemCopyMatching(query as CFDictionary, nil) - - if status == errSecSuccess { - let updateStatus = SecItemUpdate( - query as CFDictionary, - [kSecValueData as String: data] as CFDictionary - ) - guard updateStatus == errSecSuccess else { - throw AccountSecretStoreError.osStatus(updateStatus) - } - return - } - - var item = 
query - item[kSecValueData as String] = data - item[kSecAttrAccessible as String] = kSecAttrAccessibleAfterFirstUnlock - let addStatus = SecItemAdd(item as CFDictionary, nil) - guard addStatus == errSecSuccess else { - throw AccountSecretStoreError.osStatus(addStatus) - } - } - - static func removeSecret(for accountID: UUID) throws { - let status = SecItemDelete(baseQuery(for: accountID) as CFDictionary) - guard status == errSecSuccess || status == errSecItemNotFound else { - throw AccountSecretStoreError.osStatus(status) - } - } - - private static func baseQuery(for accountID: UUID) -> [String: Any] { - [ - kSecClass as String: kSecClassGenericPassword, - kSecAttrService as String: service, - kSecAttrAccount as String: accountID.uuidString, - ] - } -} - -private enum AccountSecretStoreError: LocalizedError { - case osStatus(OSStatus) - - var errorDescription: String? { - switch self { - case .osStatus(let status): - if let message = SecCopyErrorMessageString(status, nil) as String? { - return message - } - return "Keychain error \(status)" - } - } -} - -private extension String { - func ifEmpty(_ fallback: @autoclosure () -> String) -> String { - isEmpty ? fallback() : self - } -} diff --git a/Apple/UI/Networks/WireGuard.swift b/Apple/UI/Networks/WireGuard.swift deleted file mode 100644 index c0426cd..0000000 --- a/Apple/UI/Networks/WireGuard.swift +++ /dev/null @@ -1,77 +0,0 @@ -import BurrowCore -import Foundation -import SwiftUI - -struct WireGuardCard { - var id: Int32 - var title: String - var detail: String - - init(id: Int32, title: String = "WireGuard", detail: String = "Stored configuration") { - self.id = id - self.title = title - self.detail = detail - } - - init(network: Burrow_Network) { - let payload = String(data: network.payload, encoding: .utf8) ?? 
"" - let address = Self.firstValue(for: "Address", in: payload) - let endpoint = Self.firstValue(for: "Endpoint", in: payload) - self.id = network.id - self.title = "WireGuard" - self.detail = [address, endpoint] - .compactMap { $0 } - .filter { !$0.isEmpty } - .joined(separator: " · ") - .ifEmpty("Stored configuration") - } - - var card: NetworkCardModel { - NetworkCardModel( - id: id, - backgroundColor: .init("WireGuard"), - label: AnyView(label) - ) - } - - private var label: some View { - GeometryReader { reader in - VStack(alignment: .leading) { - HStack { - Image("WireGuard") - .resizable() - .aspectRatio(contentMode: .fit) - Image("WireGuardTitle") - .resizable() - .aspectRatio(contentMode: .fit) - .frame(width: reader.size.width / 2) - Spacer() - } - .frame(maxWidth: .infinity, maxHeight: reader.size.height / 4) - Spacer() - Text(detail) - .foregroundStyle(.white) - .font(.body.monospaced()) - .lineLimit(3) - } - .padding() - .frame(maxWidth: .infinity) - } - } - - private static func firstValue(for key: String, in config: String) -> String? { - config - .split(whereSeparator: \.isNewline) - .map(String.init) - .first(where: { $0.hasPrefix("\(key) = ") })? - .split(separator: "=", maxSplits: 1) - .last - .map { $0.trimmingCharacters(in: .whitespaces) } - } -} - -private extension String { - func ifEmpty(_ fallback: @autoclosure () -> String) -> String { - isEmpty ? 
fallback() : self - } -} diff --git a/Apple/UI/Tunnel.swift b/Apple/UI/Tunnel.swift deleted file mode 100644 index 4ec9320..0000000 --- a/Apple/UI/Tunnel.swift +++ /dev/null @@ -1,61 +0,0 @@ -import BurrowConfiguration -import NetworkExtension -import SwiftUI - -protocol Tunnel: Sendable { - @MainActor var status: TunnelStatus { get } - - func start() - func stop() - func enable() -} - -public enum TunnelStatus: Sendable, Equatable, Hashable { - case unknown - case permissionRequired - case disabled - case connecting - case connected(Date) - case disconnecting - case disconnected - case reasserting - case invalid - case configurationReadWriteFailed -} - -struct TunnelKey: EnvironmentKey { - static var defaultValue: any Tunnel { - NetworkExtensionTunnel(bundleIdentifier: Constants.networkExtensionBundleIdentifier) - } -} - -extension EnvironmentValues { - var tunnel: any Tunnel { - get { self[TunnelKey.self] } - set { self[TunnelKey.self] = newValue } - } -} - -#if DEBUG -@Observable -@MainActor -final class PreviewTunnel: Tunnel { - private(set) var status: TunnelStatus = .permissionRequired - - nonisolated func start() { - set(.connected(.now)) - } - - nonisolated func stop() { - set(.disconnected) - } - - nonisolated func enable() { - set(.disconnected) - } - - nonisolated private func set(_ status: TunnelStatus) { - Task { @MainActor in self.status = status } - } -} -#endif diff --git a/Apple/UI/TunnelButton.swift b/Apple/UI/TunnelButton.swift deleted file mode 100644 index d0222d4..0000000 --- a/Apple/UI/TunnelButton.swift +++ /dev/null @@ -1,73 +0,0 @@ -import SwiftUI - -struct TunnelButton: View { - @Environment(\.tunnel) - var tunnel: any Tunnel - - private var action: Action? 
{ tunnel.action } - - var body: some View { - Button { - if let action { - tunnel.perform(action) - } - } label: { - Text(action.description) - } - .disabled(action.isDisabled) - .padding(.horizontal) - .buttonStyle(.floating) - } -} - -extension Tunnel { - @MainActor fileprivate var action: TunnelButton.Action? { - switch status { - case .permissionRequired, .invalid: - .enable - case .disabled, .disconnecting, .disconnected: - .start - case .connecting, .connected, .reasserting: - .stop - case .unknown, .configurationReadWriteFailed: - nil - } - } -} - -extension TunnelButton { - fileprivate enum Action { - case enable - case start - case stop - } -} - -extension TunnelButton.Action? { - var description: LocalizedStringKey { - switch self { - case .enable: "Enable" - case .start: "Start" - case .stop: "Stop" - case .none: "Start" - } - } - - var isDisabled: Bool { - if case .none = self { - true - } else { - false - } - } -} - -extension Tunnel { - fileprivate func perform(_ action: TunnelButton.Action) { - switch action { - case .enable: enable() - case .start: start() - case .stop: stop() - } - } -} diff --git a/Apple/UI/TunnelStatusView.swift b/Apple/UI/TunnelStatusView.swift deleted file mode 100644 index 15717ec..0000000 --- a/Apple/UI/TunnelStatusView.swift +++ /dev/null @@ -1,37 +0,0 @@ -import SwiftUI - -struct TunnelStatusView: View { - @Environment(\.tunnel) - var tunnel: any Tunnel - - var body: some View { - Text(tunnel.status.description) - } -} - -extension TunnelStatus: CustomStringConvertible { - public var description: String { - switch self { - case .unknown: - "Unknown" - case .permissionRequired: - "Permission Required" - case .disconnected: - "Disconnected" - case .disabled: - "Disabled" - case .connecting: - "Connecting…" - case .connected: - "Connected" - case .disconnecting: - "Disconnecting…" - case .reasserting: - "Reasserting…" - case .invalid: - "Invalid" - case .configurationReadWriteFailed: - "System Error" - } - } -} diff --git 
a/Apple/UI/UI.xcconfig b/Apple/UI/UI.xcconfig deleted file mode 100644 index b44d676..0000000 --- a/Apple/UI/UI.xcconfig +++ /dev/null @@ -1,3 +0,0 @@ -#include "../Configuration/Framework.xcconfig" - -ENABLE_PREVIEWS = YES diff --git a/CONSTITUTION.md b/CONSTITUTION.md deleted file mode 100644 index f97e683..0000000 --- a/CONSTITUTION.md +++ /dev/null @@ -1,38 +0,0 @@ -# Burrow Constitution - -1. Mission - -Burrow exists to build a proper VPN: fast, inspectable, deployable on infrastructure the project controls, and legible enough that future contributors can extend it without guesswork. - -2. Commitments - -- Protocol work must favor correctness over novelty. Burrow does not claim support for a transport or control-plane feature until the wire format, state handling, and recovery behavior are implemented and tested. -- Security is a design constraint, not a cleanup phase. Key material, bootstrap credentials, control-plane tokens, and routing policy must have explicit storage and rotation paths. -- Performance matters. Burrow should avoid needless copies, hidden blocking, and ad hoc process graphs that make packet forwarding or control-plane convergence harder to reason about. -- Source, infrastructure, and release logic live in the repository. If the forge cannot be rebuilt from the tree, the work is incomplete. -- Non-trivial changes require a Burrow Evolution Proposal. Durable rationale belongs in the repository, not only in chat. - -3. Infrastructure - -Burrow controls its own forge, runners, deployment automation, and edge configuration for `burrow.net` and `burrow.rs`. - -- Dedicated compute is preferred over SaaS dependencies when the dependency would hold release, source, or identity authority. -- Secrets may be bootstrapped from local intake for initial bring-up, but long-lived operation must converge on encrypted, versioned secret handling. -- Production access must be attributable. 
Automation identities, SSH keys, and service accounts must be named and documented. - -4. Contributors - -- Read this constitution before drafting product, protocol, or infrastructure changes. -- Capture intent, testing expectations, and rollback procedures in proposals. -- Prefer reversible migrations. If a change is destructive, document the preconditions and teardown plan first. -- Security-sensitive work requires explicit reviewer attention, even when the implementation is performed by an agent. - -5. Governance - -- Burrow Evolution Proposals (BEPs) are the primary design record for architectural, protocol, forge, and deployment changes. -- Accepted proposals are authoritative until superseded. -- Constitutional changes require a dedicated proposal that quotes the affected text and records the decision. - -6. Origin - -Burrow started as a firewall-burrowing client and now carries its own transport, daemon, mesh, and control-plane work. This constitution exists so the project can finish that evolution coherently. diff --git a/Cargo.lock b/Cargo.lock index 2950701..a75bd28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,21 +1,21 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 4 +version = 3 [[package]] name = "addr2line" -version = "0.24.2" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] [[package]] -name = "adler2" -version = "2.0.1" +name = "adler" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aead" @@ -29,413 +29,129 @@ dependencies = [ [[package]] name = "aes" -version = "0.8.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" dependencies = [ "cfg-if", "cipher", "cpufeatures", - "zeroize", ] [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] -[[package]] -name = "alloca" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7d05ea6aea7e9e64d25b9156ba2fee3fdd659e34e41063cd2fc7cd020d7f4" -dependencies = [ - "cc", -] - -[[package]] -name = "amplify" -version = "4.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f7fb4ac7c881e54a8e7015e399b6112a2a5bc958b6c89ac510840ff20273b31" -dependencies = [ - "amplify_derive", - "amplify_num", - "ascii", - "getrandom 0.2.16", - "getrandom 0.3.3", - "wasm-bindgen", -] - -[[package]] -name = "amplify_derive" -version = 
"4.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a6309e6b8d89b36b9f959b7a8fa093583b94922a0f6438a24fb08936de4d428" -dependencies = [ - "amplify_syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "amplify_num" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99bcb75a2982047f733547042fc3968c0f460dfcf7d90b90dea3b2744580e9ad" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "amplify_syn" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7736fb8d473c0d83098b5bac44df6a561e20470375cd8bcae30516dc889fd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - [[package]] name = "anstream" -version = "0.6.20" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" +checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.11" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.7" +version = "0.2.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.4" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.10" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", - "once_cell_polyfill", - "windows-sys 0.60.2", + "windows-sys 0.52.0", ] [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" - -[[package]] -name = "argon2" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3610892ee6e0cbce8ae2700349fcf8f98adb0dbfbee85aec3c9179d29cc072" -dependencies = [ - "base64ct", - "blake2", - "cpufeatures", - "password-hash 0.5.0", -] - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" - -[[package]] -name = "arti-client" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89842cae6e3bda0fd128a5c66eb3392ed412065dc698c77d9fcc4b77e4159f2" -dependencies = [ - "async-trait", - "cfg-if", - "derive-deftly", - 
"derive_builder_fork_arti", - "derive_more", - "educe", - "fs-mistrust", - "futures", - "hostname-validator", - "humantime", - "humantime-serde", - "libc", - "once_cell", - "postage", - "rand 0.9.2", - "safelog", - "serde", - "thiserror 2.0.16", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-chanmgr", - "tor-circmgr", - "tor-config", - "tor-config-path", - "tor-dircommon", - "tor-dirmgr", - "tor-error", - "tor-guardmgr", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "ascii" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" - -[[package]] -name = "asn1-rs" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" -dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", - "displaydoc", - "nom 7.1.3", - "num-traits", - "rusticata-macros", - "thiserror 2.0.16", -] - -[[package]] -name = "asn1-rs-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", - "synstructure", -] - -[[package]] -name = "asn1-rs-impl" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "assert_matches" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" +checksum = 
"080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" [[package]] name = "async-channel" -version = "2.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +checksum = "1ca33f4bc4ed1babef42cad36cc1f51fa88be00420404e5b1e80ab1b18f7678c" dependencies = [ "concurrent-queue", + "event-listener", "event-listener-strategy", "futures-core", "pin-project-lite", ] -[[package]] -name = "async-compression" -version = "0.4.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93c1f86859c1af3d514fa19e8323147ff10ea98684e6c7b307912509f50e67b2" -dependencies = [ - "compression-codecs", - "compression-core", - "futures-core", - "futures-io", - "pin-project-lite", -] - -[[package]] -name = "async-native-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9343dc5acf07e79ff82d0c37899f079db3534d99f189a1837c8e549c99405bec" -dependencies = [ - "futures-util", - "native-tls", - "thiserror 1.0.69", - "url", -] - [[package]] name = "async-stream" -version = "0.2.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22068c0c19514942eefcfd4daf8976ef1aad84e61539f95cd200c35202f80af5" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" dependencies = [ - "async-stream-impl 0.2.1", - "futures-core", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl 0.3.6", + "async-stream-impl", "futures-core", "pin-project-lite", ] [[package]] name = "async-stream-impl" -version = "0.2.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"25f9db3b38af870bf7e5cc649167533b493928e50744e2c30ae350230b414670" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "async-trait" -version = "0.1.89" +version = "0.1.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] -[[package]] -name = "async_executors" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a982d2f86de6137cc05c9db9a915a19886c97911f9790d04f174cede74be01a5" -dependencies = [ - "blanket", - "futures-core", - "futures-task", - "futures-util", - "pin-project", - "rustc_version", - "tokio", -] - -[[package]] -name = "asynchronous-codec" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" -dependencies = [ - "bytes", - "futures-sink", - "futures-util", - "memchr", - "pin-project-lite", -] - -[[package]] -name = "atomic" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" - -[[package]] -name = "atomic" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - [[package]] name = "autocfg" -version = "1.5.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" @@ -444,13 +160,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", - "axum-core 0.3.4", + "axum-core", "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", + "http", + "http-body", + "hyper", "itoa", "matchit", "memchr", @@ -459,46 +175,12 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 0.1.2", - "tower 0.4.13", + "sync_wrapper", + "tower", "tower-layer", "tower-service", ] -[[package]] -name = "axum" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" -dependencies = [ - "async-trait", - "axum-core 0.4.5", - "bytes", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.7.0", - "hyper-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper 1.0.2", - "tokio", - "tower 0.5.2", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "axum-core" version = "0.3.4" @@ -508,83 +190,40 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http", + "http-body", "mime", "rustversion", "tower-layer", "tower-service", ] -[[package]] -name = "axum-core" -version = 
"0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper 1.0.2", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "backtrace" -version = "0.3.75" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", + "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", ] -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - [[package]] name = "base64" version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - [[package]] name = "base64ct" -version = "1.8.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" - -[[package]] -name = "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "serde", - "unty", -] +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bindgen" @@ -602,7 +241,7 @@ 
dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", "syn 1.0.109", "which", @@ -625,9 +264,9 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", - "syn 2.0.106", + "syn 2.0.48", "which", ] @@ -639,21 +278,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.4" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "blake2" @@ -664,17 +291,6 @@ dependencies = [ "digest", ] -[[package]] -name = "blanket" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0b121a9fe0df916e362fb3271088d071159cdf11db0e4182d02152850756eff" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "block-buffer" version = "0.10.4" @@ -684,22 +300,11 @@ dependencies = [ "generic-array", ] -[[package]] -name = "bstr" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" -dependencies = [ - "memchr", - "regex-automata", - "serde", -] - [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "burrow" @@ -707,57 +312,32 @@ version = 
"0.1.0" dependencies = [ "aead", "anyhow", - "argon2", - "arti-client", "async-channel", - "async-stream 0.2.1", - "axum 0.7.9", - "base64 0.21.7", + "base64", "blake2", - "bytes", "caps", "chacha20poly1305", "clap", "console", "console-subscriber", - "dotenv", "fehler", "futures", - "hickory-proto", "hmac", - "hyper-util", "insta", "ip_network", "ip_network_table", - "ipnetwork", - "libc", "libsystemd", "log", - "netstack-smoltcp", "nix 0.27.1", "once_cell", "parking_lot", - "prost 0.13.5", - "prost-types 0.13.5", - "rand 0.8.5", - "rand_core 0.6.4", - "reqwest 0.12.23", + "rand", + "rand_core", "ring", - "rusqlite", - "rust-ini", - "schemars 0.8.22", + "schemars", "serde", "serde_json", - "subtle", - "tempfile", "tokio", - "tokio-stream", - "tokio-util", - "toml 0.8.23", - "tonic 0.12.3", - "tonic-build", - "tor-rtcompat", - "tower 0.4.13", "tracing", "tracing-journald", "tracing-log 0.1.4", @@ -767,18 +347,6 @@ dependencies = [ "x25519-dalek", ] -[[package]] -name = "by_address" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64fa3c856b712db6612c019f14756e64e4bcea13337a6b33b696333a9eaa2d06" - -[[package]] -name = "bytemuck" -version = "1.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" - [[package]] name = "byteorder" version = "1.5.0" @@ -787,9 +355,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "bzip2" @@ -803,11 +371,12 @@ dependencies = [ [[package]] name = "bzip2-sys" -version = "0.1.13+1.0.8" +version = "0.1.11+1.0.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" dependencies = [ "cc", + "libc", "pkg-config", ] @@ -818,31 +387,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "190baaad529bcfbde9e1a19022c42781bdb6ff9de25721abdb8fd98c0807730b" dependencies = [ "libc", - "thiserror 1.0.69", + "thiserror", ] -[[package]] -name = "caret" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beae2cb9f60bc3f21effaaf9c64e51f6627edd54eedc9199ba07f519ef2a2101" - -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" -version = "1.2.38" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80f41ae168f955c12fb8960b057d70d0ca153fb83182b57d86380443527be7e9" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ - "find-msvc-tools", "jobserver", "libc", - "shlex", ] [[package]] @@ -851,20 +406,14 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ - "nom 7.1.3", + "nom", ] [[package]] name = "cfg-if" -version = "1.0.3" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" 
@@ -890,45 +439,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "chrono" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" -dependencies = [ - "iana-time-zone", - "num-traits", - "serde", - "windows-link 0.2.1", -] - -[[package]] -name = "ciborium" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" -dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" - -[[package]] -name = "ciborium-ll" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" -dependencies = [ - "ciborium-io", - "half", -] - [[package]] name = "cipher" version = "0.4.4" @@ -942,20 +452,20 @@ dependencies = [ [[package]] name = "clang-sys" -version = "1.8.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1" dependencies = [ "glob", "libc", - "libloading 0.8.9", + "libloading 0.8.1", ] [[package]] name = "clap" -version = "4.5.48" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" +checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" dependencies = [ "clap_builder", "clap_derive", @@ -963,90 +473,60 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.48" +version = "4.4.18" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" +checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", ] [[package]] name = "clap_derive" -version = "4.5.47" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "clap_lex" -version = "0.7.5" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" - -[[package]] -name = "coarsetime" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e58eb270476aa4fc7843849f8a35063e8743b4dbcdf6dd0f8ea0886980c204c2" -dependencies = [ - "libc", - "wasix", - "wasm-bindgen", -] +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "colorchoice" -version = "1.0.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" - -[[package]] -name = "compression-codecs" -version = "0.4.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "680dc087785c5230f8e8843e2e57ac7c1c90488b6a91b88caa265410568f441b" -dependencies = [ - "compression-core", - "flate2", - "liblzma", - "zstd 0.13.3", - "zstd-safe 7.2.4", -] - -[[package]] -name = "compression-core" -version = "0.4.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" +checksum = 
"acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "concurrent-queue" -version = "2.5.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" dependencies = [ "crossbeam-utils", ] [[package]] name = "console" -version = "0.15.11" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode", + "lazy_static", "libc", - "once_cell", - "unicode-width 0.2.1", - "windows-sys 0.59.0", + "unicode-width", + "windows-sys 0.52.0", ] [[package]] @@ -1056,9 +536,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" dependencies = [ "futures-core", - "prost 0.12.6", - "prost-types 0.12.6", - "tonic 0.10.2", + "prost", + "prost-types", + "tonic", "tracing-core", ] @@ -1074,68 +554,24 @@ dependencies = [ "futures-task", "hdrhistogram", "humantime", - "prost-types 0.12.6", + "prost-types", "serde", "serde_json", "thread_local", "tokio", "tokio-stream", - "tonic 0.10.2", + "tonic", "tracing", "tracing-core", "tracing-subscriber", ] -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "const-random" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" -dependencies = [ - "const-random-macro", -] - -[[package]] -name = "const-random-macro" -version = "0.1.16" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" -dependencies = [ - "getrandom 0.2.16", - "once_cell", - "tiny-keccak", -] - [[package]] name = "constant_time_eq" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" -[[package]] -name = "convert_case" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "cookie-factory" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2" -dependencies = [ - "futures", -] - [[package]] name = "core-foundation" version = "0.9.4" @@ -1148,139 +584,42 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.7" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.17" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crc32fast" -version = "1.5.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.8.2" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "950046b2aa2492f9a536f5f4f9a3de7b9e2476e575e05bd6c333371add4d98f3" -dependencies = [ - "alloca", - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "itertools 0.13.0", - "num-traits", - "oorandom", - "page_size", - "plotters", - "rayon", - "regex", - "serde", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-cycles-per-byte" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5396de42a52e9e5d8f67ef0702dae30451f310a9ba1c3094dcf228f0be0e54bc" -dependencies = [ - "cfg-if", - "criterion", -] - -[[package]] -name = "criterion-plot" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8d80a2f4f5b554395e47b5d8305bc3d27813bacb73493eb1001e8f76dae29ea" -dependencies = [ - "cast", - "itertools 0.13.0", -] - -[[package]] -name = "critical-section" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" - [[package]] name = "crossbeam-channel" -version = "0.5.15" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +checksum = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.21" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crypto-common" @@ -1289,30 +628,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "typenum", ] -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - [[package]] name = "curve25519-dalek" -version = "4.1.3" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" dependencies = [ "cfg-if", "cpufeatures", "curve25519-dalek-derive", - "digest", "fiat-crypto", + "platforms", "rustc_version", "subtle", "zeroize", @@ 
-1326,273 +656,16 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "darling" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" -dependencies = [ - "darling_core 0.14.4", - "darling_macro 0.14.4", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" -dependencies = [ - "darling_core 0.23.0", - "darling_macro 0.23.0", -] - -[[package]] -name = "darling_core" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 1.0.109", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "darling_core" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" -dependencies = [ - "ident_case", - "proc-macro2", - "quote", - "strsim 0.11.1", - "syn 2.0.106", -] - -[[package]] -name = "darling_macro" -version = "0.14.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" -dependencies = [ - "darling_core 0.14.4", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "darling_macro" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" -dependencies = [ - "darling_core 0.23.0", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "data-encoding" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" - -[[package]] -name = "defmt" -version = "0.3.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0963443817029b2024136fc4dd07a5107eb8f977eaf18fcd1fdeb11306b64ad" -dependencies = [ - "defmt 1.0.1", -] - -[[package]] -name = "defmt" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "548d977b6da32fa1d1fda2876453da1e7df63ad0304c8b3dae4dbe7b96f39b78" -dependencies = [ - "bitflags 1.3.2", - "defmt-macros", -] - -[[package]] -name = "defmt-macros" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d4fc12a85bcf441cfe44344c4b72d58493178ce635338a3f3b78943aceb258e" -dependencies = [ - "defmt-parser", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "defmt-parser" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10d60334b3b2e7c9d91ef8150abfb6fa4c1c39ebbcf4a81c2e346aad939fee3e" -dependencies = [ - "thiserror 
2.0.16", -] - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "pem-rfc7468", - "zeroize", -] - -[[package]] -name = "der-parser" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" -dependencies = [ - "asn1-rs", - "cookie-factory", - "displaydoc", - "nom 7.1.3", - "num-traits", - "rusticata-macros", + "syn 2.0.48", ] [[package]] name = "deranged" -version = "0.5.3" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d630bccd429a5bb5a64b5e94f693bfc48c9f8566418fda4c494cc94f911f87cc" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", - "serde", -] - -[[package]] -name = "derive-deftly" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284db66a66f03c3dafbe17360d959eb76b83f77cfe191677e2a7899c0da291f3" -dependencies = [ - "derive-deftly-macros", - "heck", -] - -[[package]] -name = "derive-deftly-macros" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caef6056a5788d05d173cdc3c562ac28ae093828f851f69378b74e4e3d578e41" -dependencies = [ - "heck", - "indexmap 2.11.4", - "itertools 0.14.0", - "proc-macro-crate", - "proc-macro2", - "quote", - "sha3", - "strum", - "syn 2.0.106", - "void", -] - -[[package]] -name = "derive_builder_core_fork_arti" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24c1b715c79be6328caa9a5e1a387a196ea503740f0722ec3dd8f67a9e72314d" -dependencies = [ - "darling 0.14.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive_builder_fork_arti" -version = "0.11.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3eae24d595f4d0ecc90a9a5a6d11c2bd8dafe2375ec4a1ec63250e5ade7d228" -dependencies = [ - "derive_builder_macro_fork_arti", -] - -[[package]] -name = "derive_builder_macro_fork_arti" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69887769a2489cd946bf782eb2b1bb2cb7bc88551440c94a765d4f040c08ebf3" -dependencies = [ - "derive_builder_core_fork_arti", - "syn 1.0.109", -] - -[[package]] -name = "derive_more" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" -dependencies = [ - "convert_case", - "proc-macro2", - "quote", - "syn 2.0.106", - "unicode-xid", ] [[package]] @@ -1602,259 +675,58 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid", "crypto-common", "subtle", ] -[[package]] -name = "directories" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - 
"redox_users", - "windows-sys 0.61.0", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "dlv-list" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" -dependencies = [ - "const-random", -] - -[[package]] -name = "dotenv" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" - -[[package]] -name = "downcast-rs" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "117240f60069e65410b3ae1bb213295bd828f707b5bec6596a1afc8793ce0cbc" - [[package]] name = "dyn-clone" -version = "1.0.20" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest", - "elliptic-curve", - "rfc6979", - "signature", - "spki", -] - -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "pkcs8", - "signature", -] - -[[package]] -name = "ed25519-dalek" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" -dependencies = [ - "curve25519-dalek", - "ed25519", - "merlin", - "rand_core 0.6.4", - "serde", 
- "sha2", - "subtle", - "zeroize", -] - -[[package]] -name = "educe" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 1.0.109", -] +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "either" -version = "1.15.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "subtle", - "zeroize", -] +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encode_unicode" -version = "1.0.0" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.35" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if", ] -[[package]] -name = "enum-as-inner" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = 
"enum-ordinalize" -version = "3.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "enum_dispatch" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" -dependencies = [ - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "enumset" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b07a8dfbbbfc0064c0a6bdf9edcf966de6b1c33ce344bdeca3b41615452634" -dependencies = [ - "enumset_derive", -] - -[[package]] -name = "enumset_derive" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43e744e4ea338060faee68ed933e46e722fb7f3617e722a5772d7e856d8b3ce" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "equivalent" -version = "1.0.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.14" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys 0.61.0", -] - -[[package]] -name = "etherparse" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d8a704b617484e9d867a0423cd45f7577f008c4068e2e33378f8d3860a6d73" -dependencies = [ - "arrayvec", + "windows-sys 0.52.0", ] 
[[package]] name = "event-listener" -version = "5.4.1" +version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" dependencies = [ "concurrent-queue", "parking", @@ -1863,31 +735,19 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.4" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" dependencies = [ "event-listener", "pin-project-lite", ] -[[package]] -name = "fallible-iterator" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" - -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - [[package]] name = "fastrand" -version = "2.3.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fehler" @@ -1909,92 +769,28 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "ff" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "fiat-crypto" -version = "0.2.9" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" - -[[package]] -name = "figment" -version = "0.10.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" -dependencies = [ - "atomic 0.6.1", - "serde", - "toml 0.8.23", - "uncased", - "version_check", -] - -[[package]] -name = "filetime" -version = "0.2.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" -dependencies = [ - "cfg-if", - "libc", - "libredox", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" - -[[package]] -name = "fixedbitset" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" [[package]] name = "flate2" -version = "1.1.2" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", ] -[[package]] -name = "fluid-let" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749cff877dc1af878a0b31a41dd221a753634401ea0ef2f87b62d3171522485a" - [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - [[package]] name = "foreign-types" version = "0.3.2" @@ -2012,49 +808,18 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.2" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] -[[package]] -name = "fs-mistrust" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f5ac9f88fd18733e0f9ce1f4a95c40eb1d4f83131bf1472e81d1f128fefb7c2" -dependencies = [ - "derive_builder_fork_arti", - "dirs", - "libc", - "pwd-grp", - "serde", - "thiserror 2.0.16", - "walkdir", -] - -[[package]] -name = "fslock" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04412b8935272e3a9bae6f48c7bfff74c2911f60525404edfdd28e49884c3bfb" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - [[package]] name = "futures" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -2067,9 +832,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.30" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -2077,15 +842,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -2094,38 +859,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version 
= "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -2147,148 +912,50 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", - "zeroize", ] [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi 0.11.1+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi 5.3.0", - "wasi 0.14.7+wasi-0.2.4", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", - "r-efi 6.0.0", - "wasip2", - "wasip3", -] - -[[package]] -name = "getset" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf0fc11e47561d47397154977bc219f4cf809b2974facc3ccb3b89e2436f912" -dependencies = [ - 
"proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.106", + "wasi", ] [[package]] name = "gimli" -version = "0.31.1" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "glob" -version = "0.3.3" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "glob-match" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d" - -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "h2" -version = "0.3.27" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http 0.2.12", - "indexmap 2.11.4", + "http", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", "tracing", ] -[[package]] -name = "h2" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http 1.3.1", - "indexmap 2.11.4", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "half" 
-version = "2.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" -dependencies = [ - "cfg-if", - "crunchy", - "zerocopy", -] - -[[package]] -name = "hash32" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606" -dependencies = [ - "byteorder", -] - [[package]] name = "hashbrown" version = "0.12.3" @@ -2297,36 +964,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.5" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" - -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "foldhash 0.1.5", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "foldhash 0.2.0", -] - -[[package]] -name = "hashlink" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0b22561a9c04a7cb1a302c013e0259cd3b4bb619f145b32f72b8b4bcbed230" -dependencies = [ - "hashbrown 0.16.1", -] +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "hdrhistogram" @@ -2334,28 +974,24 @@ version = "7.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" dependencies = [ - "base64 0.21.7", + "base64", "byteorder", "flate2", - "nom 7.1.3", + "nom", "num-traits", ] [[package]] -name = "heapless" -version = 
"0.8.0" +name = "heck" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bfb9eb618601c89945a70e254898da93b13be0388091d42117462b265bb3fad" -dependencies = [ - "hash32", - "stable_deref_trait", -] +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] -name = "heck" -version = "0.5.0" +name = "hermit-abi" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" [[package]] name = "hex" @@ -2363,40 +999,6 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -[[package]] -name = "hickory-proto" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" -dependencies = [ - "async-trait", - "cfg-if", - "data-encoding", - "enum-as-inner", - "futures-channel", - "futures-io", - "futures-util", - "idna", - "ipnet", - "once_cell", - "rand 0.9.2", - "ring", - "thiserror 2.0.16", - "tinyvec", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "hkdf" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" -dependencies = [ - "hmac", -] - [[package]] name = "hmac" version = "0.12.1" @@ -2408,35 +1010,18 @@ dependencies = [ [[package]] name = "home" -version = "0.5.11" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = 
"hostname-validator" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f558a64ac9af88b5ba400d99b579451af0d39c6d360980045b91aac966d705e2" - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", + "windows-sys 0.52.0", ] [[package]] name = "http" -version = "1.3.1" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ "bytes", "fnv", @@ -2450,38 +1035,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.12", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http 1.3.1", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http 1.3.1", - "http-body 1.0.1", + "http", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.10.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -2491,109 +1053,46 @@ checksum = 
"df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" -version = "2.3.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" - -[[package]] -name = "humantime-serde" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" -dependencies = [ - "humantime", - "serde", -] +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.32" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2 0.3.27", - "http 0.2.12", - "http-body 0.4.6", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.10", + "socket2", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" -dependencies = [ - "atomic-waker", - "bytes", - "futures-channel", - "futures-core", - "h2 0.4.12", - "http 1.3.1", - "http-body 1.0.1", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "pin-utils", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http 1.3.1", - "hyper 1.7.0", - "hyper-util", - "rustls", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", - 
"webpki-roots", -] - [[package]] name = "hyper-timeout" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.32", + "hyper", "pin-project-lite", "tokio", "tokio-io-timeout", ] -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper 1.7.0", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - [[package]] name = "hyper-tls" version = "0.5.0" @@ -2601,177 +1100,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.32", + "hyper", "native-tls", "tokio", "tokio-native-tls", ] -[[package]] -name = "hyper-util" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "hyper 1.7.0", - "ipnet", - "libc", - "percent-encoding", - "pin-project-lite", - "socket2 0.6.3", - "tokio", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core 0.62.2", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "icu_collections" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" - -[[package]] -name = "icu_properties" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "potential_utf", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" - -[[package]] -name = "icu_provider" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" -dependencies = [ - "displaydoc", - "icu_locale_core", - "stable_deref_trait", - "tinystr", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "id-arena" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - [[package]] name = "idna" -version = "1.1.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", + "unicode-bidi", + "unicode-normalization", ] [[package]] @@ -2782,80 +1124,39 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", - "serde", ] [[package]] name = "indexmap" -version = "2.11.4" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "inotify" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bd5b3eaf1a28b758ac0faa5a4254e8ab2705605496f1b1f3fbbc3988ad73d199" -dependencies = [ - "bitflags 2.9.4", - "inotify-sys", - "libc", -] - -[[package]] -name = "inotify-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" -dependencies = [ - "libc", + "hashbrown 0.14.3", ] [[package]] name = "inout" -version = "0.1.4" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ "generic-array", ] [[package]] name = "insta" -version = "1.43.2" +version = "1.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0" +checksum = "5d64600be34b2fcfc267740a243fa7744441bb4947a619ac4e5bb6507f35fbfc" dependencies = [ "console", - "once_cell", + "lazy_static", + "linked-hash-map", "serde", "similar", -] - -[[package]] -name = "inventory" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4f0c30c76f2f4ccee3fe55a2435f691ca00c0e4bd87abe4f4a851b1d4dac39b" -dependencies = [ - "rustversion", -] - -[[package]] -name = "io-uring" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" -dependencies = [ - "bitflags 2.9.4", - "cfg-if", - "libc", + "yaml-rust", ] [[package]] @@ -2882,127 +1183,48 @@ checksum = "8e537132deb99c0eb4b752f0346b6a836200eaaa3516dd7e5514b63930a09e5d" [[package]] name = "ipnet" -version = "2.11.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" - -[[package]] -name = "ipnetwork" -version = "0.21.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf370abdafd54d13e54a620e8c3e1145f28e46cc9d704bc6d94414559df41763" -dependencies = [ - "serde", -] - -[[package]] -name = "iri-string" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "itertools" -version = "0.12.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.15" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobserver" -version = "0.1.34" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" dependencies = [ - "getrandom 0.3.3", "libc", ] [[package]] name = "js-sys" -version = "0.3.93" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "797146bb2677299a1eb6b7b50a890f4c361b29ef967addf5b2fa45dae1bb6d7d" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" dependencies = [ - "cfg-if", - "futures-util", - "once_cell", "wasm-bindgen", ] -[[package]] -name = "keccak" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "kqueue" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" -dependencies = [ - "kqueue-sys", - "libc", -] - -[[package]] -name = "kqueue-sys" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" -dependencies = [ - "bitflags 1.3.2", - "libc", -] - [[package]] name = "lazy_static" -version = "1.5.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -dependencies = [ - "spin", -] +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lazycell" @@ -3010,17 +1232,11 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" -[[package]] -name = "leb128fmt" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" - [[package]] name = "libc" -version = "0.2.176" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "libloading" @@ -3034,104 +1250,49 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.9" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" +checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" dependencies = [ "cfg-if", - "windows-link 0.2.1", -] - -[[package]] -name = "liblzma" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6033b77c21d1f56deeae8014eb9fbe7bdf1765185a6c508b5ca82eeaed7f899" -dependencies = [ - "liblzma-sys", -] - -[[package]] -name = "liblzma-sys" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f2db66f3268487b5033077f266da6777d057949b8f93c8ad82e441df25e6186" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - -[[package]] -name = "libm" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" - -[[package]] -name = "libredox" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ddbf48fd451246b1f8c2610bd3b4ac0cc6e149d89832867093ab69a17194f08" -dependencies = [ - "bitflags 2.9.4", - "libc", - "plain", - "redox_syscall 0.7.3", -] - -[[package]] -name = "libsqlite3-sys" -version = "0.36.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b4103cffefa72eb8428cb6b47d6627161e51c2739fc5e3b734584157bc642a" -dependencies = [ - "cc", - "pkg-config", 
- "vcpkg", + "windows-sys 0.48.0", ] [[package]] name = "libsystemd" -version = "0.7.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19c97a761fc86953c5b885422b22c891dbf5bcb9dcc99d0110d6ce4c052759f0" +checksum = "c592dc396b464005f78a5853555b9f240bc5378bf5221acc4e129910b2678869" dependencies = [ "hmac", "libc", "log", - "nix 0.29.0", - "nom 8.0.0", + "nix 0.27.1", + "nom", "once_cell", "serde", "sha2", - "thiserror 2.0.16", + "thiserror", "uuid", ] [[package]] -name = "linux-raw-sys" -version = "0.4.15" +name = "linked-hash-map" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" -version = "0.4.13" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -3139,29 +1300,17 @@ dependencies = [ [[package]] name = "log" -version = "0.4.28" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" - -[[package]] -name = "lru-slab" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - -[[package]] -name = "managed" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca88d725a0a943b096803bd34e73a4437208b6077654cc4ecb2947a5f91618d" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "matchers" -version = "0.2.0" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ - "regex-automata", + "regex-automata 0.1.10", ] [[package]] @@ -3172,18 +1321,9 @@ checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "memchr" -version = "2.7.5" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" - -[[package]] -name = "memmap2" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" -dependencies = [ - "libc", -] +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "memoffset" @@ -3196,25 +1336,13 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.9.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] -[[package]] -name = "merlin" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" 
-dependencies = [ - "byteorder", - "keccak", - "rand_core 0.6.4", - "zeroize", -] - [[package]] name = "miette" version = "5.10.0" @@ -3223,8 +1351,8 @@ checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" dependencies = [ "miette-derive", "once_cell", - "thiserror 1.0.69", - "unicode-width 0.1.14", + "thiserror", + "unicode-width", ] [[package]] @@ -3235,7 +1363,7 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -3252,37 +1380,31 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.9" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" dependencies = [ - "adler2", + "adler", ] [[package]] name = "mio" -version = "1.0.4" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "log", - "wasi 0.11.1+wasi-snapshot-preview1", - "windows-sys 0.59.0", + "wasi", + "windows-sys 0.48.0", ] -[[package]] -name = "multimap" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" - [[package]] name = "native-tls" -version = "0.2.14" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" dependencies = [ + "lazy_static", "libc", "log", "openssl", @@ -3294,22 +1416,6 @@ 
dependencies = [ "tempfile", ] -[[package]] -name = "netstack-smoltcp" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab8eb143b5f4a5907f5ac72a929edf6c9d9454485cf5a3a35ce8fd3c62165adf" -dependencies = [ - "etherparse", - "futures", - "rand 0.8.5", - "smoltcp", - "spin", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "nix" version = "0.26.4" @@ -3329,23 +1435,10 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.4.2", "cfg-if", "libc", - "memoffset 0.9.1", -] - -[[package]] -name = "nix" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" -dependencies = [ - "bitflags 2.9.4", - "cfg-if", - "cfg_aliases", - "libc", - "memoffset 0.9.1", + "memoffset 0.9.0", ] [[package]] @@ -3359,220 +1452,62 @@ dependencies = [ ] [[package]] -name = "nom" -version = "8.0.0" +name = "nu-ansi-term" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9761775871bdef83bee530e60050f7e54b1105350d6884eb0fb4f46c2f9405" -dependencies = [ - "memchr", -] - -[[package]] -name = "nonany" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6b8866ec53810a9a4b3d434a29801e78c707430a9ae11c2db4b8b62bb9675a0" - -[[package]] -name = "notify" -version = "8.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" -dependencies = [ - "bitflags 2.9.4", - "inotify", - "kqueue", - "libc", - "log", - "mio", - "notify-types", - "walkdir", - "windows-sys 0.60.2", -] - -[[package]] -name = "notify-types" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" -dependencies = [ - "bitflags 2.9.4", -] - -[[package]] -name = "ntapi" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3b335231dfd352ffb0f8017f3b6027a4917f7df785ea2143d8af2adc66980ae" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" dependencies = [ + "overload", "winapi", ] -[[package]] -name = "nu-ansi-term" -version = "0.50.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-bigint-dig" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" -dependencies = [ - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand 0.8.5", - "smallvec", - "zeroize", -] - -[[package]] -name = "num-conv" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6673768db2d862beb9b39a78fdcb1a69439615d5794a1be50caa9bc92c81967" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] 
name = "num-traits" -version = "0.2.19" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", - "libm", ] [[package]] -name = "num_enum" -version = "0.7.5" +name = "num_cpus" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "objc2-core-foundation" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" -dependencies = [ - "bitflags 2.9.4", -] - -[[package]] -name = "objc2-io-kit" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ + "hermit-abi", "libc", - "objc2-core-foundation", ] [[package]] name = "object" -version = "0.36.7" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.21.3" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -dependencies = [ - "critical-section", - "portable-atomic", -] - -[[package]] -name = "once_cell_polyfill" -version = "1.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" - -[[package]] -name = "oneshot-fused-workaround" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17b52d0e4a06a4c7eb8d2943c0015fa628cf4ccc409429cebc0f5bed6d33a82" -dependencies = [ - "futures", -] - -[[package]] -name = "oorandom" -version = "11.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.73" +version = "0.10.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +checksum = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.4.2", "cfg-if", "foreign-types", "libc", @@ -3589,20 +1524,20 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "openssl-probe" -version = "0.1.6" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.109" +version = "0.9.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +checksum = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" dependencies = [ "cc", "libc", @@ -3611,98 +1546,22 @@ dependencies = [ ] [[package]] -name = "option-ext" -version = "0.2.0" +name = "overload" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits", -] - -[[package]] -name = "ordered-multimap" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" -dependencies = [ - "dlv-list", - "hashbrown 0.14.5", -] - -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" -dependencies = [ - "memchr", -] - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "p384" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "p521" -version = 
"0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc9e2161f1f215afdfce23677034ae137bbd45016a880c2eb3ba8eb95f085b2" -dependencies = [ - "base16ct", - "ecdsa", - "elliptic-curve", - "primeorder", - "rand_core 0.6.4", - "sha2", -] - -[[package]] -name = "page_size" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" -dependencies = [ - "libc", - "winapi", -] +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking" -version = "2.2.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" -version = "0.12.4" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", "parking_lot_core", @@ -3710,15 +1569,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.11" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.17", + "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -3728,27 +1587,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] -[[package]] -name = 
"password-hash" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - [[package]] name = "pbkdf2" version = "0.11.0" @@ -3757,7 +1599,7 @@ checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest", "hmac", - "password-hash 0.4.2", + "password-hash", "sha2", ] @@ -3767,99 +1609,37 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" -[[package]] -name = "pem-rfc7468" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" -dependencies = [ - "base64ct", -] - [[package]] name = "percent-encoding" -version = "2.3.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "petgraph" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" -dependencies = [ - "fixedbitset", - "indexmap 2.11.4", -] - -[[package]] -name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros", - "phf_shared", - "serde", -] - -[[package]] -name = "phf_generator" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -3867,66 +1647,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "pkcs1" 
-version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der", - "pkcs8", - "spki", -] - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - [[package]] name = "pkg-config" -version = "0.3.32" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" [[package]] -name = "plain" -version = "0.2.3" +name = "platforms" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" - -[[package]] -name = "plotters" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" - -[[package]] -name = "plotters-svg" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" -dependencies = [ - "plotters-backend", -] +checksum = "626dec3cac7cc0e1577a2ec3fc496277ec2baa084bebad95bb6fdbfae235f84c" [[package]] name = "poly1305" @@ -3939,36 +1670,6 @@ dependencies = [ "universal-hash", ] -[[package]] -name = "portable-atomic" -version = "1.11.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" - -[[package]] -name = "postage" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" -dependencies = [ - "atomic 0.5.3", - "crossbeam-queue", - "futures", - "parking_lot", - "pin-project", - "static_assertions", - "thiserror 1.0.69", -] - -[[package]] -name = "potential_utf" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" -dependencies = [ - "zerovec", -] - [[package]] name = "powerfmt" version = "0.2.0" @@ -3977,261 +1678,70 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.21" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "prettyplease" -version = "0.2.37" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5" dependencies = [ "proc-macro2", - "syn 2.0.106", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve", -] - -[[package]] -name = "priority-queue" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93980406f12d9f8140ed5abe7155acb10bb1e69ea55c88960b9c2f117445ef96" 
-dependencies = [ - "equivalent", - "indexmap 2.11.4", - "serde", -] - -[[package]] -name = "proc-macro-crate" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" -dependencies = [ - "toml_edit 0.23.7", -] - -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] [[package]] name = "prost" -version = "0.12.6" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", - "prost-derive 0.12.6", -] - -[[package]] -name = "prost" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" -dependencies = [ - "bytes", - "prost-derive 0.13.5", -] - -[[package]] -name = "prost-build" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" -dependencies = [ - "heck", - "itertools 0.14.0", - "log", - "multimap", - "once_cell", - "petgraph", - "prettyplease", - "prost 0.13.5", - "prost-types 0.13.5", - "regex", - "syn 2.0.106", - "tempfile", + "prost-derive", ] [[package]] name = "prost-derive" -version = "0.12.6" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools", "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "prost-derive" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "prost-types" -version = "0.12.6" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ - "prost 0.12.6", -] - -[[package]] -name = "prost-types" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" -dependencies = [ - "prost 0.13.5", -] - -[[package]] -name = "pwd-grp" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e2023f41b5fcb7c30eb5300a5733edfaa9e0e0d502d51b586f65633fd39e40c" -dependencies = [ - "derive-deftly", - "libc", - "paste", - "thiserror 2.0.16", -] - -[[package]] -name = "quinn" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" -dependencies = [ - "bytes", - "cfg_aliases", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash 2.1.1", - "rustls", - "socket2 0.6.3", - "thiserror 2.0.16", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-proto" -version = "0.11.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" -dependencies = [ - "bytes", - "getrandom 0.3.3", - "lru-slab", - "rand 0.9.2", - "ring", - "rustc-hash 2.1.1", - "rustls", - "rustls-pki-types", - "slab", - "thiserror 2.0.16", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2 0.6.3", - "tracing", - "windows-sys 0.60.2", + "prost", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "r-efi" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - [[package]] name = "rand" version = "0.8.5" @@ 
-4239,18 +1749,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.3", + "rand_chacha", + "rand_core", ] [[package]] @@ -4260,17 +1760,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.3", + "rand_core", ] [[package]] @@ -4279,151 +1769,77 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.16", -] - -[[package]] -name = "rand_core" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" -dependencies = [ - "getrandom 0.3.3", -] - -[[package]] -name = "rand_jitter" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16df48f071248e67b8fc5e866d9448d45c08ad8b672baaaf796e2f15e606ff0" -dependencies = [ - "libc", - "rand_core 0.9.3", - "winapi", -] - -[[package]] -name = "rayon" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" -dependencies = [ - 
"either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "rdrand" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92195228612ac8eed47adbc2ed0f04e513a4ccb98175b6f2bd04d963b533655" -dependencies = [ - "rand_core 0.6.4", + "getrandom", ] [[package]] name = "redox_syscall" -version = "0.5.17" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ - "bitflags 2.9.4", -] - -[[package]] -name = "redox_syscall" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce70a74e890531977d37e532c34d45e9055d2409ed08ddba14529471ed0be16" -dependencies = [ - "bitflags 2.9.4", -] - -[[package]] -name = "redox_users" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" -dependencies = [ - "getrandom 0.2.16", - "libredox", - "thiserror 2.0.16", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "bitflags 1.3.2", ] [[package]] name = "regex" -version = "1.11.2" +version = 
"1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.5", + "regex-syntax 0.8.2", ] [[package]] name = "regex-automata" -version = "0.4.10" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.2", ] [[package]] name = "regex-syntax" -version = "0.8.6" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "reqwest" -version = "0.11.27" +version = "0.11.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" dependencies = [ - "base64 0.21.7", + "base64", "bytes", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.27", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", + "h2", + "http", + 
"http-body", + "hyper", "hyper-tls", "ipnet", "js-sys", @@ -4433,11 +1849,9 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", @@ -4449,139 +1863,25 @@ dependencies = [ "winreg", ] -[[package]] -name = "reqwest" -version = "0.12.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures-core", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.7.0", - "hyper-rustls", - "hyper-util", - "js-sys", - "log", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 1.0.2", - "tokio", - "tokio-rustls", - "tower 0.5.2", - "tower-http", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", -] - -[[package]] -name = "retry-error" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c322ea521636c5a3f13685a6266055b2dda7e54e2be35214d7c2a5d0672a5db" -dependencies = [ - "humantime", -] - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - [[package]] name = "ring" -version = "0.17.14" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", - "cfg-if", - "getrandom 0.2.16", + "getrandom", "libc", + "spin", "untrusted", - "windows-sys 0.52.0", -] - -[[package]] -name = "rsa" 
-version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" -dependencies = [ - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core 0.6.4", - "sha2", - "signature", - "spki", - "subtle", - "zeroize", -] - -[[package]] -name = "rsqlite-vfs" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a1f2315036ef6b1fbacd1972e8ee7688030b0a2121edfc2a6550febd41574d" -dependencies = [ - "hashbrown 0.16.1", - "thiserror 2.0.16", -] - -[[package]] -name = "rusqlite" -version = "0.38.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1c93dd1c9683b438c392c492109cb702b8090b2bfc8fed6f6e4eb4523f17af3" -dependencies = [ - "bitflags 2.9.4", - "fallible-iterator", - "fallible-streaming-iterator", - "hashlink", - "libsqlite3-sys", - "smallvec", - "sqlite-wasm-rs", - "time", -] - -[[package]] -name = "rust-ini" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" -dependencies = [ - "cfg-if", - "ordered-multimap", + "windows-sys 0.48.0", ] [[package]] name = "rustc-demangle" -version = "0.1.26" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustc-hash" @@ -4589,163 +1889,54 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" - [[package]] name = "rustc_version" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ "semver", ] -[[package]] -name = "rusticata-macros" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" -dependencies = [ - "nom 7.1.3", -] - [[package]] name = "rustix" -version = "0.38.44" +version = "0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.4.2", "errno", "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" -dependencies = [ - "bitflags 2.9.4", - "errno", - "libc", - "linux-raw-sys 0.11.0", - "windows-sys 0.61.0", -] - -[[package]] -name = "rustls" -version = "0.23.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9586e9ee2b4f8fab52a0048ca7334d7024eef48e2cb9407e3497bb7cab7fa7" -dependencies = [ - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - -[[package]] -name = "rustls-pki-types" -version = "1.12.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" -dependencies = [ - "web-time", - "zeroize", -] - -[[package]] -name = "rustls-webpki" -version = "0.103.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", + "linux-raw-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustversion" -version = "1.0.22" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.20" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" - -[[package]] -name = "safelog" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee9f10dd250956c65d58a19507dd06ff976f898560fe843580d05134541f0898" -dependencies = [ - "derive_more", - "educe", - "either", - "fluid-let", - "thiserror 2.0.16", -] - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "sanitize-filename" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc984f4f9ceb736a7bb755c3e3bd17dc56370af2600c9780dcc48c66453da34d" -dependencies = [ - "regex", -] - -[[package]] -name = "saturating-time" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b63583a1dd0647d1484228529ab4ecaa874048d2956f117362aa5f5826456230" +checksum = 
"f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "schannel" -version = "0.1.28" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] name = "schemars" -version = "0.8.22" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" dependencies = [ "dyn-clone", "schemars_derive", @@ -4753,40 +1944,16 @@ dependencies = [ "serde_json", ] -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - [[package]] name = "schemars_derive" -version = "0.8.22" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967" dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.106", + "syn 1.0.109", ] [[package]] @@ -4795,27 +1962,13 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = 
"sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "subtle", - "zeroize", -] - [[package]] name = "security-framework" -version = "2.11.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags 2.9.4", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -4824,9 +1977,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.15.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -4834,111 +1987,50 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.27" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" [[package]] name = "serde" -version = "1.0.228" +version = "1.0.196" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde-value" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float", - "serde", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.228" +version = "1.0.196" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "serde_derive_internals" -version = "0.29.1" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "serde_ignored" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115dffd5f3853e06e746965a20dcbae6ee747ae30b543d91b0e089668bb07798" -dependencies = [ - "serde", - "serde_core", + "syn 1.0.109", ] [[package]] name = "serde_json" -version = "1.0.145" +version = "1.0.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" +checksum = "4d1bd37ce2324cf3bf85e5a25f96eb4baf0d5aa6eba43e7ae8958870c4ec48ed" dependencies = [ "itoa", - "memchr", "ryu", "serde", - "serde_core", -] - -[[package]] -name = "serde_path_to_error" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" -dependencies = [ - "itoa", - "serde", - "serde_core", -] - -[[package]] -name = "serde_spanned" -version = "0.6.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_spanned" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876ac351060d4f882bb1032b6369eb0aef79ad9df1ea8bc404874d8cc3d0cd98" -dependencies = [ - "serde_core", ] [[package]] @@ -4953,37 +2045,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_with" -version = "3.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd5414fad8e6907dbdd5bc441a50ae8d6e26151a03b1de04d89a5576de61d01f" -dependencies = [ - "base64 0.22.1", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.11.4", - "schemars 0.9.0", - "schemars 1.2.1", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3db8978e608f1fe7357e211969fd9abdcae80bac1ba7a3369bb7eb6b404eb65" -dependencies = [ - "darling 0.23.0", - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "sha-1" version = "0.10.1" @@ -5008,25 +2069,15 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.9" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", "digest", ] -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest", - "keccak", -] - [[package]] name = "sharded-slab" version = "0.1.7" @@ -5036,122 +2087,41 @@ dependencies = [ "lazy_static", ] -[[package]] -name = "shellexpand" -version = 
"3.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32824fab5e16e6c4d86dc1ba84489390419a39f97699852b66480bb87d297ed8" -dependencies = [ - "bstr", - "dirs", - "os_str_bytes", -] - [[package]] name = "shlex" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "signal-hook-registry" -version = "1.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" -dependencies = [ - "libc", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest", - "rand_core 0.6.4", -] - [[package]] name = "similar" -version = "2.7.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" - -[[package]] -name = "siphasher" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" +checksum = "32fea41aca09ee824cc9724996433064c89f7777e60762749a4170a14abbfa21" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "slotmap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdd58c3c93c3d278ca835519292445cb4b0d4dc59ccfdf7ceadaab3f8aeb4038" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ - "serde", - "version_check", -] - -[[package]] -name = "slotmap-careful" -version = "0.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed92816c1fbb29891a525b92d5fa95757c9dee47044f76c8e06ceb1e052a8d64" -dependencies = [ - "paste", - "serde", - "slotmap", - "thiserror 2.0.16", - "void", + "autocfg", ] [[package]] name = "smallvec" -version = "1.15.1" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" - -[[package]] -name = "smoltcp" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad095989c1533c1c266d9b1e8d70a1329dd3723c3edac6d03bbd67e7bf6f4bb" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "cfg-if", - "defmt 0.3.100", - "heapless", - "log", - "managed", -] +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "socket2" -version = "0.5.10" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "socket2" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" -dependencies = [ - "libc", - "windows-sys 0.61.0", + "windows-sys 0.48.0", ] [[package]] @@ -5159,73 +2129,6 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "sqlite-wasm-rs" -version 
= "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f4206ed3a67690b9c29b77d728f6acc3ce78f16bf846d83c94f76400320181b" -dependencies = [ - "cc", - "js-sys", - "rsqlite-vfs", - "wasm-bindgen", -] - -[[package]] -name = "ssh-cipher" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caac132742f0d33c3af65bfcde7f6aa8f62f0e991d80db99149eb9d44708784f" -dependencies = [ - "cipher", - "ssh-encoding", -] - -[[package]] -name = "ssh-encoding" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb9242b9ef4108a78e8cd1a2c98e193ef372437f8c22be363075233321dd4a15" -dependencies = [ - "base64ct", - "pem-rfc7468", - "sha2", -] - -[[package]] -name = "ssh-key" -version = "0.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b86f5297f0f04d08cabaa0f6bff7cb6aec4d9c3b49d87990d63da9d9156a8c3" -dependencies = [ - "num-bigint-dig", - "p256", - "p384", - "p521", - "rand_core 0.6.4", - "rsa", - "sec1", - "sha2", - "signature", - "ssh-cipher", - "ssh-encoding", - "subtle", - "zeroize", -] [[package]] name = "ssri" @@ -5233,66 +2136,27 @@ version = "9.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da7a2b3c2bc9693bcb40870c4e9b5bf0d79f9cb46273321bf855ec513e919082" dependencies = [ - "base64 0.21.7", + "base64", "digest", "hex", "miette", "sha-1", "sha2", - "thiserror 1.0.69", + "thiserror", "xxhash-rust", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "strsim" version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "subtle" -version = "2.6.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" @@ -5307,9 +2171,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -5322,40 +2186,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -dependencies = [ - "futures-core", -] - -[[package]] -name = "synstructure" -version = 
"0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "sysinfo" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "252800745060e7b9ffb7b2badbd8b31cfa4aa2e61af879d0a3bf2a317c20217d" -dependencies = [ - "libc", - "memchr", - "ntapi", - "objc2-core-foundation", - "objc2-io-kit", - "windows 0.61.3", -] - [[package]] name = "system-configuration" version = "0.5.1" @@ -5377,140 +2207,72 @@ dependencies = [ "libc", ] -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - [[package]] name = "tempfile" -version = "3.23.0" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" dependencies = [ + "cfg-if", "fastrand", - "getrandom 0.3.3", - "once_cell", - "rustix 1.1.2", - "windows-sys 0.61.0", + "redox_syscall", + "rustix", + "windows-sys 0.52.0", ] [[package]] name = "thiserror" -version = "1.0.69" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" -dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.69" +version = "1.0.56" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "thread_local" -version = "1.1.9" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ "cfg-if", + "once_cell", ] [[package]] name = "time" -version = "0.3.47" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" dependencies = [ "deranged", - "itoa", - "js-sys", - "num-conv", "powerfmt", - "serde_core", + "serde", "time-core", - "time-macros", ] [[package]] name = "time-core" -version = "0.1.8" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" - -[[package]] -name = "time-macros" -version = "0.2.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" 
-dependencies = [ - "crunchy", -] - -[[package]] -name = "tinystr" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "tinyvec" -version = "1.10.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" dependencies = [ "tinyvec_macros", ] @@ -5523,29 +2285,27 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.47.1" +version = "1.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" dependencies = [ "backtrace", "bytes", - "io-uring", "libc", "mio", + "num_cpus", "pin-project-lite", - "signal-hook-registry", - "slab", - "socket2 0.6.3", + "socket2", "tokio-macros", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] name = "tokio-io-timeout" -version = "1.2.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bd86198d9ee903fedd2f9a2e72014287c0d9167e4ae43b5853007205dda1b76" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" dependencies = [ "pin-project-lite", "tokio", @@ -5553,13 +2313,13 @@ dependencies = [ [[package]] name = "tokio-macros" 
-version = "2.5.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -5572,21 +2332,11 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd" -dependencies = [ - "rustls", - "tokio", -] - [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" dependencies = [ "futures-core", "pin-project-lite", @@ -5595,1134 +2345,45 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.18" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", - "futures-io", "futures-sink", "pin-project-lite", "tokio", + "tracing", ] -[[package]] -name = "toml" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" -dependencies = [ - "serde", - "serde_spanned 0.6.9", - "toml_datetime 0.6.11", - "toml_edit 0.22.27", -] - -[[package]] -name = "toml" -version = "0.9.12+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" -dependencies = [ - "indexmap 2.11.4", - 
"serde_core", - "serde_spanned 1.1.0", - "toml_datetime 0.7.5+spec-1.1.0", - "toml_parser", - "toml_writer", - "winnow 0.7.13", -] - -[[package]] -name = "toml_datetime" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" -dependencies = [ - "serde_core", -] - -[[package]] -name = "toml_edit" -version = "0.22.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" -dependencies = [ - "indexmap 2.11.4", - "serde", - "serde_spanned 0.6.9", - "toml_datetime 0.6.11", - "toml_write", - "winnow 0.7.13", -] - -[[package]] -name = "toml_edit" -version = "0.23.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" -dependencies = [ - "indexmap 2.11.4", - "toml_datetime 0.7.5+spec-1.1.0", - "toml_parser", - "winnow 0.7.13", -] - -[[package]] -name = "toml_parser" -version = "1.1.0+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2334f11ee363607eb04df9b8fc8a13ca1715a72ba8662a26ac285c98aabb4011" -dependencies = [ - "winnow 1.0.1", -] - -[[package]] -name = "toml_write" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" - -[[package]] -name = "toml_writer" -version = "1.1.0+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d282ade6016312faf3e41e57ebbba0c073e4056dab1232ab1cb624199648f8ed" - [[package]] name = "tonic" version = "0.10.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" dependencies = [ - "async-stream 0.3.6", + "async-stream", "async-trait", - "axum 0.6.20", - "base64 0.21.7", + "axum", + "base64", "bytes", - "h2 0.3.27", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper-timeout 0.4.1", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", "percent-encoding", "pin-project", - "prost 0.12.6", + "prost", "tokio", "tokio-stream", - "tower 0.4.13", + "tower", "tower-layer", "tower-service", "tracing", ] -[[package]] -name = "tonic" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" -dependencies = [ - "async-stream 0.3.6", - "async-trait", - "axum 0.7.9", - "base64 0.22.1", - "bytes", - "h2 0.4.12", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.7.0", - "hyper-timeout 0.5.2", - "hyper-util", - "percent-encoding", - "pin-project", - "prost 0.13.5", - "socket2 0.5.10", - "tokio", - "tokio-stream", - "tower 0.4.13", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic-build" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" -dependencies = [ - "prettyplease", - "proc-macro2", - "prost-build", - "prost-types 0.13.5", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "tor-async-utils" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "895c61a46909134501c6815eceeb66c9c80fc494ee89429821b0f05ccf34b4f5" -dependencies = [ - "derive-deftly", - "educe", - "futures", - "oneshot-fused-workaround", - "pin-project", - "postage", - "thiserror 2.0.16", - "void", -] - -[[package]] -name = "tor-basic-utils" -version = "0.40.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac6e4d7e131b7d69bc85558383cd4ac61e46b4dd0d4ed51632f28fac98cac0c" -dependencies = [ - "derive_more", - "hex", - "itertools 0.14.0", - "libc", - "paste", - "rand 0.9.2", - "rand_chacha 0.9.0", - "serde", - "slab", - "smallvec", - "thiserror 2.0.16", - "weak-table", -] - -[[package]] -name = "tor-bytes" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64454947258e49f238a5f06a06250a0c54598a1c7409898b5c79505e6a99e7af" -dependencies = [ - "bytes", - "derive-deftly", - "digest", - "educe", - "getrandom 0.4.2", - "safelog", - "thiserror 2.0.16", - "tor-error", - "tor-llcrypto", - "zeroize", -] - -[[package]] -name = "tor-cell" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ab0c79bc92a957e85959cf397a2d8f9c8294c35fa4f65247a9393b20ac95551" -dependencies = [ - "amplify", - "bitflags 2.9.4", - "bytes", - "caret", - "derive-deftly", - "derive_more", - "educe", - "itertools 0.14.0", - "paste", - "rand 0.9.2", - "smallvec", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-bytes", - "tor-cert", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-protover", - "tor-units", - "void", -] - -[[package]] -name = "tor-cert" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "debc911738298ee801fce4577c36a50c55295b0bb9c5519461b83cc486a1f86e" -dependencies = [ - "caret", - "derive_builder_fork_arti", - "derive_more", - "digest", - "thiserror 2.0.16", - "tor-bytes", - "tor-checkable", - "tor-error", - "tor-llcrypto", -] - -[[package]] -name = "tor-chanmgr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7af5b7c2f1e16d1304b5185fcbc91ca5c8df991c21be00702f925f055573eea1" -dependencies = [ - "async-trait", - "caret", - "cfg-if", - "derive-deftly", - "derive_more", - "educe", - "futures", - 
"oneshot-fused-workaround", - "percent-encoding", - "postage", - "rand 0.9.2", - "safelog", - "serde", - "serde_with", - "thiserror 2.0.16", - "tor-async-utils", - "tor-basic-utils", - "tor-cell", - "tor-config", - "tor-error", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-netdir", - "tor-proto", - "tor-rtcompat", - "tor-socksproto", - "tor-units", - "tracing", - "url", - "void", -] - -[[package]] -name = "tor-checkable" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15b13a5b50bb55037f2e81b25dde42f420d57c75154216b8ef989006cea3ebee" -dependencies = [ - "humantime", - "signature", - "thiserror 2.0.16", - "tor-llcrypto", -] - -[[package]] -name = "tor-circmgr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b878f3f7c6be0c7f130d90b347ada2e7c46519dfbdde8273eae2e5d1792caa87" -dependencies = [ - "amplify", - "async-trait", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "downcast-rs", - "dyn-clone", - "educe", - "futures", - "humantime-serde", - "itertools 0.14.0", - "once_cell", - "oneshot-fused-workaround", - "pin-project", - "rand 0.9.2", - "retry-error", - "safelog", - "serde", - "thiserror 2.0.16", - "tor-async-utils", - "tor-basic-utils", - "tor-cell", - "tor-chanmgr", - "tor-config", - "tor-dircommon", - "tor-error", - "tor-guardmgr", - "tor-linkspec", - "tor-memquota", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-relay-selection", - "tor-rtcompat", - "tor-units", - "tracing", - "void", - "weak-table", -] - -[[package]] -name = "tor-config" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc74a00ab15bb986e3747c6969e40a58a63065d6f99077e7ee2f4657bb8b03" -dependencies = [ - "amplify", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "educe", - "either", - "figment", - "fs-mistrust", - "futures", - 
"humantime-serde", - "itertools 0.14.0", - "notify", - "paste", - "postage", - "regex", - "serde", - "serde-value", - "serde_ignored", - "strum", - "thiserror 2.0.16", - "toml 0.9.12+spec-1.1.0", - "tor-basic-utils", - "tor-error", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "tor-config-path" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3005ab7b9a26a7271e5adf3dfb4ae18c09a943e32aeccc4f6d1c53a60de74b8d" -dependencies = [ - "directories", - "serde", - "shellexpand", - "thiserror 2.0.16", - "tor-error", - "tor-general-addr", -] - -[[package]] -name = "tor-consdiff" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bfa2b7b71c72830f61c48da4bb3e13191e0c0e1404b9c5168c795e4f5feb4a8" -dependencies = [ - "digest", - "hex", - "thiserror 2.0.16", - "tor-llcrypto", -] - -[[package]] -name = "tor-dirclient" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccd6fac844ac77c33ccdfcb56bf23ff40ebbb821ea708be79a481ec30e8c39c" -dependencies = [ - "async-compression", - "base64ct", - "derive_more", - "futures", - "hex", - "http 1.3.1", - "httparse", - "httpdate", - "itertools 0.14.0", - "memchr", - "thiserror 2.0.16", - "tor-circmgr", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-netdoc", - "tor-proto", - "tor-rtcompat", - "tracing", -] - -[[package]] -name = "tor-dircommon" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0cf39a3c30321d145a4d60753ae7ef5bb58a66a00ac9e2bfc30bd823faf2a4" -dependencies = [ - "base64ct", - "derive-deftly", - "getset", - "humantime", - "humantime-serde", - "serde", - "tor-basic-utils", - "tor-checkable", - "tor-config", - "tor-linkspec", - "tor-llcrypto", - "tor-netdoc", - "tracing", -] - -[[package]] -name = "tor-dirmgr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b52919aa9dbb82a354c5b904bef82e91beb702b9f8ad14e6eac4237d6128bf67" -dependencies = [ - "async-trait", - "base64ct", - "derive_builder_fork_arti", - "derive_more", - "digest", - "educe", - "event-listener", - "fs-mistrust", - "fslock", - "futures", - "hex", - "humantime", - "humantime-serde", - "itertools 0.14.0", - "memmap2", - "oneshot-fused-workaround", - "paste", - "postage", - "rand 0.9.2", - "rusqlite", - "safelog", - "scopeguard", - "serde", - "serde_json", - "signature", - "static_assertions", - "strum", - "thiserror 2.0.16", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-checkable", - "tor-circmgr", - "tor-config", - "tor-consdiff", - "tor-dirclient", - "tor-dircommon", - "tor-error", - "tor-guardmgr", - "tor-llcrypto", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-rtcompat", - "tracing", -] - -[[package]] -name = "tor-error" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595b005e6f571ac3890a34a00f361200aab781fd0218f2c528c86fc7af088df5" -dependencies = [ - "derive_more", - "futures", - "paste", - "retry-error", - "static_assertions", - "strum", - "thiserror 2.0.16", - "tracing", - "void", -] - -[[package]] -name = "tor-general-addr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727b8c8bc01c1587486055edab5c2cd0d5c960f5bb3fac796fc9911872b8b397" -dependencies = [ - "derive_more", - "thiserror 2.0.16", - "void", -] - -[[package]] -name = "tor-guardmgr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d337f465a477c0fb3b2faafa4654d70ff9df3590e57d22707591dddb4e4450c1" -dependencies = [ - "amplify", - "base64ct", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "dyn-clone", - "educe", - "futures", - "humantime", - "humantime-serde", - "itertools 0.14.0", - "num_enum", - "oneshot-fused-workaround", - "pin-project", - "postage", - "rand 
0.9.2", - "safelog", - "serde", - "strum", - "thiserror 2.0.16", - "tor-async-utils", - "tor-basic-utils", - "tor-config", - "tor-dircommon", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-relay-selection", - "tor-rtcompat", - "tor-units", - "tracing", -] - -[[package]] -name = "tor-hscrypto" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3693cd43f05cd01ac0aaa060dae5c5e53c4364f89e0d769e33cd629a2fd3118" -dependencies = [ - "data-encoding", - "derive-deftly", - "derive_more", - "digest", - "hex", - "humantime", - "itertools 0.14.0", - "paste", - "rand 0.9.2", - "safelog", - "serde", - "signature", - "subtle", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-bytes", - "tor-error", - "tor-key-forge", - "tor-llcrypto", - "tor-units", - "void", -] - -[[package]] -name = "tor-key-forge" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ade9065ae49cfe2ab020ca9ca9f2b3c5c9b5fc0d8980fa681d8b3a0668e042f" -dependencies = [ - "derive-deftly", - "derive_more", - "downcast-rs", - "paste", - "rand 0.9.2", - "rsa", - "signature", - "ssh-key", - "thiserror 2.0.16", - "tor-bytes", - "tor-cert", - "tor-checkable", - "tor-error", - "tor-llcrypto", -] - -[[package]] -name = "tor-keymgr" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243c3163d376c4723cd67271fcd6e5d6b498a6865c6b98299640e1be01c38826" -dependencies = [ - "amplify", - "arrayvec", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "downcast-rs", - "dyn-clone", - "fs-mistrust", - "glob-match", - "humantime", - "inventory", - "itertools 0.14.0", - "rand 0.9.2", - "safelog", - "serde", - "signature", - "ssh-key", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-bytes", - "tor-config", - "tor-config-path", - "tor-error", - "tor-hscrypto", - "tor-key-forge", - "tor-llcrypto", 
- "tor-persist", - "tracing", - "visibility", - "walkdir", - "zeroize", -] - -[[package]] -name = "tor-linkspec" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f1ea8786900d6fbe4c9f775d341b1ba01bbd1f750d89bd63be78b6b01e1836" -dependencies = [ - "base64ct", - "by_address", - "caret", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "hex", - "itertools 0.14.0", - "safelog", - "serde", - "serde_with", - "strum", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-bytes", - "tor-config", - "tor-llcrypto", - "tor-memquota", - "tor-protover", -] - -[[package]] -name = "tor-llcrypto" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c6989a1c6d06ffd6835e2917edaae4aeef544f8e5fdd68b54cc365f2af523de" -dependencies = [ - "aes", - "base64ct", - "ctr", - "curve25519-dalek", - "der-parser", - "derive-deftly", - "derive_more", - "digest", - "ed25519-dalek", - "educe", - "getrandom 0.4.2", - "hex", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_core 0.6.4", - "rand_core 0.9.3", - "rand_jitter", - "rdrand", - "rsa", - "safelog", - "serde", - "sha1", - "sha2", - "sha3", - "signature", - "subtle", - "thiserror 2.0.16", - "tor-error", - "tor-memquota-cost", - "visibility", - "x25519-dalek", - "zeroize", -] - -[[package]] -name = "tor-log-ratelim" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f1cd642180923d12e3fab5996b4aa2189718da7f465df6eb196ce2b9c70e293" -dependencies = [ - "futures", - "humantime", - "thiserror 2.0.16", - "tor-error", - "tor-rtcompat", - "tracing", - "weak-table", -] - -[[package]] -name = "tor-memquota" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "599daea60fd3272eb72a795d1c593b45bbe15343cbc702340a81db124c06eed5" -dependencies = [ - "cfg-if", - "derive-deftly", - "derive_more", - "dyn-clone", - "educe", - "futures", - "itertools 0.14.0", - 
"paste", - "pin-project", - "serde", - "slotmap-careful", - "static_assertions", - "sysinfo", - "thiserror 2.0.16", - "tor-async-utils", - "tor-basic-utils", - "tor-config", - "tor-error", - "tor-log-ratelim", - "tor-memquota-cost", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "tor-memquota-cost" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd92b07c0fc24e6d8166a5ff45e5b8654e68d89743c46d01889a16ab74c0b578" -dependencies = [ - "derive-deftly", - "itertools 0.14.0", - "paste", - "void", -] - -[[package]] -name = "tor-netdir" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41be8f47f521fc95206d2ba5facac8fb1a5b5b82169bd41ebeecdf46d1e77246" -dependencies = [ - "async-trait", - "bitflags 2.9.4", - "derive_more", - "futures", - "humantime", - "itertools 0.14.0", - "num_enum", - "rand 0.9.2", - "serde", - "strum", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-netdoc", - "tor-protover", - "tor-units", - "tracing", - "typed-index-collections", -] - -[[package]] -name = "tor-netdoc" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea8bce73d2c78bd78a2a927336ca639cf6bd5d8ad092ebcd0b3fdeaa47dcc77e" -dependencies = [ - "amplify", - "base64ct", - "cipher", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "digest", - "educe", - "enumset", - "hex", - "humantime", - "itertools 0.14.0", - "memchr", - "paste", - "phf", - "saturating-time", - "serde", - "serde_with", - "signature", - "smallvec", - "strum", - "subtle", - "thiserror 2.0.16", - "time", - "tinystr", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-cert", - "tor-checkable", - "tor-error", - "tor-llcrypto", - "tor-protover", - "void", - "zeroize", -] - -[[package]] -name = "tor-persist" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "507ab4b6a3d59ed0df5804eeed66dcacde75e3be13d3694216cdfdb666bce625" -dependencies = [ - "derive-deftly", - "derive_more", - "filetime", - "fs-mistrust", - "fslock", - "futures", - "itertools 0.14.0", - "oneshot-fused-workaround", - "paste", - "sanitize-filename", - "serde", - "serde_json", - "thiserror 2.0.16", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-error", - "tracing", - "void", -] - -[[package]] -name = "tor-proto" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfc552d535d36539d5782bb02028590bc472d219e49da51a96810725e80ff56" -dependencies = [ - "amplify", - "async-trait", - "asynchronous-codec", - "bitvec", - "bytes", - "caret", - "cfg-if", - "cipher", - "coarsetime", - "criterion-cycles-per-byte", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more", - "digest", - "educe", - "enum_dispatch", - "futures", - "futures-util", - "hkdf", - "hmac", - "itertools 0.14.0", - "nonany", - "oneshot-fused-workaround", - "pin-project", - "postage", - "rand 0.9.2", - "rand_core 0.9.3", - "safelog", - "slotmap-careful", - "smallvec", - "static_assertions", - "subtle", - "sync_wrapper 1.0.2", - "thiserror 2.0.16", - "tokio", - "tokio-util", - "tor-async-utils", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-cert", - "tor-checkable", - "tor-config", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-log-ratelim", - "tor-memquota", - "tor-protover", - "tor-relay-crypto", - "tor-rtcompat", - "tor-rtmock", - "tor-units", - "tracing", - "typenum", - "visibility", - "void", - "zeroize", -] - -[[package]] -name = "tor-protover" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aed88527d070c4b7ea4e55a36d2d56d0500e30ca66298b5264f047f7f2f89cfa" -dependencies = [ - "caret", - "paste", - "serde_with", - "thiserror 2.0.16", - "tor-basic-utils", - "tor-bytes", -] - -[[package]] -name = "tor-relay-crypto" -version = "0.40.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7e57e9f71b22ae1df63dbccc8e428cb07feec0abd654735109fa563c10bbb90" -dependencies = [ - "derive-deftly", - "derive_more", - "humantime", - "tor-cert", - "tor-checkable", - "tor-error", - "tor-key-forge", - "tor-keymgr", - "tor-llcrypto", - "tor-persist", -] - -[[package]] -name = "tor-relay-selection" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a372072ac9dea7d17e49693cc3f3ae77b3abf8125630516c9f2d622239b1920a" -dependencies = [ - "rand 0.9.2", - "serde", - "tor-basic-utils", - "tor-linkspec", - "tor-netdir", - "tor-netdoc", -] - -[[package]] -name = "tor-rtcompat" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14428b930e59003e801c0c32697c0aeb9b0495ad33ecbe8c6753bdb596233270" -dependencies = [ - "async-native-tls", - "async-trait", - "async_executors", - "asynchronous-codec", - "cfg-if", - "coarsetime", - "derive_more", - "dyn-clone", - "educe", - "futures", - "hex", - "libc", - "native-tls", - "paste", - "pin-project", - "socket2 0.6.3", - "thiserror 2.0.16", - "tokio", - "tokio-util", - "tor-error", - "tor-general-addr", - "tracing", - "void", - "zeroize", -] - -[[package]] -name = "tor-rtmock" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2da91a432cdaee8a93e0bb21b02f3e9c7667832ccbb4b54e00d9c1214638e70" -dependencies = [ - "amplify", - "assert_matches", - "async-trait", - "derive-deftly", - "derive_more", - "educe", - "futures", - "humantime", - "itertools 0.14.0", - "oneshot-fused-workaround", - "pin-project", - "priority-queue", - "slotmap-careful", - "strum", - "thiserror 2.0.16", - "tor-error", - "tor-general-addr", - "tor-rtcompat", - "tracing", - "tracing-test", - "void", -] - -[[package]] -name = "tor-socksproto" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"adbc9115a2f506d9bb86ae4446f0ca70eb523dc2f5e900a33582e7c39decc23a" -dependencies = [ - "amplify", - "caret", - "derive-deftly", - "educe", - "safelog", - "subtle", - "thiserror 2.0.16", - "tor-bytes", - "tor-error", -] - -[[package]] -name = "tor-units" -version = "0.40.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da90e93b4b4aa4ec356ecbe9e19aced36fdd655e94ca459d1915120d873363f0" -dependencies = [ - "derive-deftly", - "derive_more", - "serde", - "thiserror 2.0.16", - "tor-memquota", -] - [[package]] name = "tower" version = "0.4.13" @@ -6734,7 +2395,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand 0.8.5", + "rand", "slab", "tokio", "tokio-util", @@ -6743,59 +2404,24 @@ dependencies = [ "tracing", ] -[[package]] -name = "tower" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper 1.0.2", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" -dependencies = [ - "bitflags 2.9.4", - "bytes", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "iri-string", - "pin-project-lite", - "tower 0.5.2", - "tower-layer", - "tower-service", -] - [[package]] name = "tower-layer" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-service" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -6803,20 +2429,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -6824,9 +2450,9 @@ dependencies = [ [[package]] name = "tracing-journald" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0b4143302cf1022dac868d521e36e8b27691f72c84b3311750d5188ebba657" +checksum = "ba316a74e8fc3c3896a850dba2375928a9fa171b085ecddfc7c054d39970f3fd" dependencies = [ "libc", "tracing-core", @@ -6872,14 +2498,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.20" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +checksum = 
"ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", "once_cell", - "regex-automata", + "regex", "sharded-slab", "smallvec", "thread_local", @@ -6888,27 +2514,6 @@ dependencies = [ "tracing-log 0.2.0", ] -[[package]] -name = "tracing-test" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19a4c448db514d4f24c5ddb9f73f2ee71bfb24c526cf0c570ba142d1119e0051" -dependencies = [ - "tracing-core", - "tracing-subscriber", - "tracing-test-macro", -] - -[[package]] -name = "tracing-test-macro" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad06847b7afb65c7866a36664b75c40b895e318cea4f71299f013fb22965329d" -dependencies = [ - "quote", - "syn 2.0.106", -] - [[package]] name = "try-lock" version = "0.2.5" @@ -6929,73 +2534,51 @@ dependencies = [ "libloading 0.7.4", "log", "nix 0.26.4", - "reqwest 0.11.27", - "schemars 0.8.22", + "reqwest", + "schemars", "serde", - "socket2 0.5.10", + "socket2", "ssri", "tempfile", "tokio", "tracing", "widestring", - "windows 0.48.0", + "windows", "zip", ] -[[package]] -name = "typed-index-collections" -version = "3.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "898160f1dfd383b4e92e17f0512a7d62f3c51c44937b23b6ffc3a1614a8eaccd" -dependencies = [ - "bincode", - "serde", -] - [[package]] name = "typenum" -version = "1.18.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] -name = "uncased" -version = "0.9.10" +name = "unicode-bidi" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" -dependencies = [ - "version_check", -] +checksum = 
"08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.19" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] -name = "unicode-segmentation" -version = "1.13.2" +name = "unicode-normalization" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9629274872b2bfaf8d66f5f15725007f635594914870f65218920345aa11aa8c" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] [[package]] name = "unicode-width" -version = "0.1.14" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - -[[package]] -name = "unicode-width" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "universal-hash" @@ -7013,52 +2596,37 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" -[[package]] -name = "unty" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" - [[package]] name = "url" -version = "2.5.8" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", "percent-encoding", - "serde", ] -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - [[package]] name = "utf8parse" -version = "0.2.2" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.18.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" dependencies = [ - "js-sys", "serde", - "wasm-bindgen", ] [[package]] name = "valuable" -version = "0.1.1" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "vcpkg" @@ -7068,36 +2636,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.5" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "visibility" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "void" 
-version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "want" @@ -7110,74 +2651,52 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" +version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasi" -version = "0.14.7+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" -dependencies = [ - "wasip2", -] - -[[package]] -name = "wasip2" -version = "1.0.1+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" -dependencies = [ - "wit-bindgen 0.46.0", -] - -[[package]] -name = "wasip3" -version = "0.4.0+wasi-0.3.0-rc-2026-01-06" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" -dependencies = [ - "wit-bindgen 0.51.0", -] - -[[package]] -name = "wasix" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1757e0d1f8456693c7e5c6c629bdb54884e032aa0bb53c155f6a39f94440d332" -dependencies = [ - "wasi 0.11.1+wasi-snapshot-preview1", -] +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.116" +version = "0.2.90" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dc0882f7b5bb01ae8c5215a1230832694481c1a4be062fd410e12ea3da5b631" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" dependencies = [ "cfg-if", - "once_cell", - "rustversion", "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.48", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.66" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19280959e2844181895ef62f065c63e0ca07ece4771b53d89bfdb967d97cbf05" +checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" dependencies = [ + "cfg-if", "js-sys", "wasm-bindgen", + "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.116" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75973d3066e01d035dbedaad2864c398df42f8dd7b1ea057c35b8407c015b537" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7185,95 +2704,33 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.116" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91af5e4be765819e0bcfee7322c14374dc821e35e72fa663a830bbc7dc199eac" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" dependencies = [ - "bumpalo", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", + "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.116" +version = "0.2.90" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9bf0406a78f02f336bf1e451799cca198e8acde4ffa278f0fb20487b150a633" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-encoder" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" -dependencies = [ - "leb128fmt", - "wasmparser", -] - -[[package]] -name = "wasm-metadata" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" -dependencies = [ - "anyhow", - "indexmap 2.11.4", - "wasm-encoder", - "wasmparser", -] - -[[package]] -name = "wasmparser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" -dependencies = [ - "bitflags 2.9.4", - "hashbrown 0.15.5", - "indexmap 2.11.4", - "semver", -] - -[[package]] -name = "weak-table" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "323f4da9523e9a669e1eaf9c6e763892769b1d38c623913647bfdc1532fe4549" +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "web-sys" -version = "0.3.93" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749466a37ee189057f54748b200186b59a03417a117267baf3fd89cecc9fb837" +checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" dependencies = [ "js-sys", "wasm-bindgen", ] -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-roots" -version = "1.0.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b130c0d2d49f8b6889abc456e795e82525204f27c42cf767cf0d7734e089b8" -dependencies = [ - "rustls-pki-types", -] - [[package]] name = "which" version = "4.4.2" @@ -7283,14 +2740,14 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.44", + "rustix", ] [[package]] name = "widestring" -version = "1.2.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d" +checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" [[package]] name = "winapi" @@ -7308,15 +2765,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -[[package]] -name = "winapi-util" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" -dependencies = [ - "windows-sys 0.61.0", -] - [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" @@ -7332,145 +2780,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "windows" -version = "0.61.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" -dependencies = [ - "windows-collections", - "windows-core 0.61.2", - "windows-future", - "windows-link 0.1.3", - "windows-numerics", -] - -[[package]] -name = "windows-collections" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" -dependencies = [ - "windows-core 0.61.2", -] - -[[package]] -name = "windows-core" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link 0.1.3", - "windows-result 0.3.4", - "windows-strings 0.4.2", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link 0.2.1", - "windows-result 0.4.1", - "windows-strings 0.5.1", -] - -[[package]] -name = "windows-future" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" -dependencies = [ - "windows-core 0.61.2", - "windows-link 0.1.3", - "windows-threading", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-numerics" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" -dependencies = [ - "windows-core 0.61.2", - "windows-link 0.1.3", -] - -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link 0.1.3", -] - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link 0.2.1", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.3", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link 0.2.1", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -7486,34 +2795,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.3", -] - -[[package]] -name = "windows-sys" -version = "0.61.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" -dependencies = [ - "windows-link 0.2.1", + "windows-targets 0.52.0", ] [[package]] @@ -7533,44 +2815,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" -dependencies = [ - "windows-link 0.1.3", - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", -] - -[[package]] -name = "windows-threading" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" -dependencies = [ - "windows-link 0.1.3", + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", ] [[package]] @@ -7581,15 +2836,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = 
"0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" @@ -7599,15 +2848,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" @@ -7617,27 +2860,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" @@ -7647,15 +2872,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" @@ -7665,15 +2884,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" @@ -7683,15 +2896,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" @@ -7701,30 +2908,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" - -[[package]] -name = "winnow" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" -dependencies = [ - "memchr", -] - -[[package]] -name = "winnow" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09dac053f1cd375980747450bfc7250c264eaae0583872e845c0c7cd578872b5" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winreg" @@ -7736,203 +2922,38 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "wit-bindgen" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" -dependencies = [ - "wit-bindgen-rust-macro", -] - -[[package]] -name = "wit-bindgen-core" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" -dependencies = [ - "anyhow", - "heck", - "wit-parser", -] - -[[package]] -name = "wit-bindgen-rust" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" -dependencies = [ - "anyhow", - "heck", - "indexmap 2.11.4", - "prettyplease", - "syn 2.0.106", - "wasm-metadata", - "wit-bindgen-core", - "wit-component", -] - -[[package]] -name = "wit-bindgen-rust-macro" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" -dependencies = [ - "anyhow", - "prettyplease", - "proc-macro2", - "quote", - "syn 2.0.106", - "wit-bindgen-core", - "wit-bindgen-rust", -] - -[[package]] -name = "wit-component" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" -dependencies = [ - "anyhow", - "bitflags 2.9.4", - "indexmap 2.11.4", - "log", - "serde", - "serde_derive", - "serde_json", - "wasm-encoder", - "wasm-metadata", - "wasmparser", - "wit-parser", -] - -[[package]] -name = "wit-parser" -version = "0.244.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" -dependencies = [ - "anyhow", - "id-arena", - "indexmap 2.11.4", - "log", - "semver", - "serde", - "serde_derive", - "serde_json", - "unicode-xid", - "wasmparser", -] - -[[package]] -name = "writeable" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - [[package]] name = "x25519-dalek" -version = "2.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +checksum = "fb66477291e7e8d2b0ff1bcb900bf29489a9692816d79874bea351e7a8b6de96" dependencies = [ "curve25519-dalek", - "rand_core 0.6.4", + "rand_core", "serde", "zeroize", ] [[package]] name = "xxhash-rust" -version = "0.8.15" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" +checksum = "53be06678ed9e83edb1745eb72efc0bbcd7b5c3c35711a860906aed827a13d61" [[package]] -name = "yoke" -version = "0.8.0" +name = "yaml-rust" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = 
"zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", - "synstructure", + "linked-hash-map", ] [[package]] name = "zeroize" -version = "1.8.2" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" dependencies = [ "zeroize_derive", ] @@ -7945,40 +2966,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "zerotrie" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -7998,7 +2986,7 @@ dependencies = [ "pbkdf2", "sha1", "time", - "zstd 0.11.2+zstd.1.5.2", + "zstd", ] [[package]] @@ -8007,16 +2995,7 @@ version = 
"0.11.2+zstd.1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" dependencies = [ - "zstd-safe 5.0.2+zstd.1.5.2", -] - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe 7.2.4", + "zstd-safe", ] [[package]] @@ -8029,20 +3008,11 @@ dependencies = [ "zstd-sys", ] -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - [[package]] name = "zstd-sys" -version = "2.0.16+zstd.1.5.7" +version = "2.0.9+zstd.1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 362ba2b..44981a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,8 +2,3 @@ members = ["burrow", "tun"] resolver = "2" exclude = ["burrow-gtk"] - -[profile.release] -lto = true -panic = "abort" -opt-level = "z" diff --git a/Dockerfile b/Dockerfile index 3497e22..9f54478 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/rust:1.85-slim-bookworm AS builder +FROM docker.io/library/rust:1.76.0-slim-bookworm AS builder ARG TARGETPLATFORM ARG LLVM_VERSION=16 @@ -8,11 +8,11 @@ ENV KEYRINGS /etc/apt/keyrings RUN set -eux && \ mkdir -p $KEYRINGS && \ apt-get update && \ - apt-get install --no-install-recommends -y gpg curl busybox make musl-dev && \ + apt-get install --no-install-recommends -y gpg curl musl-dev && \ curl --proto '=https' --tlsv1.2 -sSf https://apt.llvm.org/llvm-snapshot.gpg.key | gpg 
--dearmor --output $KEYRINGS/llvm.gpg && \ echo "deb [signed-by=$KEYRINGS/llvm.gpg] http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-$LLVM_VERSION main" > /etc/apt/sources.list.d/llvm.list && \ apt-get update && \ - apt-get install --no-install-recommends -y clang-$LLVM_VERSION llvm-$LLVM_VERSION lld-$LLVM_VERSION build-essential sqlite3 libsqlite3-dev musl musl-tools musl-dev protobuf-compiler libprotobuf-dev && \ + apt-get install --no-install-recommends -y clang-$LLVM_VERSION llvm-$LLVM_VERSION lld-$LLVM_VERSION && \ ln -s clang-$LLVM_VERSION /usr/bin/clang && \ ln -s clang /usr/bin/clang++ && \ ln -s lld-$LLVM_VERSION /usr/bin/ld.lld && \ @@ -25,47 +25,26 @@ RUN set -eux && \ rm -rf /var/lib/apt/lists/* RUN case $TARGETPLATFORM in \ - "linux/arm64") LLVM_TARGET=aarch64-unknown-linux-musl ;; \ - "linux/amd64") LLVM_TARGET=x86_64-unknown-linux-musl ;; \ - *) exit 1 ;; \ + "linux/arm64") LLVM_TARGET=aarch64-unknown-linux-musl ;; \ + "linux/amd64") LLVM_TARGET=x86_64-unknown-linux-musl ;; \ + *) exit 1 ;; \ esac && \ rustup target add $LLVM_TARGET -ARG SQLITE_VERSION=3460000 - -RUN case $TARGETPLATFORM in \ - "linux/arm64") LLVM_TARGET=aarch64-unknown-linux-musl MUSL_TARGET=aarch64-linux-musl ;; \ - "linux/amd64") LLVM_TARGET=x86_64-unknown-linux-musl MUSL_TARGET=x86_64-linux-musl ;; \ - *) exit 1 ;; \ - esac && \ - curl --proto '=https' --tlsv1.2 -sSfO https://www.sqlite.org/2024/sqlite-autoconf-$SQLITE_VERSION.tar.gz && \ - tar xf sqlite-autoconf-$SQLITE_VERSION.tar.gz && \ - cd sqlite-autoconf-$SQLITE_VERSION && \ - ./configure --disable-shared --disable-dependency-tracking \ - CC="clang-$LLVM_VERSION -target $LLVM_TARGET" \ - CFLAGS="-I/usr/local/include -I/usr/include/$MUSL_TARGET" \ - LDFLAGS="-L/usr/local/lib -L/usr/lib/$MUSL_TARGET -L/lib/$MUSL_TARGET" && \ - make && \ - make install && \ - cd .. 
&& \ - rm -rf sqlite-autoconf-$SQLITE_VERSION sqlite-autoconf-$SQLITE_VERSION.tar.gz - ENV CC_x86_64_unknown_linux_musl=clang-$LLVM_VERSION \ AR_x86_64_unknown_linux_musl=llvm-ar-$LLVM_VERSION \ CC_aarch64_unknown_linux_musl=clang-$LLVM_VERSION \ AR_aarch64_unknown_linux_musl=llvm-ar-$LLVM_VERSION \ CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_RUSTFLAGS="-L/usr/lib/x86_64-linux-musl -L/lib/x86_64-linux-musl -C linker=rust-lld" \ CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_RUSTFLAGS="-L/usr/lib/aarch64-linux-musl -L/lib/aarch64-linux-musl -C linker=rust-lld" \ - SQLITE3_STATIC=1 \ - SQLITE3_INCLUDE_DIR=/usr/local/include \ - SQLITE3_LIB_DIR=/usr/local/lib + CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse COPY . . RUN case $TARGETPLATFORM in \ - "linux/arm64") LLVM_TARGET=aarch64-unknown-linux-musl ;; \ - "linux/amd64") LLVM_TARGET=x86_64-unknown-linux-musl ;; \ - *) exit 1 ;; \ + "linux/arm64") LLVM_TARGET=aarch64-unknown-linux-musl ;; \ + "linux/amd64") LLVM_TARGET=x86_64-unknown-linux-musl ;; \ + *) exit 1 ;; \ esac && \ cargo install --path burrow --target $LLVM_TARGET @@ -74,8 +53,7 @@ WORKDIR /tmp/rootfs RUN set -eux && \ mkdir -p ./bin ./etc ./tmp ./data && \ mv /usr/local/cargo/bin/burrow ./bin/burrow && \ - cp /bin/busybox ./bin/busybox && \ - echo 'burrow:x:10001:10001::/tmp:/bin/busybox' > ./etc/passwd && \ + echo 'burrow:x:10001:10001::/tmp:/sbin/nologin' > ./etc/passwd && \ echo 'burrow:x:10001:' > ./etc/group && \ chown -R 10001:10001 ./tmp ./data && \ chmod 0777 ./tmp @@ -94,6 +72,4 @@ USER 10001:10001 COPY --from=builder /tmp/rootfs / WORKDIR /data -EXPOSE 8080 - -CMD ["/bin/burrow", "auth-server"] +ENTRYPOINT ["/bin/burrow"] diff --git a/Makefile b/Makefile index 1a0488c..97d2d5a 100644 --- a/Makefile +++ b/Makefile @@ -1,38 +1,21 @@ tun := $(shell ifconfig -l | sed 's/ /\n/g' | grep utun | tail -n 1) -cargo_console := env RUST_BACKTRACE=1 RUST_LOG=debug RUSTFLAGS='--cfg tokio_unstable' cargo run --all-features -- -cargo_norm := env RUST_BACKTRACE=1 RUST_LOG=debug 
cargo run -- -sudo_cargo_console := sudo -E env RUST_BACKTRACE=1 RUST_LOG=debug RUSTFLAGS='--cfg tokio_unstable' cargo run --all-features -- -sudo_cargo_norm := sudo -E env RUST_BACKTRACE=1 RUST_LOG=debug cargo run -- +cargo_console := RUST_BACKTRACE=1 RUST_LOG=debug RUSTFLAGS='--cfg tokio_unstable' cargo run --all-features +cargo_norm := RUST_BACKTRACE=1 RUST_LOG=debug cargo run check: @cargo check build: - @cargo build - -bep-check: - @python3 Scripts/check-bep-metadata.py - -bep-list: - @Scripts/bep list + @cargo run build daemon-console: - @$(sudo_cargo_console) daemon + @$(cargo_console) daemon daemon: - @$(sudo_cargo_norm) daemon + @$(cargo_norm) daemon start: - @$(sudo_cargo_norm) start - -stop: - @$(cargo_norm) stop - -status: - @$(cargo_norm) server-status - -tunnel-config: - @$(cargo_norm) tunnel-config + @$(cargo_norm) start test-dns: @sudo route delete 8.8.8.8 diff --git a/README.md b/README.md index ba4f50c..7492039 100644 --- a/README.md +++ b/README.md @@ -5,29 +5,16 @@ Burrow is an open source tool for burrowing through firewalls, built by teenagers at [Hack Club](https://hackclub.com/). `burrow` provides a simple command-line tool to open virtual interfaces and direct traffic through them. -Routine verification now runs unprivileged with `cargo test --workspace --all-features`; only tunnel startup needs elevation. - -The repository now carries its own design and deployment record: - -- [Constitution](./CONSTITUTION.md) -- [Agent Instructions](./AGENTS.md) -- [Burrow Evolution](./evolution/README.md) -- [WireGuard Rust Lineage](./docs/WIREGUARD_LINEAGE.md) -- [Protocol Roadmap](./docs/PROTOCOL_ROADMAP.md) -- [Forward Email Runbook](./docs/FORWARDEMAIL.md) ## Contributing -Burrow is fully open source, you can fork the repo and start contributing easily. 
For more information and in-depth discussions, visit the `#burrow` channel on the [Hack Club Slack](https://hackclub.com/slack/), here you can ask for help and talk with other people interested in burrow. Checkout [GETTING_STARTED.md](./docs/GETTING_STARTED.md) for build instructions and [GTK_APP.md](./docs/GTK_APP.md) for the Linux app. Forge and deployment scaffolding live in [`flake.nix`](./flake.nix), [`nixos/`](./nixos), and [`.forgejo/workflows/`](./.forgejo/workflows/). Hosted mail backup operations live in [`docs/FORWARDEMAIL.md`](./docs/FORWARDEMAIL.md) and [`Tools/forwardemail-custom-s3.sh`](./Tools/forwardemail-custom-s3.sh). - -Agent and governance-sensitive work should start with [AGENTS.md](./AGENTS.md), [CONSTITUTION.md](./CONSTITUTION.md), and the relevant BEPs under [`evolution/proposals/`](./evolution/proposals/). Identity and bootstrap metadata now live in [`contributors.nix`](./contributors.nix). +Burrow is fully open source, you can fork the repo and start contributing easily. For more information and in-depth discussions, visit the `#burrow` channel on the [Hack Club Slack](https://hackclub.com/slack/), here you can ask for help and talk with other people interested in burrow! For more information on how to contribute, please see [CONTRIBUTING.md] The project structure is divided in the following folders: ``` Apple/ # Xcode project for burrow on macOS and iOS burrow/ # Higher-level API library for tun and tun-async -burrow-gtk/ # GTK project for burrow on Linux tun/ # Low-level interface to OS networking src/ tokio/ # Async/Tokio code diff --git a/Scripts/_burrow-flake.sh b/Scripts/_burrow-flake.sh deleted file mode 100755 index ba4e372..0000000 --- a/Scripts/_burrow-flake.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env bash - -burrow_require_cmd() { - if ! 
command -v "$1" >/dev/null 2>&1; then - echo "missing required command: $1" >&2 - exit 1 - fi -} - -burrow_cleanup_flake_tmpdirs() { - if [[ "${#BURROW_FLAKE_TMPDIRS[@]}" -eq 0 ]]; then - return - fi - rm -rf "${BURROW_FLAKE_TMPDIRS[@]}" -} - -burrow_prepare_flake_ref() { - local input="${1:-.}" - - case "${input}" in - path:*|git+*|github:*|tarball+*|http://*|https://*) - printf '%s\n' "${input}" - return 0 - ;; - esac - - local resolved - resolved="$(cd "${input}" && pwd)" - - local cache_root="${HOME}/.cache/burrow" - mkdir -p "${cache_root}" - - local copy_root - copy_root="$(mktemp -d "${cache_root}/flake-XXXXXX")" - mkdir -p "${copy_root}/repo" - - rsync -a \ - --delete \ - --exclude '.git' \ - --exclude '.direnv' \ - --exclude 'result' \ - --exclude 'burrow.sock' \ - --exclude 'node_modules' \ - --exclude 'target' \ - --exclude 'build' \ - "${resolved}/" "${copy_root}/repo/" - - BURROW_FLAKE_TMPDIRS+=("${copy_root}") - printf 'path:%s/repo\n' "${copy_root}" -} - -burrow_resolve_image_artifact() { - local store_path="$1" - - if [[ -f "${store_path}" ]]; then - printf '%s\n' "${store_path}" - return 0 - fi - - if [[ -d "${store_path}" ]]; then - local candidate - candidate="$( - find "${store_path}" -type f \ - \( -name '*.raw' -o -name '*.raw.*' -o -name '*.img' -o -name '*.img.*' \) \ - | sort \ - | head -n1 - )" - if [[ -n "${candidate}" ]]; then - printf '%s\n' "${candidate}" - return 0 - fi - fi - - echo "unable to locate disk image artifact under ${store_path}" >&2 - exit 1 -} - -burrow_detect_compression() { - local artifact="$1" - - case "${artifact}" in - *.bz2) - printf 'bz2\n' - ;; - *.xz) - printf 'xz\n' - ;; - *.zst|*.zstd) - printf 'zstd\n' - ;; - *) - printf '\n' - ;; - esac -} diff --git a/Scripts/authentik-sync-1password-oidc.sh b/Scripts/authentik-sync-1password-oidc.sh deleted file mode 100755 index f523d9a..0000000 --- a/Scripts/authentik-sync-1password-oidc.sh +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - 
-authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_ONEPASSWORD_APPLICATION_SLUG:-onepassword}" -application_name="${AUTHENTIK_ONEPASSWORD_APPLICATION_NAME:-1Password}" -provider_name="${AUTHENTIK_ONEPASSWORD_PROVIDER_NAME:-1Password}" -template_slug="${AUTHENTIK_ONEPASSWORD_TEMPLATE_SLUG:-ts}" -client_id="${AUTHENTIK_ONEPASSWORD_CLIENT_ID:-1password.burrow.net}" -launch_url="${AUTHENTIK_ONEPASSWORD_LAUNCH_URL:-https://burrow-team.1password.com/}" -redirect_uris_json="${AUTHENTIK_ONEPASSWORD_REDIRECT_URIS_JSON:-[ - \"https://burrow-team.1password.com/sso/oidc/redirect/\", - \"onepassword://sso/oidc/redirect\" -]}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-1password-oidc.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_ONEPASSWORD_APPLICATION_SLUG - AUTHENTIK_ONEPASSWORD_APPLICATION_NAME - AUTHENTIK_ONEPASSWORD_PROVIDER_NAME - AUTHENTIK_ONEPASSWORD_TEMPLATE_SLUG - AUTHENTIK_ONEPASSWORD_CLIENT_ID - AUTHENTIK_ONEPASSWORD_LAUNCH_URL - AUTHENTIK_ONEPASSWORD_REDIRECT_URIS_JSON -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if ! 
printf '%s' "$redirect_uris_json" | jq -e 'type == "array" and length > 0' >/dev/null; then - echo "error: AUTHENTIK_ONEPASSWORD_REDIRECT_URIS_JSON must be a non-empty JSON array" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -api_with_status() { - local method="$1" - local path="$2" - local data="${3:-}" - local response_file status - - response_file="$(mktemp)" - trap 'rm -f "$response_file"' RETURN - - if [[ -n "$data" ]]; then - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - )" - else - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - )" - fi - - printf '%s\n' "$status" - cat "$response_file" -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -wait_for_authentik - -template_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c --arg template_slug "$template_slug" '.results[]? 
| select(.assigned_application_slug == $template_slug)' \ - | head -n1 -)" - -if [[ -z "$template_provider" ]]; then - echo "error: could not resolve the Authentik OAuth provider template ${template_slug}" >&2 - exit 1 -fi - -authorization_flow="$(printf '%s\n' "$template_provider" | jq -r '.authorization_flow')" -invalidation_flow="$(printf '%s\n' "$template_provider" | jq -r '.invalidation_flow')" -property_mappings="$(printf '%s\n' "$template_provider" | jq -c '.property_mappings')" -signing_key="$(printf '%s\n' "$template_provider" | jq -r '.signing_key')" - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg authorization_flow "$authorization_flow" \ - --arg invalidation_flow "$invalidation_flow" \ - --arg client_id "$client_id" \ - --arg signing_key "$signing_key" \ - --argjson property_mappings "$property_mappings" \ - --argjson redirect_uris "$redirect_uris_json" \ - '{ - name: $name, - authorization_flow: $authorization_flow, - invalidation_flow: $invalidation_flow, - client_type: "public", - client_id: $client_id, - include_claims_in_id_token: true, - redirect_uris: ($redirect_uris | map({matching_mode: "strict", url: .})), - property_mappings: $property_mappings, - signing_key: $signing_key, - issuer_mode: "per_provider", - sub_mode: "hashed_user_id" - }' -)" - -existing_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -)" - -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/oauth2/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/oauth2/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: 1Password OIDC provider did not return a primary key" >&2 - exit 1 -fi - -application_payload="$( - jq -n \ - --arg name "$application_name" \ - --arg slug "$application_slug" \ - --arg provider "$provider_pk" \ - --arg launch_url "$launch_url" \ - '{ - name: $name, - slug: $slug, - provider: ($provider | tonumber), - meta_launch_url: $launch_url, - open_in_new_tab: true, - policy_engine_mode: "any" - }' -)" - -existing_application="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? 
| select(.slug == $slug)' \ - | head -n1 -)" - -if [[ -n "$existing_application" ]]; then - application_pk="$(printf '%s\n' "$existing_application" | jq -r '.pk')" -else - create_application_result="$( - api_with_status POST "/api/v3/core/applications/" "$application_payload" - )" - create_application_status="$(printf '%s\n' "$create_application_result" | sed -n '1p')" - create_application_body="$(printf '%s\n' "$create_application_result" | sed '1d')" - - if [[ "$create_application_status" =~ ^20[01]$ ]]; then - application_pk="$(printf '%s\n' "$create_application_body" | jq -r '.pk // empty')" - elif [[ "$create_application_status" == "400" ]] && printf '%s\n' "$create_application_body" | jq -e ' - (.slug // [] | index("Application with this slug already exists.")) != null - or (.provider // [] | index("Application with this provider already exists.")) != null - ' >/dev/null; then - application_pk="existing-duplicate" - else - printf '%s\n' "$create_application_body" >&2 - echo "error: could not reconcile Authentik application ${application_slug}" >&2 - exit 1 - fi -fi - -if [[ -z "${application_pk:-}" ]]; then - echo "error: 1Password OIDC application did not return a primary key" >&2 - exit 1 -fi - -for _ in $(seq 1 30); do - if curl -fsS "${authentik_url}/application/o/${application_slug}/.well-known/openid-configuration" >/dev/null 2>&1; then - echo "Synced Authentik 1Password OIDC application ${application_slug} (${application_name})." - exit 0 - fi - sleep 2 -done - -echo "warning: 1Password OIDC issuer document for ${application_slug} was not immediately readable; keeping reconciled config." >&2 -echo "Synced Authentik 1Password OIDC application ${application_slug} (${application_name})." 
diff --git a/Scripts/authentik-sync-burrow-directory.sh b/Scripts/authentik-sync-burrow-directory.sh deleted file mode 100644 index 277c5f4..0000000 --- a/Scripts/authentik-sync-burrow-directory.sh +++ /dev/null @@ -1,263 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -directory_json="${AUTHENTIK_BURROW_DIRECTORY_JSON:-[]}" -users_group="${AUTHENTIK_BURROW_USERS_GROUP:-burrow-users}" -admins_group="${AUTHENTIK_BURROW_ADMINS_GROUP:-burrow-admins}" -forgejo_application_slug="${AUTHENTIK_FORGEJO_APPLICATION_SLUG:-}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-burrow-directory.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_BURROW_DIRECTORY_JSON - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_BURROW_USERS_GROUP - AUTHENTIK_BURROW_ADMINS_GROUP - AUTHENTIK_FORGEJO_APPLICATION_SLUG -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if ! 
printf '%s' "$directory_json" | jq -e 'type == "array"' >/dev/null; then - echo "error: AUTHENTIK_BURROW_DIRECTORY_JSON must be a JSON array" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_group_pk() { - local group_name="$1" - - api GET "/api/v3/core/groups/?page_size=200&search=${group_name}" \ - | jq -r --arg name "$group_name" '.results[]? | select(.name == $name) | .pk // empty' \ - | head -n1 -} - -ensure_group() { - local group_name="$1" - local payload group_pk - - payload="$( - jq -cn \ - --arg name "$group_name" \ - '{name: $name}' - )" - - group_pk="$(lookup_group_pk "$group_name")" - if [[ -n "$group_pk" ]]; then - api PATCH "/api/v3/core/groups/${group_pk}/" "$payload" >/dev/null - else - group_pk="$( - api POST "/api/v3/core/groups/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - if [[ -z "$group_pk" ]]; then - echo "error: could not create Authentik group ${group_name}" >&2 - exit 1 - fi - - printf '%s\n' "$group_pk" -} - -lookup_user_pk() { - local username="$1" - - api GET "/api/v3/core/users/?page_size=200&search=${username}" \ - | jq -r --arg username "$username" '.results[]? 
| select(.username == $username) | .pk // empty' \ - | head -n1 -} - -ensure_user() { - local user_spec="$1" - local username name email is_admin groups_json password_file effective_groups_json group_name - local group_pks_json payload user_pk - - username="$(printf '%s\n' "$user_spec" | jq -r '.username')" - name="$(printf '%s\n' "$user_spec" | jq -r '.name')" - email="$(printf '%s\n' "$user_spec" | jq -r '.email')" - is_admin="$(printf '%s\n' "$user_spec" | jq -r '.isAdmin // false')" - groups_json="$(printf '%s\n' "$user_spec" | jq -c '.groups // []')" - password_file="$(printf '%s\n' "$user_spec" | jq -r '.passwordFile // empty')" - - if [[ -z "$username" || "$username" == "null" || -z "$email" || "$email" == "null" ]]; then - echo "error: each Burrow Authentik user requires username and email" >&2 - exit 1 - fi - - effective_groups_json="$( - printf '%s\n' "$groups_json" \ - | jq -c --arg users_group "$users_group" --arg admins_group "$admins_group" --argjson is_admin "$is_admin" ' - . 
+ [$users_group] + (if $is_admin then [$admins_group] else [] end) | unique - ' - )" - - group_pks_json='[]' - while IFS= read -r group_name; do - group_pk="$(ensure_group "$group_name")" - group_pks_json="$( - jq -cn \ - --argjson current "$group_pks_json" \ - --arg next "$group_pk" \ - '$current + [$next]' - )" - done < <(printf '%s\n' "$effective_groups_json" | jq -r '.[]') - - payload="$( - jq -cn \ - --arg username "$username" \ - --arg name "$name" \ - --arg email "$email" \ - --argjson groups "$group_pks_json" \ - '{ - username: $username, - name: $name, - email: $email, - is_active: true, - path: "users", - groups: $groups - }' - )" - - user_pk="$(lookup_user_pk "$username")" - if [[ -n "$user_pk" ]]; then - api PATCH "/api/v3/core/users/${user_pk}/" "$payload" >/dev/null - else - user_pk="$( - api POST "/api/v3/core/users/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - if [[ -z "$user_pk" ]]; then - echo "error: could not create Authentik user ${username}" >&2 - exit 1 - fi - - if [[ -n "$password_file" ]]; then - if [[ ! -s "$password_file" ]]; then - echo "error: password file for Authentik user ${username} is missing: ${password_file}" >&2 - exit 1 - fi - - api POST "/api/v3/core/users/${user_pk}/set_password/" "$( - jq -cn \ - --arg password "$(tr -d '\r\n' < "$password_file")" \ - '{password: $password}' - )" >/dev/null - fi -} - -lookup_application_pk() { - local slug="$1" - - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -r --arg slug "$slug" '.results[]? 
| select(.slug == $slug) | .pk // empty' \ - | head -n1 -} - -ensure_application_group_binding() { - local application_slug="$1" - local group_name="$2" - local application_pk group_pk existing payload binding_pk - - application_pk="$(lookup_application_pk "$application_slug")" - if [[ -z "$application_pk" ]]; then - echo "warning: could not resolve Authentik application ${application_slug}; skipping application group binding" >&2 - return 0 - fi - - group_pk="$(lookup_group_pk "$group_name")" - if [[ -z "$group_pk" ]]; then - echo "error: could not resolve Authentik group ${group_name}" >&2 - exit 1 - fi - - existing="$( - api GET "/api/v3/policies/bindings/?page_size=200&target=${application_pk}" \ - | jq -c --arg group_pk "$group_pk" '.results[]? | select(.group == $group_pk)' \ - | head -n1 - )" - - payload="$( - jq -cn \ - --arg target "$application_pk" \ - --arg group "$group_pk" \ - '{ - group: $group, - target: $target, - negate: false, - enabled: true, - order: 100, - timeout: 30, - failure_result: false - }' - )" - - if [[ -n "$existing" ]]; then - binding_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/policies/bindings/${binding_pk}/" "$payload" >/dev/null - else - api POST "/api/v3/policies/bindings/" "$payload" >/dev/null - fi -} - -wait_for_authentik -ensure_group "$users_group" >/dev/null -ensure_group "$admins_group" >/dev/null - -while IFS= read -r user_spec; do - ensure_user "$user_spec" -done < <(printf '%s\n' "$directory_json" | jq -c '.[]') - -if [[ -n "$forgejo_application_slug" ]]; then - ensure_application_group_binding "$forgejo_application_slug" "$users_group" -fi - -echo "Synced Burrow Authentik directory." 
diff --git a/Scripts/authentik-sync-forgejo-oidc.sh b/Scripts/authentik-sync-forgejo-oidc.sh deleted file mode 100644 index 7b292dc..0000000 --- a/Scripts/authentik-sync-forgejo-oidc.sh +++ /dev/null @@ -1,250 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_FORGEJO_APPLICATION_SLUG:-git}" -application_name="${AUTHENTIK_FORGEJO_APPLICATION_NAME:-burrow.net}" -provider_name="${AUTHENTIK_FORGEJO_PROVIDER_NAME:-burrow.net}" -client_id="${AUTHENTIK_FORGEJO_CLIENT_ID:-git.burrow.net}" -client_secret="${AUTHENTIK_FORGEJO_CLIENT_SECRET:-}" -launch_url="${AUTHENTIK_FORGEJO_LAUNCH_URL:-https://git.burrow.net/}" -redirect_uris_json="${AUTHENTIK_FORGEJO_REDIRECT_URIS_JSON:-[ - \"https://git.burrow.net/user/oauth2/burrow.net/callback\", - \"https://git.burrow.net/user/oauth2/authentik/callback\", - \"https://git.burrow.net/user/oauth2/GitHub/callback\" -]}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-forgejo-oidc.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_FORGEJO_CLIENT_SECRET - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_FORGEJO_APPLICATION_SLUG - AUTHENTIK_FORGEJO_APPLICATION_NAME - AUTHENTIK_FORGEJO_PROVIDER_NAME - AUTHENTIK_FORGEJO_CLIENT_ID - AUTHENTIK_FORGEJO_LAUNCH_URL - AUTHENTIK_FORGEJO_REDIRECT_URIS_JSON -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -z "$client_secret" || "$client_secret" == PENDING* ]]; then - echo "Forgejo OIDC client secret is not configured; skipping Authentik Forgejo sync." >&2 - exit 0 -fi - -if ! 
printf '%s' "$redirect_uris_json" | jq -e 'type == "array" and length > 0' >/dev/null; then - echo "error: AUTHENTIK_FORGEJO_REDIRECT_URIS_JSON must be a non-empty JSON array" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -api_with_status() { - local method="$1" - local path="$2" - local data="${3:-}" - local response_file status - - response_file="$(mktemp)" - trap 'rm -f "$response_file"' RETURN - - if [[ -n "$data" ]]; then - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - )" - else - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - )" - fi - - printf '%s\n' "$status" - cat "$response_file" -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -wait_for_authentik - -template_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c '.results[]? 
| select(.assigned_application_slug == "ts")' \ - | head -n1 -)" - -if [[ -z "$template_provider" ]]; then - echo "error: could not resolve the Burrow Tailnet OAuth provider template" >&2 - exit 1 -fi - -authorization_flow="$(printf '%s\n' "$template_provider" | jq -r '.authorization_flow')" -invalidation_flow="$(printf '%s\n' "$template_provider" | jq -r '.invalidation_flow')" -property_mappings="$(printf '%s\n' "$template_provider" | jq -c '.property_mappings')" -signing_key="$(printf '%s\n' "$template_provider" | jq -r '.signing_key')" - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg authorization_flow "$authorization_flow" \ - --arg invalidation_flow "$invalidation_flow" \ - --arg client_id "$client_id" \ - --arg client_secret "$client_secret" \ - --arg signing_key "$signing_key" \ - --argjson property_mappings "$property_mappings" \ - --argjson redirect_uris "$redirect_uris_json" \ - '{ - name: $name, - authorization_flow: $authorization_flow, - invalidation_flow: $invalidation_flow, - client_type: "confidential", - client_id: $client_id, - client_secret: $client_secret, - include_claims_in_id_token: true, - redirect_uris: ($redirect_uris | map({matching_mode: "strict", url: .})), - property_mappings: $property_mappings, - signing_key: $signing_key, - issuer_mode: "per_provider", - sub_mode: "hashed_user_id" - }' -)" - -existing_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -)" - -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/oauth2/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/oauth2/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: Forgejo OIDC provider did not return a primary key" >&2 - exit 1 -fi - -application_payload="$( - jq -n \ - --arg name "$application_name" \ - --arg slug "$application_slug" \ - --arg provider "$provider_pk" \ - --arg launch_url "$launch_url" \ - '{ - name: $name, - slug: $slug, - provider: ($provider | tonumber), - meta_launch_url: $launch_url, - open_in_new_tab: false, - policy_engine_mode: "any" - }' -)" - -existing_application="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? 
| select(.slug == $slug)' \ - | head -n1 -)" - -if [[ -n "$existing_application" ]]; then - application_pk="$(printf '%s\n' "$existing_application" | jq -r '.pk')" -else - create_application_result="$( - api_with_status POST "/api/v3/core/applications/" "$application_payload" - )" - create_application_status="$(printf '%s\n' "$create_application_result" | sed -n '1p')" - create_application_body="$(printf '%s\n' "$create_application_result" | sed '1d')" - - if [[ "$create_application_status" =~ ^20[01]$ ]]; then - application_pk="$(printf '%s\n' "$create_application_body" | jq -r '.pk // empty')" - elif [[ "$create_application_status" == "400" ]] && printf '%s\n' "$create_application_body" | jq -e ' - (.slug // [] | index("Application with this slug already exists.")) != null - or (.provider // [] | index("Application with this provider already exists.")) != null - ' >/dev/null; then - application_pk="existing-duplicate" - else - printf '%s\n' "$create_application_body" >&2 - echo "error: could not reconcile Authentik application ${application_slug}" >&2 - exit 1 - fi -fi - -if [[ -z "${application_pk:-}" ]]; then - echo "error: Forgejo OIDC application did not return a primary key" >&2 - exit 1 -fi - -for _ in $(seq 1 30); do - if curl -fsS "${authentik_url}/application/o/${application_slug}/.well-known/openid-configuration" >/dev/null 2>&1; then - echo "Synced Authentik Forgejo OIDC application ${application_slug} (${application_name})." - exit 0 - fi - sleep 2 -done - -echo "warning: Forgejo OIDC issuer document for ${application_slug} was not immediately readable; keeping reconciled config." >&2 -echo "Synced Authentik Forgejo OIDC application ${application_slug} (${application_name})." 
diff --git a/Scripts/authentik-sync-google-source.sh b/Scripts/authentik-sync-google-source.sh deleted file mode 100755 index a4c9edb..0000000 --- a/Scripts/authentik-sync-google-source.sh +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -google_client_id="${AUTHENTIK_GOOGLE_CLIENT_ID:-}" -google_client_secret="${AUTHENTIK_GOOGLE_CLIENT_SECRET:-}" -source_slug="${AUTHENTIK_GOOGLE_SOURCE_SLUG:-google}" -source_name="${AUTHENTIK_GOOGLE_SOURCE_NAME:-Google}" -identification_stage_name="${AUTHENTIK_GOOGLE_IDENTIFICATION_STAGE_NAME:-default-authentication-identification}" -authentication_flow_slug="${AUTHENTIK_GOOGLE_AUTHENTICATION_FLOW_SLUG:-default-source-authentication}" -enrollment_flow_slug="${AUTHENTIK_GOOGLE_ENROLLMENT_FLOW_SLUG:-default-source-enrollment}" -login_mode="${AUTHENTIK_GOOGLE_LOGIN_MODE:-redirect}" -user_matching_mode="${AUTHENTIK_GOOGLE_USER_MATCHING_MODE:-email_link}" -policy_engine_mode="${AUTHENTIK_GOOGLE_POLICY_ENGINE_MODE:-any}" -google_account_map_json="${AUTHENTIK_GOOGLE_ACCOUNT_MAP_JSON:-[]}" -property_mapping_name="${AUTHENTIK_GOOGLE_PROPERTY_MAPPING_NAME:-Burrow Google Account Map}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-google-source.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_GOOGLE_CLIENT_ID - AUTHENTIK_GOOGLE_CLIENT_SECRET - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_GOOGLE_SOURCE_SLUG - AUTHENTIK_GOOGLE_SOURCE_NAME - AUTHENTIK_GOOGLE_IDENTIFICATION_STAGE_NAME - AUTHENTIK_GOOGLE_AUTHENTICATION_FLOW_SLUG - AUTHENTIK_GOOGLE_ENROLLMENT_FLOW_SLUG - AUTHENTIK_GOOGLE_LOGIN_MODE promoted|redirect - AUTHENTIK_GOOGLE_USER_MATCHING_MODE identifier|email_link|email_deny|username_link|username_deny - AUTHENTIK_GOOGLE_POLICY_ENGINE_MODE all|any - AUTHENTIK_GOOGLE_ACCOUNT_MAP_JSON JSON array of alias mappings - AUTHENTIK_GOOGLE_PROPERTY_MAPPING_NAME -EOF -} - -if [[ 
"${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -z "$google_client_id" || -z "$google_client_secret" || "$google_client_id" == PENDING* || "$google_client_secret" == PENDING* ]]; then - echo "Google OAuth credentials are not configured; skipping Authentik Google source sync." >&2 - echo "Set Authorized redirect URI in Google to ${authentik_url}/source/oauth/callback/${source_slug}/" >&2 - exit 0 -fi - -if ! printf '%s' "$google_account_map_json" | jq -e 'type == "array"' >/dev/null; then - echo "error: AUTHENTIK_GOOGLE_ACCOUNT_MAP_JSON must be a JSON array" >&2 - exit 1 -fi - -case "$login_mode" in - promoted|redirect) ;; - *) - echo "warning: unsupported AUTHENTIK_GOOGLE_LOGIN_MODE=$login_mode; falling back to redirect" >&2 - login_mode="redirect" - ;; -esac - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_single_result() { - local path="$1" - local jq_filter="$2" - - api GET "$path" | jq -r "$jq_filter" | head -n1 -} - -wait_for_authentik - -flow_pk="$( - lookup_single_result \ - "/api/v3/flows/instances/?slug=${authentication_flow_slug}" \ - '.results[] | select(.slug != null) | .pk // empty' -)" -if [[ -z "$flow_pk" ]]; then - echo "error: could not resolve Authentik authentication flow slug ${authentication_flow_slug}" >&2 - exit 1 -fi - 
-enrollment_flow_pk="$( - lookup_single_result \ - "/api/v3/flows/instances/?slug=${enrollment_flow_slug}" \ - '.results[] | select(.slug != null) | .pk // empty' -)" -if [[ -z "$enrollment_flow_pk" ]]; then - echo "error: could not resolve Authentik enrollment flow slug ${enrollment_flow_slug}" >&2 - exit 1 -fi - -identification_stage="$( - api GET "/api/v3/stages/identification/" \ - | jq -c --arg name "$identification_stage_name" '.results[] | select(.name == $name)' -)" -if [[ -z "$identification_stage" ]]; then - echo "error: could not resolve Authentik identification stage ${identification_stage_name}" >&2 - exit 1 -fi - -stage_pk="$(printf '%s\n' "$identification_stage" | jq -r '.pk')" - -property_mapping_payload='[]' -if [[ "$(printf '%s' "$google_account_map_json" | jq 'length')" -gt 0 ]]; then - alias_map_python="$( - printf '%s' "$google_account_map_json" \ - | jq -c ' - map({ - key: (.source_email | ascii_downcase), - value: { - username: .username, - email: .email, - name: .name - } - }) - | from_entries - ' - )" - - oauth_property_mapping_expression="$( - cat </dev/null - else - property_mapping_pk="$( - api POST "/api/v3/propertymappings/source/oauth/" "$oauth_property_mapping_payload" \ - | jq -r '.pk // empty' - )" - fi - - if [[ -z "${property_mapping_pk:-}" ]]; then - echo "error: Google OAuth property mapping did not return a primary key" >&2 - exit 1 - fi - - property_mapping_payload="$(jq -cn --arg property_mapping_pk "$property_mapping_pk" '[$property_mapping_pk]')" -fi - -oauth_source_payload="$( - jq -n \ - --arg name "$source_name" \ - --arg slug "$source_slug" \ - --arg authentication_flow "$flow_pk" \ - --arg enrollment_flow "$enrollment_flow_pk" \ - --arg user_matching_mode "$user_matching_mode" \ - --arg policy_engine_mode "$policy_engine_mode" \ - --argjson user_property_mappings "$property_mapping_payload" \ - --arg consumer_key "$google_client_id" \ - --arg consumer_secret "$google_client_secret" \ - '{ - name: $name, - slug: $slug, 
- enabled: true, - promoted: true, - authentication_flow: $authentication_flow, - enrollment_flow: $enrollment_flow, - user_property_mappings: $user_property_mappings, - group_property_mappings: [], - policy_engine_mode: $policy_engine_mode, - user_matching_mode: $user_matching_mode, - provider_type: "google", - consumer_key: $consumer_key, - consumer_secret: $consumer_secret - }' -)" - -existing_source="$( - api GET "/api/v3/sources/oauth/?slug=${source_slug}" \ - | jq -c '.results[]?' -)" - -if [[ -n "$existing_source" ]]; then - source_pk="$(printf '%s\n' "$existing_source" | jq -r '.pk')" - api PATCH "/api/v3/sources/oauth/${source_slug}/" "$oauth_source_payload" >/dev/null -else - source_pk="$( - api POST "/api/v3/sources/oauth/" "$oauth_source_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "$source_pk" ]]; then - echo "error: Google OAuth source did not return a primary key" >&2 - exit 1 -fi - -stage_patch="$( - printf '%s\n' "$identification_stage" \ - | jq -c \ - --arg source_pk "$source_pk" \ - --arg login_mode "$login_mode" ' - .sources = ( - if $login_mode == "redirect" then - [$source_pk] - else - ([ $source_pk ] + ((.sources // []) | map(select(. != $source_pk)))) - end - ) - | .show_source_labels = true - | if $login_mode == "redirect" then - .user_fields = [] - else - . - end - | { - sources, - show_source_labels, - user_fields - }' -)" - -api PATCH "/api/v3/stages/identification/${stage_pk}/" "$stage_patch" >/dev/null - -echo "Synced Authentik Google source ${source_slug} (${source_pk}) in ${login_mode} mode." 
diff --git a/Scripts/authentik-sync-linear-saml.sh b/Scripts/authentik-sync-linear-saml.sh deleted file mode 100755 index 5da64ad..0000000 --- a/Scripts/authentik-sync-linear-saml.sh +++ /dev/null @@ -1,344 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_LINEAR_APPLICATION_SLUG:-linear}" -application_name="${AUTHENTIK_LINEAR_APPLICATION_NAME:-Linear}" -provider_name="${AUTHENTIK_LINEAR_PROVIDER_NAME:-Linear}" -launch_url="${AUTHENTIK_LINEAR_LAUNCH_URL:-https://linear.app/burrownet}" -acs_url="${AUTHENTIK_LINEAR_ACS_URL:-}" -audience="${AUTHENTIK_LINEAR_AUDIENCE:-}" -issuer="${AUTHENTIK_LINEAR_ISSUER:-${authentik_url}/application/saml/${application_slug}/metadata/}" -default_relay_state="${AUTHENTIK_LINEAR_DEFAULT_RELAY_STATE:-}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-linear-saml.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_LINEAR_ACS_URL - AUTHENTIK_LINEAR_AUDIENCE - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_LINEAR_APPLICATION_SLUG - AUTHENTIK_LINEAR_APPLICATION_NAME - AUTHENTIK_LINEAR_PROVIDER_NAME - AUTHENTIK_LINEAR_LAUNCH_URL - AUTHENTIK_LINEAR_ISSUER - AUTHENTIK_LINEAR_DEFAULT_RELAY_STATE -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -z "$acs_url" ]]; then - echo "error: AUTHENTIK_LINEAR_ACS_URL is required" >&2 - exit 1 -fi - -if [[ -z "$audience" ]]; then - echo "error: AUTHENTIK_LINEAR_AUDIENCE is required" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X 
"$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -api_with_status() { - local method="$1" - local path="$2" - local data="${3:-}" - local response_file status - - response_file="$(mktemp)" - trap 'rm -f "$response_file"' RETURN - - if [[ -n "$data" ]]; then - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - )" - else - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - )" - fi - - printf '%s\n' "$status" - cat "$response_file" -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_oauth_template_field() { - local field="$1" - - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -r --arg field "$field" '.results[]? | select(.assigned_application_slug == "ts") | .[$field]' \ - | head -n1 -} - -reconcile_property_mapping() { - local name="$1" - local saml_name="$2" - local friendly_name="$3" - local expression="$4" - local payload existing_pk - - payload="$( - jq -n \ - --arg name "$name" \ - --arg saml_name "$saml_name" \ - --arg friendly_name "$friendly_name" \ - --arg expression "$expression" \ - '{ - name: $name, - saml_name: $saml_name, - friendly_name: $friendly_name, - expression: $expression - }' - )" - - existing_pk="$( - api GET "/api/v3/propertymappings/provider/saml/?page_size=200" \ - | jq -r --arg name "$name" '.results[]? 
| select(.name == $name) | .pk' \ - | head -n1 - )" - - if [[ -n "$existing_pk" ]]; then - api PATCH "/api/v3/propertymappings/provider/saml/${existing_pk}/" "$payload" >/dev/null - printf '%s\n' "$existing_pk" - else - api POST "/api/v3/propertymappings/provider/saml/" "$payload" | jq -r '.pk // empty' - fi -} - -wait_for_authentik - -authorization_flow="$(lookup_oauth_template_field authorization_flow)" -invalidation_flow="$(lookup_oauth_template_field invalidation_flow)" -signing_kp="$(lookup_oauth_template_field signing_key)" - -if [[ -z "$authorization_flow" || -z "$invalidation_flow" || -z "$signing_kp" ]]; then - echo "error: could not resolve Authentik provider defaults from Burrow Tailnet template" >&2 - exit 1 -fi - -email_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Linear SAML Email" \ - "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress" \ - "email" \ - 'return request.user.email' -)" - -name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Linear SAML Name" \ - "name" \ - "name" \ - 'return request.user.name or request.user.username' -)" - -first_name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Linear SAML First Name" \ - "firstName" \ - "firstName" \ - $'parts = (request.user.name or "").split(" ", 1)\nif len(parts) > 0 and parts[0]:\n return parts[0]\nreturn request.user.username' -)" - -last_name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Linear SAML Last Name" \ - "lastName" \ - "lastName" \ - $'parts = (request.user.name or "").rsplit(" ", 1)\nif len(parts) == 2 and parts[1]:\n return parts[1]\nreturn request.user.username' -)" - -if [[ -z "$email_mapping_pk" || -z "$name_mapping_pk" || -z "$first_name_mapping_pk" || -z "$last_name_mapping_pk" ]]; then - echo "error: failed to reconcile Linear SAML property mappings" >&2 - exit 1 -fi - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg authorization_flow "$authorization_flow" \ - --arg invalidation_flow 
"$invalidation_flow" \ - --arg acs_url "$acs_url" \ - --arg audience "$audience" \ - --arg issuer "$issuer" \ - --arg signing_kp "$signing_kp" \ - --arg default_relay_state "$default_relay_state" \ - --arg name_id_mapping "$email_mapping_pk" \ - --arg email_mapping "$email_mapping_pk" \ - --arg name_mapping "$name_mapping_pk" \ - --arg first_name_mapping "$first_name_mapping_pk" \ - --arg last_name_mapping "$last_name_mapping_pk" \ - '{ - name: $name, - authorization_flow: $authorization_flow, - invalidation_flow: $invalidation_flow, - acs_url: $acs_url, - audience: $audience, - issuer: $issuer, - signing_kp: $signing_kp, - sign_assertion: true, - sign_response: true, - sp_binding: "post", - name_id_mapping: $name_id_mapping, - property_mappings: [ - $email_mapping, - $name_mapping, - $first_name_mapping, - $last_name_mapping - ] - } - + (if $default_relay_state == "" then {} else {default_relay_state: $default_relay_state} end)' -)" - -existing_provider="$( - api GET "/api/v3/providers/saml/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -)" - -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/saml/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/saml/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: Linear SAML provider did not return a primary key" >&2 - exit 1 -fi - -application_payload="$( - jq -n \ - --arg name "$application_name" \ - --arg slug "$application_slug" \ - --arg provider "$provider_pk" \ - --arg launch_url "$launch_url" \ - '{ - name: $name, - slug: $slug, - provider: ($provider | tonumber), - meta_launch_url: $launch_url, - open_in_new_tab: true, - policy_engine_mode: "any" - }' -)" - -existing_application="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? 
| select(.slug == $slug)' \ - | head -n1 -)" - -if [[ -n "$existing_application" ]]; then - application_pk="existing" - api PATCH "/api/v3/core/applications/${application_slug}/" "$application_payload" >/dev/null -else - create_application_result="$( - api_with_status POST "/api/v3/core/applications/" "$application_payload" - )" - create_application_status="$(printf '%s\n' "$create_application_result" | sed -n '1p')" - create_application_body="$(printf '%s\n' "$create_application_result" | sed '1d')" - - if [[ "$create_application_status" =~ ^20[01]$ ]]; then - application_pk="$(printf '%s\n' "$create_application_body" | jq -r '.pk // empty')" - elif [[ "$create_application_status" == "400" ]] && printf '%s\n' "$create_application_body" | jq -e ' - (.slug // [] | index("Application with this slug already exists.")) != null - or (.provider // [] | index("Application with this provider already exists.")) != null - ' >/dev/null; then - application_pk="existing-duplicate" - else - printf '%s\n' "$create_application_body" >&2 - echo "error: could not reconcile Authentik application ${application_slug}" >&2 - exit 1 - fi -fi - -if [[ -z "${application_pk:-}" ]]; then - echo "error: Linear SAML application did not return a primary key" >&2 - exit 1 -fi - -for _ in $(seq 1 30); do - metadata_status="$( - curl -sS \ - -o /dev/null \ - -w '%{http_code}' \ - --max-redirs 0 \ - "${authentik_url}/application/saml/${application_slug}/metadata/" \ - || true - )" - case "$metadata_status" in - 200|301|302|307|308) - echo "Synced Authentik Linear SAML application ${application_slug} (${application_name})." - exit 0 - ;; - esac - sleep 2 -done - -echo "warning: Linear SAML metadata for ${application_slug} was not immediately readable; keeping reconciled config." >&2 -echo "Synced Authentik Linear SAML application ${application_slug} (${application_name})." 
diff --git a/Scripts/authentik-sync-linear-scim.sh b/Scripts/authentik-sync-linear-scim.sh deleted file mode 100644 index 4ef83e4..0000000 --- a/Scripts/authentik-sync-linear-scim.sh +++ /dev/null @@ -1,311 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_LINEAR_APPLICATION_SLUG:-linear}" -provider_name="${AUTHENTIK_LINEAR_SCIM_PROVIDER_NAME:-Linear SCIM}" -scim_url="${AUTHENTIK_LINEAR_SCIM_URL:-}" -scim_token_file="${AUTHENTIK_LINEAR_SCIM_TOKEN_FILE:-}" -user_identifier="${AUTHENTIK_LINEAR_SCIM_USER_IDENTIFIER:-email}" -owner_group="${AUTHENTIK_LINEAR_OWNER_GROUP:-linear-owners}" -admin_group="${AUTHENTIK_LINEAR_ADMIN_GROUP:-linear-admins}" -guest_group="${AUTHENTIK_LINEAR_GUEST_GROUP:-linear-guests}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-linear-scim.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_LINEAR_SCIM_URL - AUTHENTIK_LINEAR_SCIM_TOKEN_FILE - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_LINEAR_APPLICATION_SLUG - AUTHENTIK_LINEAR_SCIM_PROVIDER_NAME - AUTHENTIK_LINEAR_SCIM_USER_IDENTIFIER - AUTHENTIK_LINEAR_OWNER_GROUP - AUTHENTIK_LINEAR_ADMIN_GROUP - AUTHENTIK_LINEAR_GUEST_GROUP -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -z "$scim_url" ]]; then - echo "error: AUTHENTIK_LINEAR_SCIM_URL is required" >&2 - exit 1 -fi - -if [[ -z "$scim_token_file" || ! 
-s "$scim_token_file" ]]; then - echo "error: AUTHENTIK_LINEAR_SCIM_TOKEN_FILE is required and must be readable" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_group_pk() { - local group_name="$1" - - api GET "/api/v3/core/groups/?page_size=200&search=${group_name}" \ - | jq -r --arg name "$group_name" '.results[]? | select(.name == $name) | .pk // empty' \ - | head -n1 -} - -ensure_group() { - local group_name="$1" - local payload group_pk - - payload="$(jq -cn --arg name "$group_name" '{name: $name}')" - group_pk="$(lookup_group_pk "$group_name")" - - if [[ -n "$group_pk" ]]; then - api PATCH "/api/v3/core/groups/${group_pk}/" "$payload" >/dev/null - else - group_pk="$( - api POST "/api/v3/core/groups/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - if [[ -z "$group_pk" ]]; then - echo "error: could not reconcile Authentik group ${group_name}" >&2 - exit 1 - fi - - printf '%s\n' "$group_pk" -} - -lookup_application() { - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? | select(.slug == $slug)' \ - | head -n1 -} - -lookup_scim_provider() { - api GET "/api/v3/providers/scim/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_backchannel_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -} - -lookup_scim_mapping_pk() { - local managed_name="$1" - - api GET "/api/v3/propertymappings/provider/scim/?page_size=200" \ - | jq -r --arg managed "$managed_name" '.results[]? | select(.managed == $managed) | .pk // empty' \ - | head -n1 -} - -reconcile_property_mapping() { - local name="$1" - local expression="$2" - local payload existing_pk - - payload="$( - jq -n \ - --arg name "$name" \ - --arg expression "$expression" \ - '{ - name: $name, - expression: $expression - }' - )" - - existing_pk="$( - api GET "/api/v3/propertymappings/provider/scim/?page_size=200" \ - | jq -r --arg name "$name" '.results[]? | select(.name == $name) | .pk // empty' \ - | head -n1 - )" - - if [[ -n "$existing_pk" ]]; then - api PATCH "/api/v3/propertymappings/provider/scim/${existing_pk}/" "$payload" >/dev/null - printf '%s\n' "$existing_pk" - else - api POST "/api/v3/propertymappings/provider/scim/" "$payload" \ - | jq -r '.pk // empty' - fi -} - -sync_object() { - local provider_pk="$1" - local model="$2" - local object_id="$3" - - if ! api POST "/api/v3/providers/scim/${provider_pk}/sync/object/" "$( - jq -cn \ - --arg model "$model" \ - --arg object_id "$object_id" \ - '{ - sync_object_model: $model, - sync_object_id: $object_id, - override_dry_run: false - }' - )" >/dev/null; then - echo "warning: could not trigger immediate Linear SCIM sync for ${model} ${object_id}; provider will continue with its normal sync cycle." 
>&2 - fi -} - -wait_for_authentik - -group_mapping_pk="$(lookup_scim_mapping_pk "goauthentik.io/providers/scim/group")" -case "$user_identifier" in - email) - user_mapping_expression=$'# Some implementations require givenName and familyName to be set\ngivenName, familyName = request.user.name, " "\nformatted = request.user.name + " "\nif " " in request.user.name:\n givenName, _, familyName = request.user.name.partition(" ")\n formatted = request.user.name\n\navatar = request.user.avatar\nphotos = None\nif "://" in avatar:\n photos = [{"value": avatar, "type": "photo"}]\n\nlocale = request.user.locale()\nif locale == "":\n locale = None\n\nemails = []\nif request.user.email != "":\n emails = [{\n "value": request.user.email,\n "type": "other",\n "primary": True,\n }]\n\nidentifier = request.user.email\nif identifier == "":\n identifier = request.user.username\n\nreturn {\n "userName": identifier,\n "name": {\n "formatted": formatted,\n "givenName": givenName,\n "familyName": familyName,\n },\n "displayName": request.user.name,\n "photos": photos,\n "locale": locale,\n "active": request.user.is_active,\n "emails": emails,\n}' - ;; - username) - user_mapping_expression=$'# Some implementations require givenName and familyName to be set\ngivenName, familyName = request.user.name, " "\nformatted = request.user.name + " "\nif " " in request.user.name:\n givenName, _, familyName = request.user.name.partition(" ")\n formatted = request.user.name\n\navatar = request.user.avatar\nphotos = None\nif "://" in avatar:\n photos = [{"value": avatar, "type": "photo"}]\n\nlocale = request.user.locale()\nif locale == "":\n locale = None\n\nemails = []\nif request.user.email != "":\n emails = [{\n "value": request.user.email,\n "type": "other",\n "primary": True,\n }]\nreturn {\n "userName": request.user.username,\n "name": {\n "formatted": formatted,\n "givenName": givenName,\n "familyName": familyName,\n },\n "displayName": request.user.name,\n "photos": photos,\n "locale": 
locale,\n "active": request.user.is_active,\n "emails": emails,\n}' - ;; - *) - echo "error: unsupported AUTHENTIK_LINEAR_SCIM_USER_IDENTIFIER value: ${user_identifier}" >&2 - exit 1 - ;; -esac -user_mapping_pk="$(reconcile_property_mapping "Burrow Linear SCIM User" "$user_mapping_expression")" - -if [[ -z "$user_mapping_pk" || -z "$group_mapping_pk" ]]; then - echo "error: could not resolve managed Authentik SCIM property mappings" >&2 - exit 1 -fi - -owner_group_pk="$(ensure_group "$owner_group")" -admin_group_pk="$(ensure_group "$admin_group")" -guest_group_pk="$(ensure_group "$guest_group")" - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg url "$scim_url" \ - --arg token "$(tr -d '\r\n' < "$scim_token_file")" \ - --arg user_mapping_pk "$user_mapping_pk" \ - --arg group_mapping_pk "$group_mapping_pk" \ - --arg owner_group_pk "$owner_group_pk" \ - --arg admin_group_pk "$admin_group_pk" \ - --arg guest_group_pk "$guest_group_pk" \ - '{ - name: $name, - url: $url, - token: $token, - auth_mode: "token", - verify_certificates: true, - compatibility_mode: "default", - property_mappings: [$user_mapping_pk], - property_mappings_group: [$group_mapping_pk], - group_filters: [ - $owner_group_pk, - $admin_group_pk, - $guest_group_pk - ], - dry_run: false - }' -)" - -existing_provider="$(lookup_scim_provider)" -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/scim/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/scim/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: Linear SCIM provider did not return a primary key" >&2 - exit 1 -fi - -application="$(lookup_application)" -if [[ -z "$application" ]]; then - echo "error: could not resolve Authentik application ${application_slug}" >&2 - exit 1 -fi - -application_payload="$( - printf '%s\n' 
"$application" \ - | jq \ - --arg provider_pk "$provider_pk" \ - '{ - name: .name, - slug: .slug, - provider: .provider, - backchannel_providers: ((.backchannel_providers // []) + [($provider_pk | tonumber)] | unique), - open_in_new_tab: .open_in_new_tab, - meta_launch_url: .meta_launch_url, - policy_engine_mode: .policy_engine_mode - }' -)" -api PATCH "/api/v3/core/applications/${application_slug}/" "$application_payload" >/dev/null - -group_pks_json="$(jq -cn --arg owner "$owner_group_pk" --arg admin "$admin_group_pk" --arg guest "$guest_group_pk" '[$owner, $admin, $guest]')" -user_pks_json="$( - api GET "/api/v3/core/users/?page_size=200" \ - | jq -c \ - --argjson group_pks "$group_pks_json" \ - '[.results[]? - | select( - ([((.groups // [])[] | tostring)] as $user_groups - | ($group_pks | map(. as $wanted | ($user_groups | index($wanted)) != null) | any)) - ) - | .pk]' -)" - -while IFS= read -r group_pk; do - [[ -z "$group_pk" ]] && continue - sync_object "$provider_pk" "authentik.core.models.Group" "$group_pk" -done < <(printf '%s\n' "$group_pks_json" | jq -r '.[]') - -while IFS= read -r user_pk; do - [[ -z "$user_pk" ]] && continue - sync_object "$provider_pk" "authentik.core.models.User" "$user_pk" -done < <(printf '%s\n' "$user_pks_json" | jq -r '.[]') - -status_json="$(api GET "/api/v3/providers/scim/${provider_pk}/sync/status/" || true)" -if ! printf '%s\n' "$status_json" | jq -e 'has("last_sync_status")' >/dev/null 2>&1; then - echo "warning: could not read Linear SCIM sync status for provider ${provider_pk}; keeping reconciled configuration." >&2 -fi - -echo "Synced Authentik Linear SCIM provider ${provider_name} (${provider_pk}) with groups ${owner_group}, ${admin_group}, ${guest_group}." 
diff --git a/Scripts/authentik-sync-tailnet-auth-flow.sh b/Scripts/authentik-sync-tailnet-auth-flow.sh deleted file mode 100755 index 1c715cc..0000000 --- a/Scripts/authentik-sync-tailnet-auth-flow.sh +++ /dev/null @@ -1,309 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -provider_slug="${AUTHENTIK_TAILNET_PROVIDER_SLUG:-ts}" -provider_slugs_json="${AUTHENTIK_TAILNET_PROVIDER_SLUGS_JSON:-}" -authentication_flow_name="${AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_NAME:-Burrow Tailnet Authentication}" -authentication_flow_slug="${AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_SLUG:-burrow-tailnet-authentication}" -identification_stage_name="${AUTHENTIK_TAILNET_IDENTIFICATION_STAGE_NAME:-burrow-tailnet-identification-stage}" -password_stage_name="${AUTHENTIK_TAILNET_PASSWORD_STAGE_NAME:-burrow-tailnet-password-stage}" -user_login_stage_name="${AUTHENTIK_TAILNET_USER_LOGIN_STAGE_NAME:-burrow-tailnet-user-login-stage}" -google_source_slug="${AUTHENTIK_TAILNET_GOOGLE_SOURCE_SLUG:-google}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-tailnet-auth-flow.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_TAILNET_PROVIDER_SLUG - AUTHENTIK_TAILNET_PROVIDER_SLUGS_JSON - AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_NAME - AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_SLUG - AUTHENTIK_TAILNET_IDENTIFICATION_STAGE_NAME - AUTHENTIK_TAILNET_PASSWORD_STAGE_NAME - AUTHENTIK_TAILNET_USER_LOGIN_STAGE_NAME - AUTHENTIK_TAILNET_GOOGLE_SOURCE_SLUG -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -n "$provider_slugs_json" ]]; then - if ! 
printf '%s' "$provider_slugs_json" | jq -e 'type == "array" and length > 0 and all(.[]; type == "string" and length > 0)' >/dev/null; then - echo "error: AUTHENTIK_TAILNET_PROVIDER_SLUGS_JSON must be a non-empty JSON array of strings" >&2 - exit 1 - fi -else - provider_slugs_json="$(jq -cn --arg slug "$provider_slug" '[$slug]')" -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_stage_by_name() { - local path="$1" - local name="$2" - - api GET "${path}?page_size=200" \ - | jq -c --arg name "$name" '.results[]? | select(.name == $name)' \ - | head -n1 -} - -lookup_flow_pk() { - local slug="$1" - - api GET "/api/v3/flows/instances/?slug=${slug}" \ - | jq -r '.results[]? | select(.slug != null) | .pk // empty' \ - | head -n1 -} - -lookup_source_pk() { - local slug="$1" - - api GET "/api/v3/sources/oauth/?page_size=200&slug=${slug}" \ - | jq -r --arg slug "$slug" '.results[]? 
| select(.slug == $slug) | .pk // empty' \ - | head -n1 -} - -ensure_password_stage() { - local existing payload stage_pk - - existing="$(lookup_stage_by_name "/api/v3/stages/password/" "$password_stage_name")" - payload="$( - jq -cn \ - --arg name "$password_stage_name" \ - '{ - name: $name, - backends: [ - "authentik.core.auth.InbuiltBackend", - "authentik.core.auth.TokenBackend" - ], - allow_show_password: false, - failed_attempts_before_cancel: 5 - }' - )" - - if [[ -n "$existing" ]]; then - stage_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/stages/password/${stage_pk}/" "$payload" >/dev/null - else - stage_pk="$( - api POST "/api/v3/stages/password/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - printf '%s\n' "$stage_pk" -} - -ensure_identification_stage() { - local password_stage_pk="$1" - local google_source_pk="$2" - local existing payload stage_pk sources_json - - existing="$(lookup_stage_by_name "/api/v3/stages/identification/" "$identification_stage_name")" - if [[ -n "$google_source_pk" ]]; then - sources_json="$(jq -cn --arg source "$google_source_pk" '[$source]')" - else - sources_json='[]' - fi - - payload="$( - jq -cn \ - --arg name "$identification_stage_name" \ - --arg password_stage "$password_stage_pk" \ - --argjson sources "$sources_json" \ - '{ - name: $name, - user_fields: ["username", "email"], - password_stage: $password_stage, - case_insensitive_matching: true, - show_matched_user: true, - sources: $sources, - show_source_labels: true, - pretend_user_exists: false, - enable_remember_me: false - }' - )" - - if [[ -n "$existing" ]]; then - stage_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/stages/identification/${stage_pk}/" "$payload" >/dev/null - else - stage_pk="$( - api POST "/api/v3/stages/identification/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - printf '%s\n' "$stage_pk" -} - -ensure_user_login_stage() { - local existing payload stage_pk - - 
existing="$(lookup_stage_by_name "/api/v3/stages/user_login/" "$user_login_stage_name")" - payload="$( - jq -cn \ - --arg name "$user_login_stage_name" \ - '{ - name: $name, - session_duration: "hours=12", - terminate_other_sessions: false, - remember_me_offset: "seconds=0", - network_binding: "no_binding", - geoip_binding: "no_binding" - }' - )" - - if [[ -n "$existing" ]]; then - stage_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/stages/user_login/${stage_pk}/" "$payload" >/dev/null - else - stage_pk="$( - api POST "/api/v3/stages/user_login/" "$payload" \ - | jq -r '.pk // empty' - )" - fi - - printf '%s\n' "$stage_pk" -} - -ensure_authentication_flow() { - local existing_pk payload - - existing_pk="$(lookup_flow_pk "$authentication_flow_slug")" - payload="$( - jq -cn \ - --arg name "$authentication_flow_name" \ - --arg slug "$authentication_flow_slug" \ - '{ - name: $name, - title: $name, - slug: $slug, - designation: "authentication", - policy_engine_mode: "any", - layout: "stacked" - }' - )" - - if [[ -n "$existing_pk" ]]; then - api PATCH "/api/v3/flows/instances/${authentication_flow_slug}/" "$payload" >/dev/null - printf '%s\n' "$existing_pk" - else - api POST "/api/v3/flows/instances/" "$payload" \ - | jq -r '.pk // empty' - fi -} - -ensure_flow_binding() { - local flow_pk="$1" - local stage_pk="$2" - local order="$3" - local existing payload binding_pk - - existing="$( - api GET "/api/v3/flows/bindings/?target=${flow_pk}&stage=${stage_pk}&page_size=200" \ - | jq -c '.results[]?' 
\ - | head -n1 - )" - - payload="$( - jq -cn \ - --arg target "$flow_pk" \ - --arg stage "$stage_pk" \ - --argjson order "$order" \ - '{ - target: $target, - stage: $stage, - order: $order, - policy_engine_mode: "any" - }' - )" - - if [[ -n "$existing" ]]; then - binding_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/flows/bindings/${binding_pk}/" "$payload" >/dev/null - else - api POST "/api/v3/flows/bindings/" "$payload" >/dev/null - fi -} - -wait_for_authentik - -mapfile -t provider_pks < <( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -r --argjson provider_slugs "$provider_slugs_json" ' - .results[]? - | select( - ((.assigned_application_slug // empty) as $assigned | ($provider_slugs | index($assigned)) != null) - or ((.slug // empty) as $slug | ($provider_slugs | index($slug)) != null) - ) - | .pk // empty - ' -) - -if [[ "${#provider_pks[@]}" -eq 0 ]]; then - echo "error: could not resolve any Authentik Tailnet OAuth providers from ${provider_slugs_json}" >&2 - exit 1 -fi - -google_source_pk="$(lookup_source_pk "$google_source_slug" || true)" -password_stage_pk="$(ensure_password_stage)" -identification_stage_pk="$(ensure_identification_stage "$password_stage_pk" "$google_source_pk")" -user_login_stage_pk="$(ensure_user_login_stage)" -authentication_flow_pk="$(ensure_authentication_flow)" - -ensure_flow_binding "$authentication_flow_pk" "$identification_stage_pk" 10 -ensure_flow_binding "$authentication_flow_pk" "$user_login_stage_pk" 30 - -for provider_pk in "${provider_pks[@]}"; do - api PATCH "/api/v3/providers/oauth2/${provider_pk}/" "$( - jq -cn --arg flow "$authentication_flow_pk" '{authentication_flow: $flow}' - )" >/dev/null -done - -echo "Synced Burrow Tailnet authentication flow for providers ${provider_slugs_json}." 
diff --git a/Scripts/authentik-sync-tailscale-oidc.sh b/Scripts/authentik-sync-tailscale-oidc.sh deleted file mode 100755 index 58fe7e4..0000000 --- a/Scripts/authentik-sync-tailscale-oidc.sh +++ /dev/null @@ -1,369 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_TAILSCALE_APPLICATION_SLUG:-tailscale}" -application_name="${AUTHENTIK_TAILSCALE_APPLICATION_NAME:-Tailscale}" -provider_name="${AUTHENTIK_TAILSCALE_PROVIDER_NAME:-Tailscale}" -template_slug="${AUTHENTIK_TAILSCALE_TEMPLATE_SLUG:-ts}" -client_id="${AUTHENTIK_TAILSCALE_CLIENT_ID:-tailscale.burrow.net}" -client_secret="${AUTHENTIK_TAILSCALE_CLIENT_SECRET:-}" -launch_url="${AUTHENTIK_TAILSCALE_LAUNCH_URL:-https://login.tailscale.com/start/oidc}" -access_group="${AUTHENTIK_TAILSCALE_ACCESS_GROUP:-}" -default_external_application_slug="${AUTHENTIK_DEFAULT_EXTERNAL_APPLICATION_SLUG:-}" -redirect_uris_json="${AUTHENTIK_TAILSCALE_REDIRECT_URIS_JSON:-[ - \"https://login.tailscale.com/a/oauth_response\" -]}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-tailscale-oidc.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - AUTHENTIK_TAILSCALE_CLIENT_SECRET - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_TAILSCALE_APPLICATION_SLUG - AUTHENTIK_TAILSCALE_APPLICATION_NAME - AUTHENTIK_TAILSCALE_PROVIDER_NAME - AUTHENTIK_TAILSCALE_TEMPLATE_SLUG - AUTHENTIK_TAILSCALE_CLIENT_ID - AUTHENTIK_TAILSCALE_LAUNCH_URL - AUTHENTIK_TAILSCALE_REDIRECT_URIS_JSON - AUTHENTIK_TAILSCALE_ACCESS_GROUP - AUTHENTIK_DEFAULT_EXTERNAL_APPLICATION_SLUG -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -if [[ -z "$client_secret" || "$client_secret" == PENDING* ]]; then - echo "Tailscale OIDC client secret is not configured; skipping Authentik 
Tailscale sync." >&2 - exit 0 -fi - -if ! printf '%s' "$redirect_uris_json" | jq -e 'type == "array" and length > 0' >/dev/null; then - echo "error: AUTHENTIK_TAILSCALE_REDIRECT_URIS_JSON must be a non-empty JSON array" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - fi -} - -api_with_status() { - local method="$1" - local path="$2" - local data="${3:-}" - local response_file status - - response_file="$(mktemp)" - trap 'rm -f "$response_file"' RETURN - - if [[ -n "$data" ]]; then - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - )" - else - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - )" - fi - - printf '%s\n' "$status" - cat "$response_file" -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -wait_for_authentik - -lookup_group_pk() { - local group_name="$1" - - api GET "/api/v3/core/groups/?page_size=200" \ - | jq -r --arg group_name "$group_name" '.results[]? 
| select(.name == $group_name) | .pk // empty' \ - | head -n1 -} - -lookup_application_pk() { - local slug="$1" - local application_pk lookup_result lookup_status - - application_pk="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -r --arg slug "$slug" '.results[]? | select(.slug == $slug) | .pk // empty' \ - | head -n1 - )" - - if [[ -n "$application_pk" ]]; then - printf '%s\n' "$application_pk" - return 0 - fi - - lookup_result="$(api_with_status GET "/api/v3/core/applications/${slug}/")" - lookup_status="$(printf '%s\n' "$lookup_result" | sed -n '1p')" - if [[ "$lookup_status" =~ ^20[01]$ ]]; then - printf '%s\n' "$lookup_result" | sed '1d' | jq -r '.pk // empty' - fi -} - -ensure_application_group_binding() { - local application_slug="$1" - local group_name="$2" - local application_pk group_pk existing payload binding_pk - - application_pk="$(lookup_application_pk "$application_slug")" - if [[ -z "$application_pk" ]]; then - echo "warning: could not resolve Authentik application ${application_slug}; skipping application group binding" >&2 - return 0 - fi - - group_pk="$(lookup_group_pk "$group_name")" - if [[ -z "$group_pk" ]]; then - echo "error: could not resolve Authentik group ${group_name}" >&2 - exit 1 - fi - - existing="$( - api GET "/api/v3/policies/bindings/?page_size=200&target=${application_pk}" \ - | jq -c --arg group_pk "$group_pk" '.results[]? 
| select(.group == $group_pk)' \ - | head -n1 - )" - - payload="$( - jq -cn \ - --arg target "$application_pk" \ - --arg group "$group_pk" \ - '{ - group: $group, - target: $target, - negate: false, - enabled: true, - order: 100, - timeout: 30, - failure_result: false - }' - )" - - if [[ -n "$existing" ]]; then - binding_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/policies/bindings/${binding_pk}/" "$payload" >/dev/null - else - api POST "/api/v3/policies/bindings/" "$payload" >/dev/null - fi -} - -ensure_default_external_application() { - local application_slug="$1" - local application_pk default_brand brand_payload - - application_pk="$(lookup_application_pk "$application_slug")" - if [[ -z "$application_pk" ]]; then - echo "error: could not resolve Authentik application ${application_slug} for brand default application" >&2 - exit 1 - fi - - default_brand="$( - api GET "/api/v3/core/brands/?page_size=200" \ - | jq -c '.results[]? | select(.default == true)' \ - | head -n1 - )" - - if [[ -z "$default_brand" ]]; then - echo "warning: could not resolve the default Authentik brand; skipping external default application" >&2 - return 0 - fi - - brand_payload="$( - printf '%s\n' "$default_brand" \ - | jq --arg application_pk "$application_pk" '.default_application = $application_pk' - )" - - api PUT "/api/v3/core/brands/$(printf '%s\n' "$default_brand" | jq -r '.brand_uuid')/" "$brand_payload" >/dev/null -} - -template_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c --arg template_slug "$template_slug" '.results[]? 
| select(.assigned_application_slug == $template_slug)' \ - | head -n1 -)" - -if [[ -z "$template_provider" ]]; then - echo "error: could not resolve the Authentik OAuth provider template ${template_slug}" >&2 - exit 1 -fi - -authorization_flow="$(printf '%s\n' "$template_provider" | jq -r '.authorization_flow')" -invalidation_flow="$(printf '%s\n' "$template_provider" | jq -r '.invalidation_flow')" -property_mappings="$(printf '%s\n' "$template_provider" | jq -c '.property_mappings')" -signing_key="$(printf '%s\n' "$template_provider" | jq -r '.signing_key')" - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg authorization_flow "$authorization_flow" \ - --arg invalidation_flow "$invalidation_flow" \ - --arg client_id "$client_id" \ - --arg client_secret "$client_secret" \ - --arg signing_key "$signing_key" \ - --argjson property_mappings "$property_mappings" \ - --argjson redirect_uris "$redirect_uris_json" \ - '{ - name: $name, - authorization_flow: $authorization_flow, - invalidation_flow: $invalidation_flow, - client_type: "confidential", - client_id: $client_id, - client_secret: $client_secret, - include_claims_in_id_token: true, - redirect_uris: ($redirect_uris | map({matching_mode: "strict", url: .})), - property_mappings: $property_mappings, - signing_key: $signing_key, - issuer_mode: "per_provider", - sub_mode: "hashed_user_id" - }' -)" - -existing_provider="$( - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -)" - -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/oauth2/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/oauth2/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: Tailscale OIDC provider did not return a primary key" >&2 - exit 1 -fi - -application_payload="$( - jq -n \ - --arg name "$application_name" \ - --arg slug "$application_slug" \ - --arg provider "$provider_pk" \ - --arg launch_url "$launch_url" \ - '{ - name: $name, - slug: $slug, - provider: ($provider | tonumber), - meta_launch_url: $launch_url, - open_in_new_tab: true, - policy_engine_mode: "any" - }' -)" - -existing_application="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? 
| select(.slug == $slug)' \ - | head -n1 -)" - -if [[ -n "$existing_application" ]]; then - application_pk="$(printf '%s\n' "$existing_application" | jq -r '.pk')" - api PATCH "/api/v3/core/applications/${application_pk}/" "$application_payload" >/dev/null -else - create_application_result="$( - api_with_status POST "/api/v3/core/applications/" "$application_payload" - )" - create_application_status="$(printf '%s\n' "$create_application_result" | sed -n '1p')" - create_application_body="$(printf '%s\n' "$create_application_result" | sed '1d')" - - if [[ "$create_application_status" =~ ^20[01]$ ]]; then - application_pk="$(printf '%s\n' "$create_application_body" | jq -r '.pk // empty')" - elif [[ "$create_application_status" == "400" ]] && printf '%s\n' "$create_application_body" | jq -e ' - (.slug // [] | index("Application with this slug already exists.")) != null - or (.provider // [] | index("Application with this provider already exists.")) != null - ' >/dev/null; then - application_pk="existing-duplicate" - else - printf '%s\n' "$create_application_body" >&2 - echo "error: could not reconcile Authentik application ${application_slug}" >&2 - exit 1 - fi -fi - -if [[ -z "${application_pk:-}" ]]; then - echo "error: Tailscale OIDC application did not return a primary key" >&2 - exit 1 -fi - -if [[ -n "$access_group" ]]; then - ensure_application_group_binding "$application_slug" "$access_group" -fi - -if [[ -n "$default_external_application_slug" ]]; then - ensure_default_external_application "$default_external_application_slug" -fi - -for _ in $(seq 1 30); do - if curl -fsS "${authentik_url}/application/o/${application_slug}/.well-known/openid-configuration" >/dev/null 2>&1; then - echo "Synced Authentik Tailscale OIDC application ${application_slug} (${application_name})." - exit 0 - fi - sleep 2 -done - -echo "warning: Tailscale OIDC issuer document for ${application_slug} was not immediately readable; keeping reconciled config." 
>&2 -echo "Synced Authentik Tailscale OIDC application ${application_slug} (${application_name})." diff --git a/Scripts/authentik-sync-zulip-saml.sh b/Scripts/authentik-sync-zulip-saml.sh deleted file mode 100644 index cd18752..0000000 --- a/Scripts/authentik-sync-zulip-saml.sh +++ /dev/null @@ -1,412 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -authentik_url="${AUTHENTIK_URL:-https://auth.burrow.net}" -bootstrap_token="${AUTHENTIK_BOOTSTRAP_TOKEN:-}" -application_slug="${AUTHENTIK_ZULIP_APPLICATION_SLUG:-zulip}" -application_name="${AUTHENTIK_ZULIP_APPLICATION_NAME:-Zulip}" -provider_name="${AUTHENTIK_ZULIP_PROVIDER_NAME:-Zulip}" -acs_url="${AUTHENTIK_ZULIP_ACS_URL:-https://chat.burrow.net/complete/saml/}" -audience="${AUTHENTIK_ZULIP_AUDIENCE:-https://chat.burrow.net}" -launch_url="${AUTHENTIK_ZULIP_LAUNCH_URL:-https://chat.burrow.net/}" -access_group="${AUTHENTIK_ZULIP_ACCESS_GROUP:-}" -admin_group="${AUTHENTIK_ZULIP_ADMIN_GROUP:-}" -issuer="${AUTHENTIK_ZULIP_ISSUER:-$authentik_url}" - -usage() { - cat <<'EOF' -Usage: Scripts/authentik-sync-zulip-saml.sh - -Required environment: - AUTHENTIK_BOOTSTRAP_TOKEN - -Optional environment: - AUTHENTIK_URL - AUTHENTIK_ZULIP_APPLICATION_SLUG - AUTHENTIK_ZULIP_APPLICATION_NAME - AUTHENTIK_ZULIP_PROVIDER_NAME - AUTHENTIK_ZULIP_ACS_URL - AUTHENTIK_ZULIP_AUDIENCE - AUTHENTIK_ZULIP_LAUNCH_URL - AUTHENTIK_ZULIP_ACCESS_GROUP - AUTHENTIK_ZULIP_ADMIN_GROUP - AUTHENTIK_ZULIP_ISSUER -EOF -} - -if [[ "${1:-}" == "-h" || "${1:-}" == "--help" ]]; then - usage - exit 0 -fi - -if [[ -z "$bootstrap_token" ]]; then - echo "error: AUTHENTIK_BOOTSTRAP_TOKEN is required" >&2 - exit 1 -fi - -api() { - local method="$1" - local path="$2" - local data="${3:-}" - - if [[ -n "$data" ]]; then - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - else - curl -fsS \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - 
"${authentik_url}${path}" - fi -} - -api_with_status() { - local method="$1" - local path="$2" - local data="${3:-}" - local response_file status - - response_file="$(mktemp)" - trap 'rm -f "$response_file"' RETURN - - if [[ -n "$data" ]]; then - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - -H "Content-Type: application/json" \ - -d "$data" \ - "${authentik_url}${path}" - )" - else - status="$( - curl -sS \ - -o "$response_file" \ - -w '%{http_code}' \ - -X "$method" \ - -H "Authorization: Bearer ${bootstrap_token}" \ - "${authentik_url}${path}" - )" - fi - - printf '%s\n' "$status" - cat "$response_file" -} - -wait_for_authentik() { - for _ in $(seq 1 90); do - if curl -fsS "${authentik_url}/-/health/ready/" >/dev/null 2>&1; then - return 0 - fi - sleep 2 - done - - echo "error: Authentik did not become ready at ${authentik_url}" >&2 - exit 1 -} - -lookup_oauth_template_field() { - local field="$1" - - api GET "/api/v3/providers/oauth2/?page_size=200" \ - | jq -r --arg field "$field" '.results[]? | select(.assigned_application_slug == "ts") | .[$field]' \ - | head -n1 -} - -lookup_group_pk() { - local group_name="$1" - - api GET "/api/v3/core/groups/?page_size=200" \ - | jq -r --arg group_name "$group_name" '.results[]? | select(.name == $group_name) | .pk // empty' \ - | head -n1 -} - -lookup_application_pk() { - local slug="$1" - - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -r --arg slug "$slug" '.results[]? 
| select(.slug == $slug) | .pk // empty' \ - | head -n1 -} - -ensure_application_group_binding() { - local application_slug="$1" - local group_name="$2" - local application_pk group_pk existing payload binding_pk - - application_pk="$(lookup_application_pk "$application_slug")" - if [[ -z "$application_pk" ]]; then - echo "warning: could not resolve Authentik application ${application_slug}; skipping application group binding" >&2 - return 0 - fi - - group_pk="$(lookup_group_pk "$group_name")" - if [[ -z "$group_pk" ]]; then - echo "error: could not resolve Authentik group ${group_name}" >&2 - exit 1 - fi - - existing="$( - api GET "/api/v3/policies/bindings/?page_size=200&target=${application_pk}" \ - | jq -c --arg group_pk "$group_pk" '.results[]? | select(.group == $group_pk)' \ - | head -n1 - )" - - payload="$( - jq -cn \ - --arg target "$application_pk" \ - --arg group "$group_pk" \ - '{ - group: $group, - target: $target, - negate: false, - enabled: true, - order: 100, - timeout: 30, - failure_result: false - }' - )" - - if [[ -n "$existing" ]]; then - binding_pk="$(printf '%s\n' "$existing" | jq -r '.pk')" - api PATCH "/api/v3/policies/bindings/${binding_pk}/" "$payload" >/dev/null - else - api POST "/api/v3/policies/bindings/" "$payload" >/dev/null - fi -} - -reconcile_property_mapping() { - local name="$1" - local saml_name="$2" - local friendly_name="$3" - local expression="$4" - local payload existing_pk - - payload="$( - jq -n \ - --arg name "$name" \ - --arg saml_name "$saml_name" \ - --arg friendly_name "$friendly_name" \ - --arg expression "$expression" \ - '{ - name: $name, - saml_name: $saml_name, - friendly_name: $friendly_name, - expression: $expression - }' - )" - - existing_pk="$( - api GET "/api/v3/propertymappings/provider/saml/?page_size=200" \ - | jq -r --arg name "$name" '.results[]? 
| select(.name == $name) | .pk' \ - | head -n1 - )" - - if [[ -n "$existing_pk" ]]; then - api PATCH "/api/v3/propertymappings/provider/saml/${existing_pk}/" "$payload" >/dev/null - printf '%s\n' "$existing_pk" - else - api POST "/api/v3/propertymappings/provider/saml/" "$payload" | jq -r '.pk // empty' - fi -} - -wait_for_authentik - -authorization_flow="$(lookup_oauth_template_field authorization_flow)" -invalidation_flow="$(lookup_oauth_template_field invalidation_flow)" -signing_kp="$(lookup_oauth_template_field signing_key)" - -if [[ -z "$authorization_flow" || -z "$invalidation_flow" || -z "$signing_kp" ]]; then - echo "error: could not resolve Authentik provider defaults from Burrow Tailnet template" >&2 - exit 1 -fi - -email_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Zulip SAML Email" \ - "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress" \ - "email" \ - 'return request.user.email' -)" - -name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Zulip SAML Name" \ - "name" \ - "name" \ - 'return request.user.name or request.user.username' -)" - -first_name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Zulip SAML First Name" \ - "firstName" \ - "firstName" \ - $'parts = (request.user.name or "").split(" ", 1)\nif len(parts) > 0 and parts[0]:\n return parts[0]\nreturn request.user.username' -)" - -last_name_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Zulip SAML Last Name" \ - "lastName" \ - "lastName" \ - $'parts = (request.user.name or "").rsplit(" ", 1)\nif len(parts) == 2 and parts[1]:\n return parts[1]\nreturn request.user.username' -)" - -role_mapping_pk="" -if [[ -n "$admin_group" ]]; then - role_mapping_pk="$( - reconcile_property_mapping \ - "Burrow Zulip SAML Role" \ - "zulip_role" \ - "zulip_role" \ - $'admin_group = "'$admin_group$'"\nif any(group.name == admin_group for group in request.user.ak_groups.all()):\n return "owner"\nreturn None' - )" -fi - -if [[ -z "$email_mapping_pk" || -z 
"$name_mapping_pk" || -z "$first_name_mapping_pk" || -z "$last_name_mapping_pk" ]]; then - echo "error: failed to reconcile Zulip SAML property mappings" >&2 - exit 1 -fi - -provider_payload="$( - jq -n \ - --arg name "$provider_name" \ - --arg authorization_flow "$authorization_flow" \ - --arg invalidation_flow "$invalidation_flow" \ - --arg acs_url "$acs_url" \ - --arg audience "$audience" \ - --arg issuer "$issuer" \ - --arg signing_kp "$signing_kp" \ - --arg name_id_mapping "$email_mapping_pk" \ - --arg email_mapping "$email_mapping_pk" \ - --arg name_mapping "$name_mapping_pk" \ - --arg first_name_mapping "$first_name_mapping_pk" \ - --arg last_name_mapping "$last_name_mapping_pk" \ - --arg role_mapping "$role_mapping_pk" \ - '{ - name: $name, - authorization_flow: $authorization_flow, - invalidation_flow: $invalidation_flow, - acs_url: $acs_url, - audience: $audience, - issuer: $issuer, - signing_kp: $signing_kp, - sign_assertion: true, - sign_response: true, - sp_binding: "post", - name_id_mapping: $name_id_mapping, - property_mappings: [ - $email_mapping, - $name_mapping, - $first_name_mapping, - $last_name_mapping - ] + (if $role_mapping != "" then [$role_mapping] else [] end) - }' -)" - -existing_provider="$( - api GET "/api/v3/providers/saml/?page_size=200" \ - | jq -c \ - --arg application_slug "$application_slug" \ - --arg provider_name "$provider_name" \ - '.results[]? 
| select(.assigned_application_slug == $application_slug or .name == $provider_name)' \ - | head -n1 -)" - -if [[ -n "$existing_provider" ]]; then - provider_pk="$(printf '%s\n' "$existing_provider" | jq -r '.pk')" - api PATCH "/api/v3/providers/saml/${provider_pk}/" "$provider_payload" >/dev/null -else - provider_pk="$( - api POST "/api/v3/providers/saml/" "$provider_payload" \ - | jq -r '.pk // empty' - )" -fi - -if [[ -z "${provider_pk:-}" ]]; then - echo "error: Zulip SAML provider did not return a primary key" >&2 - exit 1 -fi - -application_payload="$( - jq -n \ - --arg name "$application_name" \ - --arg slug "$application_slug" \ - --arg provider "$provider_pk" \ - --arg launch_url "$launch_url" \ - '{ - name: $name, - slug: $slug, - provider: ($provider | tonumber), - meta_launch_url: $launch_url, - open_in_new_tab: true, - policy_engine_mode: "any" - }' -)" - -existing_application="$( - api GET "/api/v3/core/applications/?page_size=200" \ - | jq -c --arg slug "$application_slug" '.results[]? 
| select(.slug == $slug)' \ - | head -n1 -)" - -if [[ -n "$existing_application" ]]; then - application_pk="$(printf '%s\n' "$existing_application" | jq -r '.pk')" - api PATCH "/api/v3/core/applications/${application_pk}/" "$application_payload" >/dev/null -else - create_application_result="$( - api_with_status POST "/api/v3/core/applications/" "$application_payload" - )" - create_application_status="$(printf '%s\n' "$create_application_result" | sed -n '1p')" - create_application_body="$(printf '%s\n' "$create_application_result" | sed '1d')" - - if [[ "$create_application_status" =~ ^20[01]$ ]]; then - application_pk="$(printf '%s\n' "$create_application_body" | jq -r '.pk // empty')" - elif [[ "$create_application_status" == "400" ]] && printf '%s\n' "$create_application_body" | jq -e ' - (.slug // [] | index("Application with this slug already exists.")) != null - or (.provider // [] | index("Application with this provider already exists.")) != null - ' >/dev/null; then - application_pk="existing-duplicate" - else - printf '%s\n' "$create_application_body" >&2 - echo "error: could not reconcile Authentik application ${application_slug}" >&2 - exit 1 - fi -fi - -if [[ -z "${application_pk:-}" ]]; then - echo "error: Zulip SAML application did not return a primary key" >&2 - exit 1 -fi - -if [[ -n "$access_group" ]]; then - ensure_application_group_binding "$application_slug" "$access_group" -fi - -for _ in $(seq 1 30); do - metadata_status="$( - curl -sS \ - -o /dev/null \ - -w '%{http_code}' \ - --max-redirs 0 \ - "${authentik_url}/application/saml/${application_slug}/metadata/" \ - || true - )" - case "$metadata_status" in - 200|301|302|307|308) - echo "Synced Authentik Zulip SAML application ${application_slug} (${application_name})." - exit 0 - ;; - esac - sleep 2 -done - -echo "warning: Zulip SAML metadata for ${application_slug} was not immediately readable; keeping reconciled config." 
>&2 -echo "Synced Authentik Zulip SAML application ${application_slug} (${application_name})." diff --git a/Scripts/bep b/Scripts/bep deleted file mode 100755 index 1c6bd64..0000000 --- a/Scripts/bep +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root=$(git rev-parse --show-toplevel) -proposals_dir="$repo_root/evolution/proposals" - -auto_browse() { - if command -v wisu >/dev/null 2>&1; then - exec wisu -i -g --icons "$repo_root/evolution" - fi - exec ls -la "$repo_root/evolution" -} - -usage() { - cat <<'USAGE' -Usage: bep [command] - -Commands: - list [--status ] List BEPs, optionally filtered by status. - open Open a BEP in $EDITOR. - help Show this help. - -If no command is provided, bep launches a simple browser for evolution/. -USAGE -} - -normalize_id() { - local raw="$1" - if [[ "$raw" =~ ^BEP-[0-9]+$ ]]; then - printf '%s' "$raw" - return 0 - fi - if [[ "$raw" =~ ^[0-9]+$ ]]; then - printf 'BEP-%04d' "$raw" - return 0 - fi - return 1 -} - -read_status() { - local file="$1" - awk -F ': ' '/^Status:/ {print $2; exit}' "$file" -} - -read_title() { - local file="$1" - local line - line=$(head -n 1 "$file" || true) - printf '%s' "$line" | sed -E 's/^# `[^`]+`[[:space:]]+//; s/^[^A-Za-z0-9]+//' -} - -list_bep() { - local filter="${1:-}" - local filter_lower="" - if [[ -n "$filter" ]]; then - filter_lower=$(printf '%s' "$filter" | tr '[:upper:]' '[:lower:]') - fi - - printf '%-10s %-18s %s\n' "BEP" "Status" "Title" - local file - local entries=() - for file in "$proposals_dir"/BEP-*.md; do - [[ -e "$file" ]] || continue - local base - base=$(basename "$file") - local id - id=$(printf '%s' "$base" | cut -d- -f1-2) - local status - status=$(read_status "$file") - local status_lower - status_lower=$(printf '%s' "$status" | tr '[:upper:]' '[:lower:]') - if [[ -n "$filter_lower" && "$status_lower" != "$filter_lower" ]]; then - continue - fi - local title - title=$(read_title "$file") - entries+=("$(printf '%-10s %-18s %s' "$id" 
"$status" "$title")") - done - if [[ ${#entries[@]} -gt 0 ]]; then - printf '%s\n' "${entries[@]}" | sort - fi -} - -open_bep() { - local raw="$1" - local id - if ! id=$(normalize_id "$raw"); then - echo "Unknown BEP id: $raw" >&2 - exit 1 - fi - local matches - matches=("$proposals_dir"/"$id"-*.md) - if [[ ${#matches[@]} -eq 0 || ! -e "${matches[0]}" ]]; then - echo "No proposal found for $id" >&2 - exit 1 - fi - if [[ ${#matches[@]} -gt 1 ]]; then - echo "Multiple proposals match $id:" >&2 - printf ' %s\n' "${matches[@]}" >&2 - exit 1 - fi - local editor="${EDITOR:-vi}" - exec "$editor" "${matches[0]}" -} - -command=${1:-} -case "$command" in - "") - auto_browse - ;; - list) - if [[ ${2:-} == "--status" && -n ${3:-} ]]; then - list_bep "$3" - else - list_bep - fi - ;; - open) - if [[ -z ${2:-} ]]; then - echo "bep open requires an id" >&2 - exit 1 - fi - open_bep "$2" - ;; - help|-h|--help) - usage - ;; - *) - echo "Unknown command: $command" >&2 - usage - exit 1 - ;; -esac diff --git a/Scripts/bootstrap-forge-intake.sh b/Scripts/bootstrap-forge-intake.sh deleted file mode 100644 index 0cc1d91..0000000 --- a/Scripts/bootstrap-forge-intake.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" - -usage() { - cat <<'EOF' -Usage: Scripts/bootstrap-forge-intake.sh [options] - -Copy the minimum Burrow forge bootstrap secrets onto the target host under -/var/lib/burrow/intake with the ownership expected by the NixOS services. 
- -Options: - --host SSH target (default: root@git.burrow.net) - --ssh-key SSH private key used to reach the host - (default: intake/agent_at_burrow_net_ed25519) - --password-file Forgejo admin bootstrap password file - (default: intake/forgejo_pass_contact_at_burrow_net.txt) - --agent-key-file Agent SSH private key copied for runner bootstrap - (default: intake/agent_at_burrow_net_ed25519) - --no-verify Skip remote ls/stat verification after install - -h, --help Show this help text -EOF -} - -HOST="${BURROW_FORGE_HOST:-root@git.burrow.net}" -SSH_KEY="${BURROW_FORGE_SSH_KEY:-${REPO_ROOT}/intake/agent_at_burrow_net_ed25519}" -PASSWORD_FILE="${BURROW_FORGE_PASSWORD_FILE:-${REPO_ROOT}/intake/forgejo_pass_contact_at_burrow_net.txt}" -AGENT_KEY_FILE="${BURROW_FORGE_AGENT_KEY_FILE:-${REPO_ROOT}/intake/agent_at_burrow_net_ed25519}" -KNOWN_HOSTS_FILE="${BURROW_FORGE_KNOWN_HOSTS_FILE:-${HOME}/.cache/burrow/forge-known_hosts}" -VERIFY=1 - -while [[ $# -gt 0 ]]; do - case "$1" in - --host) - HOST="${2:?missing value for --host}" - shift 2 - ;; - --ssh-key) - SSH_KEY="${2:?missing value for --ssh-key}" - shift 2 - ;; - --password-file) - PASSWORD_FILE="${2:?missing value for --password-file}" - shift 2 - ;; - --agent-key-file) - AGENT_KEY_FILE="${2:?missing value for --agent-key-file}" - shift 2 - ;; - --no-verify) - VERIFY=0 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -mkdir -p "$(dirname "${KNOWN_HOSTS_FILE}")" - -for path in "${SSH_KEY}" "${PASSWORD_FILE}" "${AGENT_KEY_FILE}"; do - if [[ ! 
-s "${path}" ]]; then - echo "required file missing or empty: ${path}" >&2 - exit 1 - fi -done - -ssh_opts=( - -i "${SSH_KEY}" - -o IdentitiesOnly=yes - -o UserKnownHostsFile="${KNOWN_HOSTS_FILE}" - -o StrictHostKeyChecking=accept-new -) - -remote_tmp="$(ssh "${ssh_opts[@]}" "${HOST}" "mktemp -d")" -cleanup() { - if [[ -n "${remote_tmp:-}" ]]; then - ssh "${ssh_opts[@]}" "${HOST}" "rm -rf '${remote_tmp}'" >/dev/null 2>&1 || true - fi -} -trap cleanup EXIT - -scp "${ssh_opts[@]}" \ - "${PASSWORD_FILE}" \ - "${AGENT_KEY_FILE}" \ - "${HOST}:${remote_tmp}/" - -ssh "${ssh_opts[@]}" "${HOST}" " - set -euo pipefail - install -d -m 0755 /var/lib/burrow/intake - install -m 0400 -o forgejo -g forgejo '${remote_tmp}/$(basename "${PASSWORD_FILE}")' /var/lib/burrow/intake/forgejo_pass_contact_at_burrow_net.txt - install -m 0400 -o root -g root '${remote_tmp}/$(basename "${AGENT_KEY_FILE}")' /var/lib/burrow/intake/agent_at_burrow_net_ed25519 -" - -if [[ "${VERIFY}" -eq 1 ]]; then - ssh "${ssh_opts[@]}" "${HOST}" " - set -euo pipefail - ls -l \ - /var/lib/burrow/intake/forgejo_pass_contact_at_burrow_net.txt \ - /var/lib/burrow/intake/agent_at_burrow_net_ed25519 - " -fi - -echo "Burrow forge bootstrap intake sync complete (host=${HOST})." 
diff --git a/Scripts/check-bep-metadata.py b/Scripts/check-bep-metadata.py deleted file mode 100755 index d054934..0000000 --- a/Scripts/check-bep-metadata.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import annotations - -import pathlib -import re -import sys - - -REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent -PROPOSALS_DIR = REPO_ROOT / "evolution" / "proposals" -ALLOWED_STATUSES = { - "Pitch", - "Draft", - "In Review", - "Accepted", - "Implemented", - "Rejected", - "Returned for Revision", - "Superseded", - "Archived", -} -REQUIRED_FIELDS = [ - "Status", - "Proposal", - "Authors", - "Coordinator", - "Reviewers", - "Constitution Sections", - "Implementation PRs", - "Decision Date", -] - - -def text_block_lines(path: pathlib.Path) -> list[str]: - content = path.read_text(encoding="utf-8") - match = re.search(r"```text\n(.*?)\n```", content, re.DOTALL) - if not match: - raise ValueError("missing leading ```text metadata block") - return [line.rstrip() for line in match.group(1).splitlines() if line.strip()] - - -def validate(path: pathlib.Path) -> list[str]: - errors: list[str] = [] - proposal_id = path.name.split("-", 2)[:2] - expected_id = "-".join(proposal_id).removesuffix(".md") - - try: - lines = text_block_lines(path) - except ValueError as exc: - return [f"{path}: {exc}"] - - field_names = [line.split(":", 1)[0] for line in lines] - if field_names != REQUIRED_FIELDS: - errors.append( - f"{path}: metadata fields must appear in order {', '.join(REQUIRED_FIELDS)}" - ) - return errors - - fields = dict(line.split(":", 1) for line in lines) - fields = {key.strip(): value.strip() for key, value in fields.items()} - - if fields["Status"] not in ALLOWED_STATUSES: - errors.append(f"{path}: invalid Status {fields['Status']!r}") - - if fields["Proposal"] != expected_id: - errors.append( - f"{path}: Proposal field {fields['Proposal']!r} does not match filename id {expected_id!r}" - ) - - if fields["Status"] in {"Accepted", 
"Implemented", "Superseded", "Rejected", "Archived"} and fields["Decision Date"] == "Pending": - errors.append( - f"{path}: Decision Date must not be Pending once status is {fields['Status']}" - ) - - return errors - - -def main() -> int: - errors: list[str] = [] - for path in sorted(PROPOSALS_DIR.glob("BEP-*.md")): - errors.extend(validate(path)) - - if errors: - for error in errors: - print(error, file=sys.stderr) - return 1 - - print(f"checked {len(list(PROPOSALS_DIR.glob('BEP-*.md')))} BEPs") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) diff --git a/Scripts/check-forge-host.sh b/Scripts/check-forge-host.sh deleted file mode 100755 index 0f79bf4..0000000 --- a/Scripts/check-forge-host.sh +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" - -usage() { - cat <<'EOF' -Usage: Scripts/check-forge-host.sh [options] - -Run a post-boot verification pass against the Burrow forge host. 
- -Options: - --host SSH target (default: root@git.burrow.net) - --ssh-key SSH private key (default: intake/agent_at_burrow_net_ed25519) - --expect-nsc Fail if forgejo-nsc services are not active - --expect-tailnet Fail if Authentik and Headscale services are not active - -h, --help Show this help text -EOF -} - -HOST="${BURROW_FORGE_HOST:-root@git.burrow.net}" -SSH_KEY="${BURROW_FORGE_SSH_KEY:-${REPO_ROOT}/intake/agent_at_burrow_net_ed25519}" -KNOWN_HOSTS_FILE="${BURROW_FORGE_KNOWN_HOSTS_FILE:-${HOME}/.cache/burrow/forge-known_hosts}" -EXPECT_NSC=0 -EXPECT_TAILNET=0 - -while [[ $# -gt 0 ]]; do - case "$1" in - --host) - HOST="${2:?missing value for --host}" - shift 2 - ;; - --ssh-key) - SSH_KEY="${2:?missing value for --ssh-key}" - shift 2 - ;; - --expect-nsc) - EXPECT_NSC=1 - shift - ;; - --expect-tailnet) - EXPECT_TAILNET=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -mkdir -p "$(dirname "${KNOWN_HOSTS_FILE}")" - -if [[ ! 
-f "${SSH_KEY}" ]]; then - echo "forge SSH key not found: ${SSH_KEY}" >&2 - exit 1 -fi - -ssh \ - -i "${SSH_KEY}" \ - -o IdentitiesOnly=yes \ - -o UserKnownHostsFile="${KNOWN_HOSTS_FILE}" \ - -o StrictHostKeyChecking=accept-new \ - "${HOST}" \ - EXPECT_NSC="${EXPECT_NSC}" \ - EXPECT_TAILNET="${EXPECT_TAILNET}" \ - 'bash -s' <<'EOF' -set -euo pipefail - -base_services=( - forgejo.service - caddy.service - burrow-forgejo-bootstrap.service - burrow-forgejo-runner-bootstrap.service - burrow-forgejo-runner.service -) - -nsc_services=( - forgejo-nsc-dispatcher.service - forgejo-nsc-autoscaler.service -) - -tailnet_services=( - burrow-authentik-runtime.service - burrow-authentik-ready.service - headscale.service - headscale-bootstrap.service -) - -show_service() { - local service="$1" - systemctl show \ - --no-pager \ - --property Id \ - --property LoadState \ - --property UnitFileState \ - --property ActiveState \ - --property SubState \ - --property Result \ - "${service}" -} - -service_is_healthy() { - local service="$1" - local active_state - local result - local unit_type - - active_state="$(systemctl show --property ActiveState --value "${service}")" - result="$(systemctl show --property Result --value "${service}")" - unit_type="$(systemctl show --property Type --value "${service}")" - - if [[ "${active_state}" == "active" ]]; then - return 0 - fi - - if [[ "${unit_type}" == "oneshot" && "${active_state}" == "inactive" && "${result}" == "success" ]]; then - return 0 - fi - - return 1 -} - -for service in "${base_services[@]}"; do - echo "== ${service} ==" - show_service "${service}" - if ! 
service_is_healthy "${service}"; then - echo "required service is not active: ${service}" >&2 - exit 1 - fi -done - -for service in "${nsc_services[@]}"; do - echo "== ${service} ==" - show_service "${service}" || true - if [[ "${EXPECT_NSC}" == "1" && "$(systemctl is-active "${service}" 2>/dev/null || true)" != "active" ]]; then - echo "required NSC service is not active: ${service}" >&2 - exit 1 - fi -done - -for service in "${tailnet_services[@]}"; do - echo "== ${service} ==" - show_service "${service}" || true - if [[ "${EXPECT_TAILNET}" == "1" ]] && ! service_is_healthy "${service}"; then - echo "required tailnet service is not active: ${service}" >&2 - exit 1 - fi -done - -echo "== intake ==" -ls -l /var/lib/burrow/intake || true - -if [[ "${EXPECT_TAILNET}" == "1" ]]; then - echo "== agenix ==" - ls -l /run/agenix || true - test -s /run/agenix/burrowAuthentikEnv - test -s /run/agenix/burrowHeadscaleOidcClientSecret -fi - -if [[ "${EXPECT_NSC}" == "1" ]]; then - echo "== agenix-nsc ==" - ls -l /run/agenix || true - test -s /run/agenix/burrowForgejoNscToken - test -s /run/agenix/burrowForgejoNscDispatcherConfig - test -s /run/agenix/burrowForgejoNscAutoscalerConfig -fi - -if command -v curl >/dev/null 2>&1; then - echo "== http-local ==" - curl -fsS -o /dev/null -w 'forgejo_login %{http_code}\n' http://127.0.0.1:3000/user/login - curl -fsS -o /dev/null -H 'Host: burrow.net' -w 'burrow_root %{http_code}\n' http://127.0.0.1/ - curl -fsS -o /dev/null -H 'Host: git.burrow.net' -w 'git_login %{http_code}\n' http://127.0.0.1/user/login - if [[ "${EXPECT_TAILNET}" == "1" ]]; then - curl -fsS -o /dev/null -H 'Host: auth.burrow.net' -w 'authentik_ready %{http_code}\n' http://127.0.0.1/-/health/ready/ - curl -sS -o /dev/null -H 'Host: ts.burrow.net' -w 'headscale_root %{http_code}\n' http://127.0.0.1/ || true - fi -fi -EOF diff --git a/Scripts/ci/build-release-artifacts.sh b/Scripts/ci/build-release-artifacts.sh deleted file mode 100755 index 20b4c06..0000000 --- 
a/Scripts/ci/build-release-artifacts.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")/../.." && pwd)" -cd "${repo_root}" - -release_ref="${RELEASE_REF:-manual-${GITHUB_SHA:-unknown}}" -target="x86_64-unknown-linux-gnu" -out_dir="${repo_root}/dist" -staging="${out_dir}/burrow-${release_ref}-${target}" - -mkdir -p "${staging}" - -cargo build --locked --release -p burrow --bin burrow -install -m 0755 target/release/burrow "${staging}/burrow" -cp README.md "${staging}/README.md" - -tarball="${out_dir}/burrow-${release_ref}-${target}.tar.gz" -tar -C "${out_dir}" -czf "${tarball}" "$(basename "${staging}")" -shasum -a 256 "${tarball}" > "${tarball}.sha256" diff --git a/Scripts/ci/ensure-nix.sh b/Scripts/ci/ensure-nix.sh deleted file mode 100755 index 14be895..0000000 --- a/Scripts/ci/ensure-nix.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -source_nix_profile() { - local candidate - for candidate in \ - "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh" \ - "${HOME}/.nix-profile/etc/profile.d/nix.sh" - do - if [[ -f "${candidate}" ]]; then - # shellcheck disable=SC1090 - . "${candidate}" - return 0 - fi - done - return 1 -} - -linux_cp_supports_preserve() { - cp --help 2>&1 | grep -q -- '--preserve' -} - -ensure_root_owned_home() { - if [[ "$(id -u)" -ne 0 ]]; then - return 0 - fi - - if [[ ! -d "${HOME}" ]] || [[ ! -O "${HOME}" ]]; then - export HOME="/root" - fi - - mkdir -p "${HOME}" -} - -ensure_linux_nixbld_accounts() { - if [[ "$(id -u)" -ne 0 ]]; then - return 0 - fi - - if command -v getent >/dev/null 2>&1 && getent group nixbld >/dev/null 2>&1; then - return 0 - fi - - if command -v addgroup >/dev/null 2>&1 && ! 
command -v groupadd >/dev/null 2>&1; then - addgroup -S nixbld >/dev/null 2>&1 || true - for i in $(seq 1 10); do - adduser -S -D -H -h /var/empty -s /sbin/nologin -G nixbld "nixbld${i}" >/dev/null 2>&1 || true - done - return 0 - fi - - if command -v groupadd >/dev/null 2>&1; then - groupadd -r nixbld >/dev/null 2>&1 || true - for i in $(seq 1 10); do - useradd \ - --system \ - --no-create-home \ - --home-dir /var/empty \ - --shell /usr/sbin/nologin \ - --gid nixbld \ - "nixbld${i}" >/dev/null 2>&1 || true - done - return 0 - fi - - echo "linux nix bootstrap requires nixbld group creation support" >&2 - exit 1 -} - -ensure_linux_nix_bootstrap_prereqs() { - if linux_cp_supports_preserve; then - ensure_root_owned_home - ensure_linux_nixbld_accounts - return 0 - fi - - if command -v apk >/dev/null 2>&1; then - apk add --no-cache coreutils xz >/dev/null - elif command -v apt-get >/dev/null 2>&1; then - export DEBIAN_FRONTEND=noninteractive - apt-get update -y >/dev/null - apt-get install -y coreutils xz-utils >/dev/null - elif command -v dnf >/dev/null 2>&1; then - dnf install -y coreutils xz >/dev/null - elif command -v yum >/dev/null 2>&1; then - yum install -y coreutils xz >/dev/null - else - echo "linux nix bootstrap requires GNU cp but no supported package manager was found" >&2 - exit 1 - fi - - linux_cp_supports_preserve || { - echo "linux nix bootstrap still lacks GNU cp after installing prerequisites" >&2 - exit 1 - } - - ensure_root_owned_home - ensure_linux_nixbld_accounts -} - -if ! command -v nix >/dev/null 2>&1; then - if ! 
command -v curl >/dev/null 2>&1; then - echo "curl is required to install nix" >&2 - exit 1 - fi - - case "$(uname -s)" in - Linux) - ensure_linux_nix_bootstrap_prereqs - curl -fsSL https://nixos.org/nix/install | sh -s -- --no-daemon - ;; - Darwin) - installer="$(mktemp -t burrow-nix.XXXXXX)" - trap 'rm -f "${installer}"' EXIT - curl -fsSL -o "${installer}" https://install.determinate.systems/nix - chmod +x "${installer}" - if command -v sudo >/dev/null 2>&1; then - if sudo -n true 2>/dev/null; then - sudo -n sh "${installer}" install --no-confirm - else - sudo sh "${installer}" install --no-confirm - fi - else - sh "${installer}" install --no-confirm - fi - ;; - *) - echo "unsupported platform for nix bootstrap: $(uname -s)" >&2 - exit 1 - ;; - esac -fi - -source_nix_profile || true -export PATH="${HOME}/.nix-profile/bin:/nix/var/nix/profiles/default/bin:/nix/var/nix/profiles/default/sbin:${PATH}" - -config_root="${XDG_CONFIG_HOME:-$HOME/.config}" -config_file="${config_root}/nix/nix.conf" -if [[ -e "${config_file}" && ! 
-w "${config_file}" ]]; then - config_root="$(mktemp -d -t burrow-nix-config.XXXXXX)" - export XDG_CONFIG_HOME="${config_root}" - config_file="${XDG_CONFIG_HOME}/nix/nix.conf" -fi - -mkdir -p "$(dirname -- "${config_file}")" -cat > "${config_file}" <<'EOF' -experimental-features = nix-command flakes -sandbox = true -fallback = true -substituters = https://cache.nixos.org -trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= -EOF - -command -v nix >/dev/null 2>&1 || { - echo "nix is still unavailable after bootstrap" >&2 - exit 1 -} diff --git a/Scripts/ci/publish-forgejo-release.sh b/Scripts/ci/publish-forgejo-release.sh deleted file mode 100755 index 338f71b..0000000 --- a/Scripts/ci/publish-forgejo-release.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -: "${API_URL:?API_URL is required}" -: "${REPOSITORY:?REPOSITORY is required}" -: "${RELEASE_TAG:?RELEASE_TAG is required}" -: "${TOKEN:?TOKEN is required}" - -release_api="${API_URL}/repos/${REPOSITORY}/releases" -tag_api="${release_api}/tags/${RELEASE_TAG}" -release_json="$(mktemp)" -create_json="$(mktemp)" -trap 'rm -f "${release_json}" "${create_json}"' EXIT - -status="$( - curl -sS -o "${release_json}" -w '%{http_code}' \ - -H "Authorization: token ${TOKEN}" \ - "${tag_api}" -)" - -if [[ "${status}" == "404" ]]; then - jq -n \ - --arg tag "${RELEASE_TAG}" \ - --arg name "Burrow ${RELEASE_TAG}" \ - '{ - tag_name: $tag, - target_commitish: $tag, - name: $name, - body: "Automated prerelease built on Forgejo Namespace runners.", - draft: false, - prerelease: true - }' > "${create_json}" - - curl -fsS \ - -H "Authorization: token ${TOKEN}" \ - -H "Content-Type: application/json" \ - -d @"${create_json}" \ - "${release_api}" > "${release_json}" -elif [[ "${status}" != "200" ]]; then - echo "failed to query Forgejo release for ${RELEASE_TAG} (HTTP ${status})" >&2 - cat "${release_json}" >&2 - exit 1 -fi - -release_id="$(jq -r '.id' "${release_json}")" -if 
[[ -z "${release_id}" || "${release_id}" == "null" ]]; then - echo "Forgejo release payload is missing an id" >&2 - cat "${release_json}" >&2 - exit 1 -fi - -for file in dist/*; do - name="$(basename "${file}")" - asset_id="$(jq -r --arg name "${name}" '.assets[]? | select(.name == $name) | .id' "${release_json}" | head -n1)" - if [[ -n "${asset_id}" ]]; then - curl -fsS -X DELETE \ - -H "Authorization: token ${TOKEN}" \ - "${release_api}/${release_id}/assets/${asset_id}" >/dev/null - fi - - curl -fsS \ - -H "Authorization: token ${TOKEN}" \ - -F "attachment=@${file}" \ - "${release_api}/${release_id}/assets?name=${name}" >/dev/null -done diff --git a/Scripts/cloudflare-upsert-a-record.sh b/Scripts/cloudflare-upsert-a-record.sh deleted file mode 100755 index 88745af..0000000 --- a/Scripts/cloudflare-upsert-a-record.sh +++ /dev/null @@ -1,165 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -usage() { - cat <<'EOF' -Usage: Scripts/cloudflare-upsert-a-record.sh --zone --name --ipv4
[options] - -Upsert a DNS-only or proxied Cloudflare A record without putting the API token on -the process list. - -Options: - --zone Cloudflare zone name, for example burrow.net - --name Fully-qualified DNS record name - --ipv4
IPv4 address for the A record - --token-file Cloudflare API token file - default: intake/cloudflare-token.txt - --ttl Record TTL, or auto - default: auto - --proxied Whether to proxy through Cloudflare - default: false - -h, --help Show this help -EOF -} - -ZONE_NAME="" -RECORD_NAME="" -IPV4="" -TOKEN_FILE="intake/cloudflare-token.txt" -TTL_VALUE="auto" -PROXIED="false" - -while [[ $# -gt 0 ]]; do - case "$1" in - --zone) - ZONE_NAME="${2:?missing value for --zone}" - shift 2 - ;; - --name) - RECORD_NAME="${2:?missing value for --name}" - shift 2 - ;; - --ipv4) - IPV4="${2:?missing value for --ipv4}" - shift 2 - ;; - --token-file) - TOKEN_FILE="${2:?missing value for --token-file}" - shift 2 - ;; - --ttl) - TTL_VALUE="${2:?missing value for --ttl}" - shift 2 - ;; - --proxied) - PROXIED="${2:?missing value for --proxied}" - shift 2 - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $1" >&2 - usage >&2 - exit 2 - ;; - esac -done - -if [[ -z "${ZONE_NAME}" || -z "${RECORD_NAME}" || -z "${IPV4}" ]]; then - usage >&2 - exit 2 -fi - -if [[ ! -f "${TOKEN_FILE}" ]]; then - echo "Cloudflare token file not found: ${TOKEN_FILE}" >&2 - exit 1 -fi - -if [[ ! 
"${IPV4}" =~ ^([0-9]{1,3}\.){3}[0-9]{1,3}$ ]]; then - echo "Invalid IPv4 address: ${IPV4}" >&2 - exit 1 -fi - -case "${PROXIED}" in - true|false) - ;; - *) - echo "--proxied must be true or false" >&2 - exit 1 - ;; -esac - -case "${TTL_VALUE}" in - auto) - TTL_JSON=1 - ;; - ''|*[!0-9]*) - echo "--ttl must be a number of seconds or auto" >&2 - exit 1 - ;; - *) - TTL_JSON="${TTL_VALUE}" - ;; -esac - -TOKEN="$(tr -d '\r\n' < "${TOKEN_FILE}")" -if [[ -z "${TOKEN}" ]]; then - echo "Cloudflare token file is empty: ${TOKEN_FILE}" >&2 - exit 1 -fi - -cf_api() { - local method="$1" - local path="$2" - local body="${3-}" - if [[ -n "${body}" ]]; then - curl -fsS -X "${method}" \ - -H "Authorization: Bearer ${TOKEN}" \ - -H "Content-Type: application/json" \ - --data "${body}" \ - "https://api.cloudflare.com/client/v4${path}" - else - curl -fsS -X "${method}" \ - -H "Authorization: Bearer ${TOKEN}" \ - -H "Content-Type: application/json" \ - "https://api.cloudflare.com/client/v4${path}" - fi -} - -zone_lookup="$(cf_api GET "/zones?name=${ZONE_NAME}&status=active")" -zone_id="$(jq -r '.result[0].id // empty' <<<"${zone_lookup}")" - -if [[ -z "${zone_id}" ]]; then - echo "Active Cloudflare zone not found: ${ZONE_NAME}" >&2 - exit 1 -fi - -payload="$(jq -cn \ - --arg type "A" \ - --arg name "${RECORD_NAME}" \ - --arg content "${IPV4}" \ - --argjson proxied "${PROXIED}" \ - --argjson ttl "${TTL_JSON}" \ - '{type: $type, name: $name, content: $content, proxied: $proxied, ttl: $ttl}')" - -record_lookup="$(cf_api GET "/zones/${zone_id}/dns_records?type=A&name=${RECORD_NAME}")" -record_id="$(jq -r '.result[0].id // empty' <<<"${record_lookup}")" - -if [[ -n "${record_id}" ]]; then - result="$(cf_api PUT "/zones/${zone_id}/dns_records/${record_id}" "${payload}")" - action="updated" -else - result="$(cf_api POST "/zones/${zone_id}/dns_records" "${payload}")" - action="created" -fi - -jq -r --arg action "${action}" ' - if .success != true then - .errors | tostring | halt_error(1) - else 
- "Cloudflare DNS " + $action + ": " + .result.name + " -> " + .result.content + - " (proxied=" + (.result.proxied | tostring) + ", ttl=" + (.result.ttl | tostring) + ")" - end -' <<<"${result}" diff --git a/Scripts/forge-deploy.sh b/Scripts/forge-deploy.sh deleted file mode 100755 index 5c4b959..0000000 --- a/Scripts/forge-deploy.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" - -# shellcheck source=Scripts/_burrow-flake.sh -source "${SCRIPT_DIR}/_burrow-flake.sh" - -usage() { - cat <<'EOF' -Usage: Scripts/forge-deploy.sh [--test|--switch] [--flake-attr ] [--allow-dirty] - -Standardized remote deploy path for the Burrow forge host. - -Defaults: - --switch - --flake-attr burrow-forge - -Environment: - BURROW_FORGE_HOST root@git.burrow.net - BURROW_FORGE_SSH_KEY intake/agent_at_burrow_net_ed25519 -EOF -} - -MODE="switch" -FLAKE_ATTR="burrow-forge" -ALLOW_DIRTY=0 -BURROW_FLAKE_TMPDIRS=() - -cleanup() { - burrow_cleanup_flake_tmpdirs -} -trap cleanup EXIT - -while [[ $# -gt 0 ]]; do - case "$1" in - --test) - MODE="test" - shift - ;; - --switch) - MODE="switch" - shift - ;; - --flake-attr) - FLAKE_ATTR="${2:?missing value for --flake-attr}" - shift 2 - ;; - --allow-dirty) - ALLOW_DIRTY=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $1" >&2 - usage >&2 - exit 2 - ;; - esac -done - -REPO_ROOT="$(git rev-parse --show-toplevel)" -cd "${REPO_ROOT}" - -if [[ ${ALLOW_DIRTY} -ne 1 ]] && [[ -n "$(git status --short)" ]]; then - echo "Refusing to deploy from a dirty checkout. Commit first, or pass --allow-dirty for incident-only work." 
>&2 - exit 1 -fi - -FORGE_HOST="${BURROW_FORGE_HOST:-root@git.burrow.net}" -FORGE_SSH_KEY="${BURROW_FORGE_SSH_KEY:-}" - -if [[ -z "${FORGE_SSH_KEY}" ]]; then - if [[ -f "${REPO_ROOT}/intake/agent_at_burrow_net_ed25519" ]]; then - FORGE_SSH_KEY="${REPO_ROOT}/intake/agent_at_burrow_net_ed25519" - else - FORGE_SSH_KEY="${HOME}/.ssh/agent_at_burrow_net_ed25519" - fi -fi - -if [[ ! -f "${FORGE_SSH_KEY}" ]]; then - echo "Forge SSH key not found at ${FORGE_SSH_KEY}." >&2 - echo "Set BURROW_FORGE_SSH_KEY or place the agent key in intake/." >&2 - exit 1 -fi - -FORGE_KNOWN_HOSTS_FILE="${BURROW_FORGE_KNOWN_HOSTS_FILE:-${HOME}/.cache/burrow/forge-known_hosts}" -mkdir -p "$(dirname "${FORGE_KNOWN_HOSTS_FILE}")" - -export NIX_SSHOPTS="-i ${FORGE_SSH_KEY} -o IdentitiesOnly=yes -o UserKnownHostsFile=${FORGE_KNOWN_HOSTS_FILE} -o StrictHostKeyChecking=accept-new" -flake_ref="$(burrow_prepare_flake_ref "${REPO_ROOT}")" - -nix --extra-experimental-features "nix-command flakes" shell nixpkgs#nixos-rebuild -c \ - nixos-rebuild "${MODE}" \ - --flake "${flake_ref}#${FLAKE_ATTR}" \ - --build-host "${FORGE_HOST}" \ - --target-host "${FORGE_HOST}" diff --git a/Scripts/hcloud-upload-nixos-image.sh b/Scripts/hcloud-upload-nixos-image.sh deleted file mode 100755 index 2590519..0000000 --- a/Scripts/hcloud-upload-nixos-image.sh +++ /dev/null @@ -1,327 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" - -# shellcheck source=Scripts/_burrow-flake.sh -source "${SCRIPT_DIR}/_burrow-flake.sh" - -DEFAULT_CONFIG="burrow-forge" -DEFAULT_FLAKE="." 
-DEFAULT_LOCATION="hel1" -DEFAULT_ARCHITECTURE="x86" -DEFAULT_TOKEN_FILE="${REPO_ROOT}/intake/hetzner-api-token.txt" - -CONFIG="${HCLOUD_IMAGE_CONFIG:-${DEFAULT_CONFIG}}" -FLAKE="${HCLOUD_IMAGE_FLAKE:-${DEFAULT_FLAKE}}" -LOCATION="${HCLOUD_IMAGE_LOCATION:-${DEFAULT_LOCATION}}" -ARCHITECTURE="${HCLOUD_IMAGE_ARCHITECTURE:-${DEFAULT_ARCHITECTURE}}" -TOKEN_FILE="${HCLOUD_TOKEN_FILE:-${DEFAULT_TOKEN_FILE}}" -DESCRIPTION="${HCLOUD_IMAGE_DESCRIPTION:-}" -UPLOAD_SERVER_TYPE="${HCLOUD_IMAGE_UPLOAD_SERVER_TYPE:-}" -UPLOAD_VERBOSE="${HCLOUD_IMAGE_UPLOAD_VERBOSE:-0}" -ARTIFACT_PATH_INPUT="" -OUTPUT_HASH="" -NO_UPDATE=0 -BUILDER_SPEC="${HCLOUD_IMAGE_BUILDER_SPEC:-}" -EXTRA_LABELS=() -NIX_BUILD_FLAGS=() -BURROW_FLAKE_TMPDIRS=() -LOCAL_STORE_DIR="" - -usage() { - cat <<'EOF' -Usage: Scripts/hcloud-upload-nixos-image.sh [options] - -Build a raw Burrow NixOS image and upload it into Hetzner Cloud as a snapshot. - -Options: - --config images.-raw output to build (default: burrow-forge) - --flake Flake path to build from (default: .) 
- --location Hetzner location for the temporary upload server (default: hel1) - --architecture CPU architecture of the image (default: x86) - --server-type Hetzner server type for the temporary upload server - --token-file Hetzner API token file (default: intake/hetzner-api-token.txt) - --artifact-path Prebuilt raw image artifact to upload directly - --output-hash Stable hash label for --artifact-path uploads - --builder-spec Complete builders string passed to nix build - --description Description for the resulting snapshot - --upload-verbose Pass -v N times to hcloud-upload-image - --label key=value Extra Hetzner image label (repeatable) - --nix-flag Extra argument passed to nix build (repeatable) - --no-update Reuse an existing snapshot with the same config/output hash - -h, --help Show this help text -EOF -} - -while [[ $# -gt 0 ]]; do - case "$1" in - --config) - CONFIG="${2:?missing value for --config}" - shift 2 - ;; - --flake) - FLAKE="${2:?missing value for --flake}" - shift 2 - ;; - --location) - LOCATION="${2:?missing value for --location}" - shift 2 - ;; - --architecture) - ARCHITECTURE="${2:?missing value for --architecture}" - shift 2 - ;; - --server-type) - UPLOAD_SERVER_TYPE="${2:?missing value for --server-type}" - shift 2 - ;; - --token-file) - TOKEN_FILE="${2:?missing value for --token-file}" - shift 2 - ;; - --artifact-path) - ARTIFACT_PATH_INPUT="${2:?missing value for --artifact-path}" - shift 2 - ;; - --output-hash) - OUTPUT_HASH="${2:?missing value for --output-hash}" - shift 2 - ;; - --builder-spec) - BUILDER_SPEC="${2:?missing value for --builder-spec}" - shift 2 - ;; - --description) - DESCRIPTION="${2:?missing value for --description}" - shift 2 - ;; - --upload-verbose) - UPLOAD_VERBOSE="${2:?missing value for --upload-verbose}" - shift 2 - ;; - --label) - EXTRA_LABELS+=("${2:?missing value for --label}") - shift 2 - ;; - --nix-flag) - NIX_BUILD_FLAGS+=("${2:?missing value for --nix-flag}") - shift 2 - ;; - --no-update) - NO_UPDATE=1 - 
shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -cleanup() { - burrow_cleanup_flake_tmpdirs - if [[ -n "${LOCAL_STORE_DIR}" && -d "${LOCAL_STORE_DIR}" ]]; then - rm -rf "${LOCAL_STORE_DIR}" >/dev/null 2>&1 || true - fi -} -trap cleanup EXIT - -burrow_require_cmd nix -burrow_require_cmd curl -burrow_require_cmd python3 -burrow_require_cmd rsync - -if [[ ! -f "${TOKEN_FILE}" ]]; then - echo "Hetzner API token file not found: ${TOKEN_FILE}" >&2 - exit 1 -fi - -HCLOUD_TOKEN="$(tr -d '\r\n' < "${TOKEN_FILE}")" -if [[ -z "${HCLOUD_TOKEN}" ]]; then - echo "Hetzner API token file is empty: ${TOKEN_FILE}" >&2 - exit 1 -fi - -flake_ref="$(burrow_prepare_flake_ref "${FLAKE}")" - -if [[ -z "${DESCRIPTION}" ]]; then - DESCRIPTION="Burrow ${CONFIG} $(date -u +%Y-%m-%dT%H:%M:%SZ)" -fi - -printf 'Building raw image for %s from %s\n' "${CONFIG}" "${flake_ref}" >&2 - -if [[ -z "${ARTIFACT_PATH_INPUT}" && -n "${BUILDER_SPEC}" && -z "${NIX_BUILD_STORE:-}" ]]; then - mkdir -p "${HOME}/.cache/burrow" - LOCAL_STORE_DIR="$(mktemp -d "${HOME}/.cache/burrow/local-store-XXXXXX")" -fi - -artifact_path="" -compression="" -output_hash="${OUTPUT_HASH}" -if [[ -n "${ARTIFACT_PATH_INPUT}" ]]; then - artifact_path="${ARTIFACT_PATH_INPUT}" - if [[ ! 
-f "${artifact_path}" ]]; then - echo "artifact path does not exist: ${artifact_path}" >&2 - exit 1 - fi - compression="$(burrow_detect_compression "${artifact_path}")" - if [[ -z "${output_hash}" ]]; then - if command -v sha256sum >/dev/null 2>&1; then - output_hash="$(sha256sum "${artifact_path}" | awk '{print $1}')" - else - output_hash="$(shasum -a 256 "${artifact_path}" | awk '{print $1}')" - fi - fi -else - nix_build_cmd=( - nix - --extra-experimental-features - "nix-command flakes" - build - "${flake_ref}#images.${CONFIG}-raw" - --no-link - --print-out-paths - ) - - if [[ -n "${BUILDER_SPEC}" ]]; then - nix_build_cmd+=(--builders "${BUILDER_SPEC}") - fi - if [[ -n "${NIX_BUILD_STORE:-}" ]]; then - nix_build_cmd+=(--store "${NIX_BUILD_STORE}") - elif [[ -n "${LOCAL_STORE_DIR}" ]]; then - nix_build_cmd+=(--store "${LOCAL_STORE_DIR}") - fi - - if [[ "${#NIX_BUILD_FLAGS[@]}" -gt 0 ]]; then - nix_build_cmd+=("${NIX_BUILD_FLAGS[@]}") - fi - - build_output="" - if ! build_output="$("${nix_build_cmd[@]}" 2>&1)"; then - printf '%s\n' "${build_output}" >&2 - exit 1 - fi - - store_path="$(printf '%s\n' "${build_output}" | tail -n1)" - if [[ -z "${store_path}" ]]; then - echo "nix build did not return a store path" >&2 - printf '%s\n' "${build_output}" >&2 - exit 1 - fi - - artifact_path="$(burrow_resolve_image_artifact "${store_path}")" - compression="$(burrow_detect_compression "${artifact_path}")" - output_hash="$(basename "${store_path}")" - output_hash="${output_hash%%-*}" -fi - -label_args=( - "burrow.nixos-config=${CONFIG}" - "burrow.nixos-output-hash=${output_hash}" -) -if [[ "${#EXTRA_LABELS[@]}" -gt 0 ]]; then - label_args+=("${EXTRA_LABELS[@]}") -fi -label_csv="$(IFS=,; printf '%s' "${label_args[*]}")" - -find_existing_image() { - HCLOUD_TOKEN="${HCLOUD_TOKEN}" \ - BURROW_LABEL_SELECTOR="burrow.nixos-config=${CONFIG},burrow.nixos-output-hash=${output_hash}" \ - python3 - <<'PY' -import json -import os -import sys -import urllib.parse -import urllib.request - 
-selector = urllib.parse.quote(os.environ["BURROW_LABEL_SELECTOR"], safe=",=") -req = urllib.request.Request( - f"https://api.hetzner.cloud/v1/images?type=snapshot&label_selector={selector}", - headers={"Authorization": f"Bearer {os.environ['HCLOUD_TOKEN']}"}, -) -with urllib.request.urlopen(req, timeout=30) as resp: - data = json.load(resp) - -images = sorted(data.get("images", []), key=lambda item: item.get("created") or "") -if images: - print(images[-1]["id"]) -PY -} - -if [[ "${NO_UPDATE}" -eq 1 ]]; then - existing_id="$(find_existing_image || true)" - if [[ -n "${existing_id}" ]]; then - printf 'Reusing existing Hetzner snapshot %s for %s\n' "${existing_id}" "${CONFIG}" >&2 - printf '%s\n' "${existing_id}" - exit 0 - fi -fi - -uploader_bin="${HCLOUD_UPLOAD_IMAGE_BIN:-}" -if [[ -z "${uploader_bin}" ]]; then - uploader_build_output="$( - nix --extra-experimental-features "nix-command flakes" build \ - "${flake_ref}#hcloud-upload-image" \ - --no-link \ - --print-out-paths 2>&1 - )" || { - printf '%s\n' "${uploader_build_output}" >&2 - exit 1 - } - uploader_bin="$(printf '%s\n' "${uploader_build_output}" | tail -n1)/bin/hcloud-upload-image" -fi - -if [[ ! 
-x "${uploader_bin}" ]]; then - echo "unable to resolve an executable hcloud-upload-image binary; set HCLOUD_UPLOAD_IMAGE_BIN explicitly" >&2 - exit 1 -fi - -upload_cmd=( - "${uploader_bin}" -) -if [[ "${UPLOAD_VERBOSE}" =~ ^[0-9]+$ ]] && [[ "${UPLOAD_VERBOSE}" -gt 0 ]]; then - for _ in $(seq 1 "${UPLOAD_VERBOSE}"); do - upload_cmd+=(-v) - done -fi -upload_cmd+=( - upload - --image-path "${artifact_path}" - --location "${LOCATION}" - --description "${DESCRIPTION}" - --labels "${label_csv}" -) -if [[ -n "${UPLOAD_SERVER_TYPE}" ]]; then - upload_cmd+=(--server-type "${UPLOAD_SERVER_TYPE}") -else - upload_cmd+=(--architecture "${ARCHITECTURE}") -fi -if [[ -n "${compression}" ]]; then - upload_cmd+=(--compression "${compression}") -fi - -printf 'Uploading %s to Hetzner Cloud via %s\n' "${artifact_path}" "${uploader_bin}" >&2 -HCLOUD_TOKEN="${HCLOUD_TOKEN}" "${upload_cmd[@]}" >&2 - -image_id="" -for _ in $(seq 1 24); do - image_id="$(find_existing_image || true)" - if [[ -n "${image_id}" ]]; then - break - fi - sleep 5 -done - -if [[ -z "${image_id}" ]]; then - echo "failed to locate uploaded Hetzner snapshot after upload completed" >&2 - exit 1 -fi - -printf '%s\n' "${image_id}" diff --git a/Scripts/hetzner-forge.sh b/Scripts/hetzner-forge.sh deleted file mode 100755 index cfce7eb..0000000 --- a/Scripts/hetzner-forge.sh +++ /dev/null @@ -1,284 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" - -usage() { - cat <<'EOF' -Usage: Scripts/hetzner-forge.sh [show|create|delete|recreate|build-image|create-from-image|recreate-from-image] [options] - -Manage the Burrow forge server and its Hetzner snapshot lifecycle. - -Defaults: - action: show - server-name: burrow-forge - server-type: ccx23 - location: hel1 - image: ubuntu-24.04 - ssh keys: contact@burrow.net,agent@burrow.net - -Options: - --server-name Server name to manage. - --server-type Hetzner server type. - --location Hetzner location. - --image Image used at create time. 
- --config Burrow image config name for snapshot lookup/build (default: burrow-forge). - --ssh-key SSH key name to attach. Repeatable. - --token-file Hetzner API token file. - --flake Flake path used by image-build actions (default: .) - --upload-location Hetzner location used for image upload (default: same as --location) - --yes Required for delete and recreate. - -h, --help Show this help text. - -Environment: - HCLOUD_TOKEN_FILE Defaults to intake/hetzner-api-token.txt -EOF -} - -ACTION="show" -SERVER_NAME="burrow-forge" -SERVER_TYPE="ccx23" -LOCATION="hel1" -IMAGE="ubuntu-24.04" -CONFIG="burrow-forge" -FLAKE="." -UPLOAD_LOCATION="" -TOKEN_FILE="${HCLOUD_TOKEN_FILE:-intake/hetzner-api-token.txt}" -YES=0 -SSH_KEYS=("contact@burrow.net" "agent@burrow.net") - -if [[ $# -gt 0 ]]; then - case "$1" in - show|create|delete|recreate|build-image|create-from-image|recreate-from-image) - ACTION="$1" - shift - ;; - esac -fi - -while [[ $# -gt 0 ]]; do - case "$1" in - --server-name) - SERVER_NAME="${2:?missing value for --server-name}" - shift 2 - ;; - --server-type) - SERVER_TYPE="${2:?missing value for --server-type}" - shift 2 - ;; - --location) - LOCATION="${2:?missing value for --location}" - shift 2 - ;; - --image) - IMAGE="${2:?missing value for --image}" - shift 2 - ;; - --config) - CONFIG="${2:?missing value for --config}" - shift 2 - ;; - --ssh-key) - SSH_KEYS+=("${2:?missing value for --ssh-key}") - shift 2 - ;; - --token-file) - TOKEN_FILE="${2:?missing value for --token-file}" - shift 2 - ;; - --flake) - FLAKE="${2:?missing value for --flake}" - shift 2 - ;; - --upload-location) - UPLOAD_LOCATION="${2:?missing value for --upload-location}" - shift 2 - ;; - --yes) - YES=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $1" >&2 - usage >&2 - exit 2 - ;; - esac -done - -if [[ ! 
-f "${TOKEN_FILE}" ]]; then - echo "Hetzner API token file not found: ${TOKEN_FILE}" >&2 - exit 1 -fi - -if [[ -z "${UPLOAD_LOCATION}" ]]; then - UPLOAD_LOCATION="${LOCATION}" -fi - -if [[ "${ACTION}" == "delete" || "${ACTION}" == "recreate" || "${ACTION}" == "recreate-from-image" ]] && [[ ${YES} -ne 1 ]]; then - echo "--yes is required for ${ACTION}" >&2 - exit 1 -fi - -latest_snapshot_id() { - HCLOUD_TOKEN="$(tr -d '\r\n' < "${TOKEN_FILE}")" \ - BURROW_CONFIG="${CONFIG}" \ - python3 - <<'PY' -import json -import os -import urllib.parse -import urllib.request - -selector = urllib.parse.quote(f"burrow.nixos-config={os.environ['BURROW_CONFIG']}", safe=",=") -req = urllib.request.Request( - f"https://api.hetzner.cloud/v1/images?type=snapshot&label_selector={selector}", - headers={"Authorization": f"Bearer {os.environ['HCLOUD_TOKEN']}"}, -) -with urllib.request.urlopen(req, timeout=30) as resp: - data = json.load(resp) -images = sorted(data.get("images", []), key=lambda item: item.get("created") or "") -if images: - print(images[-1]["id"]) -PY -} - -if [[ "${ACTION}" == "build-image" ]]; then - exec "${SCRIPT_DIR}/nsc-build-and-upload-image.sh" \ - --config "${CONFIG}" \ - --flake "${FLAKE}" \ - --location "${UPLOAD_LOCATION}" \ - --upload-server-type "${SERVER_TYPE}" \ - --token-file "${TOKEN_FILE}" -fi - -if [[ "${ACTION}" == "create-from-image" || "${ACTION}" == "recreate-from-image" ]]; then - if [[ "${IMAGE}" == "ubuntu-24.04" ]]; then - IMAGE="$(latest_snapshot_id)" - fi - if [[ -z "${IMAGE}" ]]; then - echo "No Burrow snapshot found for config ${CONFIG}. Run build-image first." 
>&2 - exit 1 - fi - if [[ "${ACTION}" == "create-from-image" ]]; then - ACTION="create" - else - ACTION="recreate" - fi -fi - -ssh_keys_csv="" -for key in "${SSH_KEYS[@]}"; do - if [[ -n "${ssh_keys_csv}" ]]; then - ssh_keys_csv+="," - fi - ssh_keys_csv+="${key}" -done - -export BURROW_HCLOUD_ACTION="${ACTION}" -export BURROW_HCLOUD_SERVER_NAME="${SERVER_NAME}" -export BURROW_HCLOUD_SERVER_TYPE="${SERVER_TYPE}" -export BURROW_HCLOUD_LOCATION="${LOCATION}" -export BURROW_HCLOUD_IMAGE="${IMAGE}" -export BURROW_HCLOUD_TOKEN_FILE="${TOKEN_FILE}" -export BURROW_HCLOUD_SSH_KEYS="${ssh_keys_csv}" - -python3 - <<'PY' -import json -import os -import sys -from pathlib import Path - -import requests - -base = "https://api.hetzner.cloud/v1" -action = os.environ["BURROW_HCLOUD_ACTION"] -server_name = os.environ["BURROW_HCLOUD_SERVER_NAME"] -server_type = os.environ["BURROW_HCLOUD_SERVER_TYPE"] -location = os.environ["BURROW_HCLOUD_LOCATION"] -image = os.environ["BURROW_HCLOUD_IMAGE"] -token = Path(os.environ["BURROW_HCLOUD_TOKEN_FILE"]).read_text(encoding="utf-8").strip() -ssh_keys = [key for key in os.environ["BURROW_HCLOUD_SSH_KEYS"].split(",") if key] - -session = requests.Session() -session.headers.update({"Authorization": f"Bearer {token}", "Content-Type": "application/json"}) - - -def request(method: str, path: str, **kwargs) -> requests.Response: - response = session.request(method, f"{base}{path}", timeout=30, **kwargs) - response.raise_for_status() - return response - - -def find_server(): - response = request("GET", "/servers", params={"name": server_name}) - data = response.json() - for server in data.get("servers", []): - if server.get("name") == server_name: - return server - return None - - -def summarize(server): - ipv4 = (((server.get("public_net") or {}).get("ipv4")) or {}).get("ip") - image_name = ((server.get("image") or {}).get("name")) or "" - summary = { - "id": server.get("id"), - "name": server.get("name"), - "status": server.get("status"), - 
"server_type": ((server.get("server_type") or {}).get("name")), - "location": ((server.get("location") or {}).get("name")), - "image": image_name, - "ipv4": ipv4, - "created": server.get("created"), - } - print(json.dumps(summary, indent=2)) - - -server = find_server() - -if action == "show": - if server is None: - print(json.dumps({"name": server_name, "present": False}, indent=2)) - else: - summarize(server) - sys.exit(0) - -if action == "delete": - if server is None: - print(json.dumps({"name": server_name, "deleted": False, "reason": "not found"}, indent=2)) - sys.exit(0) - request("DELETE", f"/servers/{server['id']}") - print(json.dumps({"name": server_name, "deleted": True, "id": server["id"]}, indent=2)) - sys.exit(0) - -if action == "recreate" and server is not None: - request("DELETE", f"/servers/{server['id']}") - server = None - -if action in {"create", "recreate"}: - if server is not None: - summarize(server) - sys.exit(0) - - payload = { - "name": server_name, - "server_type": server_type, - "location": location, - "image": image, - "ssh_keys": ssh_keys, - "labels": { - "project": "burrow", - "role": "forge", - }, - } - response = request("POST", "/servers", json=payload) - created = response.json()["server"] - summarize(created) - sys.exit(0) - -raise SystemExit(f"unsupported action: {action}") -PY diff --git a/Scripts/nsc-build-and-upload-image.sh b/Scripts/nsc-build-and-upload-image.sh deleted file mode 100755 index 6fb99a9..0000000 --- a/Scripts/nsc-build-and-upload-image.sh +++ /dev/null @@ -1,542 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." 
&& pwd)" - -# shellcheck source=Scripts/_burrow-flake.sh -source "${SCRIPT_DIR}/_burrow-flake.sh" - -CONFIG="${HCLOUD_IMAGE_CONFIG:-burrow-forge}" -FLAKE="${HCLOUD_IMAGE_FLAKE:-.}" -LOCATION="${HCLOUD_IMAGE_LOCATION:-hel1}" -TOKEN_FILE="${HCLOUD_TOKEN_FILE:-${REPO_ROOT}/intake/hetzner-api-token.txt}" -NSC_SSH_HOST="${NSC_SSH_HOST:-ssh.ord2.namespace.so}" -NSC_MACHINE_TYPE="${NSC_MACHINE_TYPE:-linux/amd64:32x64}" -NSC_BUILDER_DURATION="${NSC_BUILDER_DURATION:-4h}" -NSC_BUILDER_JOBS="${NSC_BUILDER_JOBS:-32}" -NSC_BUILDER_FEATURES="${NSC_BUILDER_FEATURES:-kvm,big-parallel}" -NSC_BIN="${NSC_BIN:-}" -REMOTE_COMPRESSION="${HCLOUD_IMAGE_REMOTE_COMPRESSION:-auto}" -UPLOAD_SERVER_TYPE="${HCLOUD_IMAGE_UPLOAD_SERVER_TYPE:-}" -KEEP_TMPDIR="${HCLOUD_IMAGE_KEEP_TMPDIR:-0}" -NO_UPDATE=0 -NIX_BUILD_FLAGS=() -EXTRA_LABELS=() -BURROW_FLAKE_TMPDIRS=() -BUILDER_ID="" - -usage() { - cat <<'EOF' -Usage: Scripts/nsc-build-and-upload-image.sh [options] - -Create a temporary Namespace Linux builder, build the Burrow raw image on it, -and upload the resulting artifact to Hetzner Cloud. - -Options: - --config images.-raw output to build (default: burrow-forge) - --flake Flake path to build from (default: .) - --location Hetzner upload location (default: hel1) - --token-file Hetzner API token file (default: intake/hetzner-api-token.txt) - --machine-type Namespace machine type (default: linux/amd64:32x64) - --ssh-host Namespace SSH endpoint (default: ssh.ord2.namespace.so) - --duration Namespace builder lifetime (default: 4h) - --builder-jobs Nix builder job count advertised to the local client - --builder-features Comma-separated Nix system features (default: "kvm,big-parallel") - --remote-compression - Compress raw/image artifacts on the Namespace builder - before copy-back. 
Modes: auto, none, xz, zstd - (default: auto) - --upload-server-type - Hetzner server type for the temporary upload host - --label key=value Extra Hetzner snapshot label (repeatable) - --nix-flag Extra argument passed to nix build (repeatable) - --no-update Reuse an existing snapshot with the same config/output hash - -h, --help Show this help text -EOF -} - -while [[ $# -gt 0 ]]; do - case "$1" in - --config) - CONFIG="${2:?missing value for --config}" - shift 2 - ;; - --flake) - FLAKE="${2:?missing value for --flake}" - shift 2 - ;; - --location) - LOCATION="${2:?missing value for --location}" - shift 2 - ;; - --token-file) - TOKEN_FILE="${2:?missing value for --token-file}" - shift 2 - ;; - --machine-type) - NSC_MACHINE_TYPE="${2:?missing value for --machine-type}" - shift 2 - ;; - --ssh-host) - NSC_SSH_HOST="${2:?missing value for --ssh-host}" - shift 2 - ;; - --duration) - NSC_BUILDER_DURATION="${2:?missing value for --duration}" - shift 2 - ;; - --builder-jobs) - NSC_BUILDER_JOBS="${2:?missing value for --builder-jobs}" - shift 2 - ;; - --builder-features) - NSC_BUILDER_FEATURES="${2:?missing value for --builder-features}" - shift 2 - ;; - --remote-compression) - REMOTE_COMPRESSION="${2:?missing value for --remote-compression}" - shift 2 - ;; - --upload-server-type) - UPLOAD_SERVER_TYPE="${2:?missing value for --upload-server-type}" - shift 2 - ;; - --label) - EXTRA_LABELS+=("${2:?missing value for --label}") - shift 2 - ;; - --nix-flag) - NIX_BUILD_FLAGS+=("${2:?missing value for --nix-flag}") - shift 2 - ;; - --no-update) - NO_UPDATE=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -cleanup() { - if [[ -n "${BUILDER_ID}" && -n "${NSC_BIN}" ]]; then - "${NSC_BIN}" destroy "${BUILDER_ID}" --force >/dev/null 2>&1 || true - fi - burrow_cleanup_flake_tmpdirs - if [[ "${KEEP_TMPDIR}" != "1" && -n "${TMPDIR_BURROW_NSC:-}" && -d "${TMPDIR_BURROW_NSC}" ]]; then - rm -rf 
"${TMPDIR_BURROW_NSC}" - fi -} -trap cleanup EXIT - -burrow_require_cmd nix -burrow_require_cmd curl -burrow_require_cmd python3 -burrow_require_cmd ssh -burrow_require_cmd ssh-keygen -burrow_require_cmd ssh-keyscan -burrow_require_cmd tar - -flake_ref="$(burrow_prepare_flake_ref "${FLAKE}")" - -if [[ -z "${NSC_BIN}" ]]; then - nsc_build_output="$( - nix --extra-experimental-features "nix-command flakes" build \ - "${flake_ref}#nsc" \ - --no-link \ - --print-out-paths 2>&1 - )" || { - printf '%s\n' "${nsc_build_output}" >&2 - exit 1 - } - NSC_BIN="$(printf '%s\n' "${nsc_build_output}" | tail -n1)/bin/nsc" -fi - -if [[ ! -x "${NSC_BIN}" ]]; then - echo "unable to resolve an executable nsc binary; set NSC_BIN explicitly" >&2 - exit 1 -fi - -if [[ -n "${NSC_SESSION:-}" && ! -f "${HOME}/.ns/session" ]]; then - mkdir -p "${HOME}/.ns" - printf '%s\n' "${NSC_SESSION}" > "${HOME}/.ns/session" - chmod 600 "${HOME}/.ns/session" -fi - -"${NSC_BIN}" auth check-login --duration 20m >/dev/null -"${NSC_BIN}" version >/dev/null || true - -TMPDIR_BURROW_NSC="$(mktemp -d "${HOME}/.cache/burrow/nsc-XXXXXX")" -ssh_key="${TMPDIR_BURROW_NSC}/builder" -known_hosts="${TMPDIR_BURROW_NSC}/known_hosts" -id_file="${TMPDIR_BURROW_NSC}/builder.id" - -ssh-keygen -q -t ed25519 -N "" -f "${ssh_key}" -ssh-keyscan -H "${NSC_SSH_HOST}" > "${known_hosts}" - -ssh_base=( - ssh - -i "${ssh_key}" - -o UserKnownHostsFile="${known_hosts}" - -o StrictHostKeyChecking=yes -) - -wait_for_ssh() { - local instance_id="$1" - for _ in $(seq 1 30); do - if "${ssh_base[@]}" -q "${instance_id}@${NSC_SSH_HOST}" true >/dev/null 2>&1; then - return 0 - fi - sleep 5 - done - return 1 -} - -configure_builder() { - local instance_id="$1" - "${ssh_base[@]}" "${instance_id}@${NSC_SSH_HOST}" <<'EOF' -set -euo pipefail - -if ! 
command -v nix >/dev/null 2>&1; then - curl -fsSL https://install.determinate.systems/nix | sh -s -- install linux --determinate --init none --no-confirm -fi - -if [ -e /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh ]; then - . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh -fi - -mkdir -p /etc/nix -cat </etc/nix/nix.conf -build-users-group = -trusted-users = root $USER -auto-optimise-store = true -substituters = https://cache.nixos.org -builders-use-substitutes = true -CFG - -mkdir -p /nix/var/nix/daemon-socket - -if ! pgrep -x nix-daemon >/dev/null 2>&1; then - nohup nix-daemon >/dev/null 2>&1 /dev/null 2>&1; then - nohup nix-daemon >/dev/null 2>&1 &2 -exit 1 -EOF -} - -printf 'Creating temporary Namespace builder (%s)\n' "${NSC_MACHINE_TYPE}" >&2 -"${NSC_BIN}" create \ - --bare \ - --machine_type "${NSC_MACHINE_TYPE}" \ - --ssh_key "${ssh_key}.pub" \ - --duration "${NSC_BUILDER_DURATION}" \ - --label "burrow=true" \ - --label "purpose=hetzner-image-build" \ - --output_to "${id_file}" \ - >/dev/null - -BUILDER_ID="$(tr -d '\r\n' < "${id_file}")" -if [[ -z "${BUILDER_ID}" ]]; then - echo "nsc create did not return a builder id" >&2 - exit 1 -fi - -printf 'Waiting for Namespace builder %s\n' "${BUILDER_ID}" >&2 -wait_for_ssh "${BUILDER_ID}" -configure_builder "${BUILDER_ID}" >&2 - -remote_root="burrow-image-build-${BUILDER_ID}" -remote_flake_path="./${remote_root}" -local_flake_dir="${flake_ref#path:}" -remote_build_stdout="/tmp/burrow-image-build-${BUILDER_ID}.stdout" -remote_build_stderr="/tmp/burrow-image-build-${BUILDER_ID}.stderr" - -printf 'Syncing flake to Namespace builder %s\n' "${BUILDER_ID}" >&2 -tar -C "${local_flake_dir}" -cf - . 
\ - | "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" "rm -rf '${remote_root}' && mkdir -p '${remote_root}' && tar -C '${remote_root}' -xf -" - -run_remote_build() { - local remote_cmd=( - env - "CONFIG=${CONFIG}" - "REMOTE_FLAKE_PATH=${remote_flake_path}" - "REMOTE_BUILD_STDOUT=${remote_build_stdout}" - "REMOTE_BUILD_STDERR=${remote_build_stderr}" - bash - -s - -- - ) - if [[ "${#NIX_BUILD_FLAGS[@]}" -gt 0 ]]; then - remote_cmd+=("${NIX_BUILD_FLAGS[@]}") - fi - - "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" "${remote_cmd[@]}" <<'EOF' -set -euo pipefail - -config="${CONFIG}" -remote_flake_path="${REMOTE_FLAKE_PATH}" -remote_build_stdout="${REMOTE_BUILD_STDOUT}" -remote_build_stderr="${REMOTE_BUILD_STDERR}" -nix_build_cmd=( - nix - --extra-experimental-features - "nix-command flakes" - build - "path:${remote_flake_path}#images.${config}-raw" - --no-link - --print-out-paths -) -if [[ "$#" -gt 0 ]]; then - nix_build_cmd+=("$@") -fi - -rm -f "${remote_build_stdout}" "${remote_build_stderr}" -if ! "${nix_build_cmd[@]}" >"${remote_build_stdout}" 2>"${remote_build_stderr}"; then - cat "${remote_build_stderr}" >&2 - exit 1 -fi -EOF -} - -resolve_remote_store_path() { - "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" \ - env "REMOTE_BUILD_STDOUT=${remote_build_stdout}" "REMOTE_BUILD_STDERR=${remote_build_stderr}" bash -s <<'EOF' -set -euo pipefail - -remote_build_stdout="${REMOTE_BUILD_STDOUT}" -remote_build_stderr="${REMOTE_BUILD_STDERR}" - -if [[ ! 
-s "${remote_build_stdout}" ]]; then - echo "remote build stdout file is missing or empty: ${remote_build_stdout}" >&2 - if [[ -s "${remote_build_stderr}" ]]; then - cat "${remote_build_stderr}" >&2 - fi - exit 1 -fi - -tail -n1 "${remote_build_stdout}" -EOF -} - -resolve_remote_artifact_path() { - local store_path="$1" - "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" \ - env "REMOTE_STORE_PATH=${store_path}" bash -s <<'EOF' -set -euo pipefail - -store_path="${REMOTE_STORE_PATH}" -artifact_path="${store_path}" -if [[ -d "${artifact_path}" ]]; then - artifact_path="$(find "${artifact_path}" -type f \( -name '*.raw' -o -name '*.raw.*' -o -name '*.img' -o -name '*.img.*' \) | sort | head -n1)" -fi -if [[ -z "${artifact_path}" || ! -f "${artifact_path}" ]]; then - echo "unable to locate image artifact under ${store_path}" >&2 - exit 1 -fi - -printf '%s\n' "${artifact_path}" -EOF -} - -plan_remote_artifact_transfer() { - local artifact_path="$1" - local compression_mode="$2" - - "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" \ - env "REMOTE_ARTIFACT_PATH=${artifact_path}" "REMOTE_COMPRESSION=${compression_mode}" bash -s <<'EOF' -set -euo pipefail - -artifact_path="${REMOTE_ARTIFACT_PATH}" -compression_mode="${REMOTE_COMPRESSION}" - -case "${artifact_path}" in - *.bz2) - printf '%s\tbz2\n' "$(basename "${artifact_path}")" - exit 0 - ;; - *.xz) - printf '%s\txz\n' "$(basename "${artifact_path}")" - exit 0 - ;; - *.zst|*.zstd) - printf '%s\tzstd\n' "$(basename "${artifact_path}")" - exit 0 - ;; -esac - -select_compression() { - case "${compression_mode}" in - auto) - if command -v zstd >/dev/null 2>&1; then - printf 'zstd\n' - return 0 - fi - if command -v xz >/dev/null 2>&1; then - printf 'xz\n' - return 0 - fi - printf 'none\n' - ;; - none|xz|zstd) - printf '%s\n' "${compression_mode}" - ;; - *) - echo "unsupported remote compression mode: ${compression_mode}" >&2 - exit 1 - ;; - esac -} - -mode="$(select_compression)" -case "${mode}" in - none) - printf '%s\tnone\n' 
"$(basename "${artifact_path}")" - ;; - zstd) - printf '%s.zst\tzstd\n' "$(basename "${artifact_path}")" - ;; - xz) - printf '%s.xz\txz\n' "$(basename "${artifact_path}")" - ;; -esac -EOF -} - -stream_remote_artifact() { - local artifact_path="$1" - local compression_mode="$2" - local destination="$3" - - "${ssh_base[@]}" "${BUILDER_ID}@${NSC_SSH_HOST}" \ - env "REMOTE_ARTIFACT_PATH=${artifact_path}" "REMOTE_COMPRESSION=${compression_mode}" bash -s <<'EOF' > "${destination}" -set -euo pipefail - -artifact_path="${REMOTE_ARTIFACT_PATH}" -compression_mode="${REMOTE_COMPRESSION}" - -case "${artifact_path}" in - *.bz2|*.xz|*.zst|*.zstd) - cat "${artifact_path}" - exit 0 - ;; -esac - -select_compression() { - case "${compression_mode}" in - auto) - if command -v zstd >/dev/null 2>&1; then - printf 'zstd\n' - return 0 - fi - if command -v xz >/dev/null 2>&1; then - printf 'xz\n' - return 0 - fi - printf 'none\n' - ;; - none|xz|zstd) - printf '%s\n' "${compression_mode}" - ;; - *) - echo "unsupported remote compression mode: ${compression_mode}" >&2 - exit 1 - ;; - esac -} - -mode="$(select_compression)" -case "${mode}" in - none) - cat "${artifact_path}" - ;; - zstd) - if ! command -v zstd >/dev/null 2>&1; then - echo "zstd requested but not available on Namespace builder" >&2 - exit 1 - fi - zstd -T0 -19 -c "${artifact_path}" - ;; - xz) - if ! 
command -v xz >/dev/null 2>&1; then - echo "xz requested but not available on Namespace builder" >&2 - exit 1 - fi - xz -T0 -c "${artifact_path}" - ;; -esac -EOF -} - -printf 'Building raw image on Namespace builder %s\n' "${BUILDER_ID}" >&2 -run_remote_build - -remote_store_path="$(resolve_remote_store_path)" -if [[ -z "${remote_store_path}" ]]; then - echo "remote build did not return a store path" >&2 - exit 1 -fi - -remote_artifact_path="$(resolve_remote_artifact_path "${remote_store_path}")" -if [[ -z "${remote_artifact_path}" ]]; then - echo "remote build did not return an artifact path" >&2 - exit 1 -fi - -transfer_plan="$(plan_remote_artifact_transfer "${remote_artifact_path}" "${REMOTE_COMPRESSION}")" -local_artifact_name="$(printf '%s\n' "${transfer_plan}" | cut -f1)" -transfer_compression="$(printf '%s\n' "${transfer_plan}" | cut -f2)" -if [[ -z "${local_artifact_name}" || -z "${transfer_compression}" ]]; then - echo "unable to determine artifact transfer plan for ${remote_artifact_path}" >&2 - exit 1 -fi - -output_hash="$(basename "${remote_store_path}")" -output_hash="${output_hash%%-*}" -local_artifact="${TMPDIR_BURROW_NSC}/${local_artifact_name}" - -printf 'Streaming built artifact back from Namespace builder %s (%s)\n' "${BUILDER_ID}" "${transfer_compression}" >&2 -stream_remote_artifact "${remote_artifact_path}" "${REMOTE_COMPRESSION}" "${local_artifact}" - -cmd=( - "${SCRIPT_DIR}/hcloud-upload-nixos-image.sh" - --config "${CONFIG}" - --flake "${FLAKE}" - --location "${LOCATION}" - --token-file "${TOKEN_FILE}" - --artifact-path "${local_artifact}" - --output-hash "${output_hash}" -) - -if [[ -n "${UPLOAD_SERVER_TYPE}" ]]; then - cmd+=(--server-type "${UPLOAD_SERVER_TYPE}") -fi - -if [[ "${NO_UPDATE}" -eq 1 ]]; then - cmd+=(--no-update) -fi -if [[ "${#EXTRA_LABELS[@]}" -gt 0 ]]; then - for label in "${EXTRA_LABELS[@]}"; do - cmd+=(--label "${label}") - done -fi - -"${cmd[@]}" diff --git a/Scripts/provision-forgejo-nsc.sh 
b/Scripts/provision-forgejo-nsc.sh deleted file mode 100755 index b31de21..0000000 --- a/Scripts/provision-forgejo-nsc.sh +++ /dev/null @@ -1,237 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" - -# shellcheck source=Scripts/_burrow-flake.sh -source "${SCRIPT_DIR}/_burrow-flake.sh" - -usage() { - cat <<'EOF' -Usage: Scripts/provision-forgejo-nsc.sh [options] - -Generate Burrow forgejo-nsc runtime inputs in intake/ and optionally refresh the -Namespace token from the currently logged-in namespace account. - -Options: - --host SSH target used to mint the Forgejo PAT. - Default: root@git.burrow.net - --ssh-key SSH private key for the forge host. - Default: intake/agent_at_burrow_net_ed25519 - --nsc-bin Override the nsc binary. - --no-refresh-token Reuse intake/forgejo_nsc_token.txt if it already exists. - --token-name Forgejo PAT name prefix (default: forgejo-nsc) - --contact-user Forgejo username used for PAT creation (default: contact) - --scope-owner Forgejo org/user owner for the default NSC scope (default: burrow) - --scope-name Forgejo repository name for the default NSC scope (default: burrow) - -h, --help Show this help text. 
-EOF -} - -HOST="${BURROW_FORGE_HOST:-root@git.burrow.net}" -SSH_KEY="${BURROW_FORGE_SSH_KEY:-${REPO_ROOT}/intake/agent_at_burrow_net_ed25519}" -NSC_BIN="${NSC_BIN:-}" -KNOWN_HOSTS_FILE="${BURROW_FORGE_KNOWN_HOSTS_FILE:-${HOME}/.cache/burrow/forge-known_hosts}" -REFRESH_TOKEN=1 -TOKEN_NAME_PREFIX="${FORGEJO_PAT_NAME:-forgejo-nsc}" -CONTACT_USER="${FORGEJO_CONTACT_USER:-contact}" -SCOPE_OWNER="${FORGEJO_SCOPE_OWNER:-burrow}" -SCOPE_NAME="${FORGEJO_SCOPE_NAME:-burrow}" -BURROW_FLAKE_TMPDIRS=() - -cleanup() { - burrow_cleanup_flake_tmpdirs -} -trap cleanup EXIT - -while [[ $# -gt 0 ]]; do - case "$1" in - --host) - HOST="${2:?missing value for --host}" - shift 2 - ;; - --ssh-key) - SSH_KEY="${2:?missing value for --ssh-key}" - shift 2 - ;; - --nsc-bin) - NSC_BIN="${2:?missing value for --nsc-bin}" - shift 2 - ;; - --no-refresh-token) - REFRESH_TOKEN=0 - shift - ;; - --token-name) - TOKEN_NAME_PREFIX="${2:?missing value for --token-name}" - shift 2 - ;; - --contact-user) - CONTACT_USER="${2:?missing value for --contact-user}" - shift 2 - ;; - --scope-owner) - SCOPE_OWNER="${2:?missing value for --scope-owner}" - shift 2 - ;; - --scope-name) - SCOPE_NAME="${2:?missing value for --scope-name}" - shift 2 - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -mkdir -p "$(dirname "${KNOWN_HOSTS_FILE}")" - -burrow_require_cmd nix -burrow_require_cmd ssh -burrow_require_cmd python3 - -if [[ ! 
-f "${SSH_KEY}" ]]; then - echo "forge SSH key not found: ${SSH_KEY}" >&2 - exit 1 -fi - -mkdir -p "${REPO_ROOT}/intake" -chmod 700 "${REPO_ROOT}/intake" - -flake_ref="$(burrow_prepare_flake_ref "${REPO_ROOT}")" -if [[ -z "${NSC_BIN}" ]]; then - if command -v nsc >/dev/null 2>&1; then - NSC_BIN="$(command -v nsc)" - else - nsc_build_output="$( - nix --extra-experimental-features "nix-command flakes" build \ - "${flake_ref}#nsc" \ - --no-link \ - --print-out-paths 2>&1 - )" || { - printf '%s\n' "${nsc_build_output}" >&2 - exit 1 - } - NSC_BIN="$(printf '%s\n' "${nsc_build_output}" | tail -n1)/bin/nsc" - fi -fi - -if [[ ! -x "${NSC_BIN}" ]]; then - echo "unable to resolve an executable nsc binary; set NSC_BIN explicitly" >&2 - exit 1 -fi - -token_file="${REPO_ROOT}/intake/forgejo_nsc_token.txt" -dispatcher_out="${REPO_ROOT}/intake/forgejo_nsc_dispatcher.yaml" -autoscaler_out="${REPO_ROOT}/intake/forgejo_nsc_autoscaler.yaml" -dispatcher_src="${REPO_ROOT}/services/forgejo-nsc/deploy/dispatcher.yaml" -autoscaler_src="${REPO_ROOT}/services/forgejo-nsc/deploy/autoscaler.yaml" - -if [[ "${REFRESH_TOKEN}" -eq 1 || ! 
-s "${token_file}" ]]; then - "${NSC_BIN}" auth check-login --duration 20m >/dev/null - "${NSC_BIN}" auth generate-dev-token --output_to "${token_file}" >/dev/null - chmod 600 "${token_file}" -fi - -webhook_secret="$(python3 - <<'PY' -import secrets -print(secrets.token_hex(32)) -PY -)" - -token_name="${TOKEN_NAME_PREFIX}-$(date -u +%Y%m%dT%H%M%SZ)" -forgejo_pat="$( - ssh \ - -i "${SSH_KEY}" \ - -o IdentitiesOnly=yes \ - -o UserKnownHostsFile="${KNOWN_HOSTS_FILE}" \ - -o StrictHostKeyChecking=accept-new \ - "${HOST}" \ - "set -euo pipefail; forgejo_bin=\$(systemctl show -p ExecStart forgejo.service --value | sed -E 's/^\\{ path=([^ ;]+).*/\\1/'); sudo -u forgejo \"\${forgejo_bin}\" --config /var/lib/forgejo/custom/conf/app.ini --custom-path /var/lib/forgejo/custom --work-path /var/lib/forgejo admin user generate-access-token --username '${CONTACT_USER}' --scopes all --raw --token-name '${token_name}'" \ - | tr -d '\r\n' -)" - -if [[ -z "${forgejo_pat}" ]]; then - echo "failed to mint Forgejo PAT on ${HOST}" >&2 - exit 1 -fi - -ssh \ - -i "${SSH_KEY}" \ - -o IdentitiesOnly=yes \ - -o UserKnownHostsFile="${KNOWN_HOSTS_FILE}" \ - -o StrictHostKeyChecking=accept-new \ - "${HOST}" \ - 'bash -s' </tmp/forgejo-provision-org.json <&2 - cat /tmp/forgejo-provision-response.json >&2 - exit 1 - fi -fi - -repo_code="\$(api "\${base_url}/api/v1/repos/\${scope_owner}/\${scope_name}")" -if [[ "\${repo_code}" == "404" ]]; then - cat >/tmp/forgejo-provision-repo.json <&2 - cat /tmp/forgejo-provision-response.json >&2 - exit 1 - fi -fi -EOF - -FORGEJO_PAT="${forgejo_pat}" \ -WEBHOOK_SECRET="${webhook_secret}" \ -DISPATCHER_SRC="${dispatcher_src}" \ -AUTOSCALER_SRC="${autoscaler_src}" \ -DISPATCHER_OUT="${dispatcher_out}" \ -AUTOSCALER_OUT="${autoscaler_out}" \ -python3 - <<'PY' -import os -from pathlib import Path - -def render(src: str, dst: str) -> None: - text = Path(src).read_text(encoding="utf-8") - text = text.replace("PENDING-FORGEJO-PAT", os.environ["FORGEJO_PAT"]) - text = 
text.replace("PENDING-WEBHOOK-SECRET", os.environ["WEBHOOK_SECRET"]) - Path(dst).write_text(text, encoding="utf-8") - -render(os.environ["DISPATCHER_SRC"], os.environ["DISPATCHER_OUT"]) -render(os.environ["AUTOSCALER_SRC"], os.environ["AUTOSCALER_OUT"]) -PY - -chmod 600 "${dispatcher_out}" "${autoscaler_out}" - -echo "Rendered intake/forgejo_nsc_token.txt, intake/forgejo_nsc_dispatcher.yaml, and intake/forgejo_nsc_autoscaler.yaml." -echo "Minted Forgejo PAT ${token_name} for ${CONTACT_USER} on ${HOST}." diff --git a/Scripts/run-ios-tailnet-ui-tests.sh b/Scripts/run-ios-tailnet-ui-tests.sh deleted file mode 100755 index 5170a1e..0000000 --- a/Scripts/run-ios-tailnet-ui-tests.sh +++ /dev/null @@ -1,163 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -bundle_id="${BURROW_UI_TEST_APP_BUNDLE_ID:-com.hackclub.burrow}" -simulator_name="${BURROW_UI_TEST_SIMULATOR_NAME:-iPhone 17 Pro}" -simulator_os="${BURROW_UI_TEST_SIMULATOR_OS:-26.4}" -simulator_id="${BURROW_UI_TEST_SIMULATOR_ID:-}" -derived_data_path="${BURROW_UI_TEST_DERIVED_DATA_PATH:-/tmp/burrow-ui-tests-deriveddata}" -source_packages_path="${BURROW_UI_TEST_SOURCE_PACKAGES_PATH:-/tmp/burrow-ui-tests-sourcepackages}" -fallback_dir="/tmp/${bundle_id}/SimulatorFallback" -socket_path="${fallback_dir}/burrow.sock" -tailnet_state_root="/tmp/${bundle_id}/SimulatorTailnetState" -daemon_log="${BURROW_UI_TEST_DAEMON_LOG:-/tmp/burrow-ui-test-daemon.log}" -ui_test_config_path="${BURROW_UI_TEST_CONFIG_PATH:-/tmp/burrow-ui-test-config.json}" -ui_test_runner_bundle_id="${bundle_id}.uitests.xctrunner" -ui_test_email="${BURROW_UI_TEST_EMAIL:-ui-test@burrow.net}" -ui_test_username="${BURROW_UI_TEST_USERNAME:-ui-test}" -ui_test_tailnet_mode="${BURROW_UI_TEST_TAILNET_MODE:-tailscale}" -password_secret="${repo_root}/secrets/infra/authentik-ui-test-password.age" -age_identity="${BURROW_UI_TEST_AGE_IDENTITY:-${HOME}/.ssh/id_ed25519}" - 
-ui_test_password="${BURROW_UI_TEST_PASSWORD:-}" -if [[ -z "$ui_test_password" ]]; then - if [[ -f "$password_secret" && -f "$age_identity" ]]; then - ui_test_password="$(age -d -i "$age_identity" "$password_secret" | tr -d '\r\n')" - else - echo "error: BURROW_UI_TEST_PASSWORD is unset and ${password_secret} could not be decrypted" >&2 - exit 1 - fi -fi - -rm -rf "$fallback_dir" "$tailnet_state_root" -mkdir -p "$fallback_dir" "$tailnet_state_root" "$derived_data_path" "$source_packages_path" -rm -f "$socket_path" - -resolve_simulator_id() { - xcrun simctl list devices available -j | python3 -c ' -import json -import os -import sys - -target_name = sys.argv[1] -target_os = sys.argv[2] -target_runtime = "com.apple.CoreSimulator.SimRuntime.iOS-" + target_os.replace(".", "-") -devices = json.load(sys.stdin).get("devices", {}) -healthy = [] -for runtime, entries in devices.items(): - if runtime != target_runtime: - continue - for entry in entries: - if not entry.get("isAvailable", False): - continue - if not os.path.isdir(entry.get("dataPath", "")): - continue - healthy.append(entry) -for entry in healthy: - if entry.get("name") == target_name: - print(entry["udid"]) - raise SystemExit(0) -for entry in healthy: - if target_name in entry.get("name", ""): - print(entry["udid"]) - raise SystemExit(0) -raise SystemExit(1) -' "$simulator_name" "$simulator_os" -} - -if [[ -z "$simulator_id" ]]; then - simulator_id="$(resolve_simulator_id || true)" -fi - -if [[ -n "$simulator_id" ]]; then - xcrun simctl boot "$simulator_id" >/dev/null 2>&1 || true - xcrun simctl bootstatus "$simulator_id" -b - xcrun simctl terminate "$simulator_id" "$bundle_id" >/dev/null 2>&1 || true - xcrun simctl terminate "$simulator_id" "$ui_test_runner_bundle_id" >/dev/null 2>&1 || true - xcrun simctl uninstall "$simulator_id" "$bundle_id" >/dev/null 2>&1 || true - xcrun simctl uninstall "$simulator_id" "$ui_test_runner_bundle_id" >/dev/null 2>&1 || true - destination="id=${simulator_id}" -else - 
destination="platform=iOS Simulator,name=${simulator_name},OS=${simulator_os}" -fi - -cleanup() { - rm -f "$ui_test_config_path" - if [[ -n "${daemon_pid:-}" ]]; then - kill "$daemon_pid" >/dev/null 2>&1 || true - wait "$daemon_pid" >/dev/null 2>&1 || true - fi -} -trap cleanup EXIT - -umask 077 -python3 - <<'PY' "$ui_test_config_path" "$ui_test_email" "$ui_test_username" "$ui_test_password" "$ui_test_tailnet_mode" -import json -import pathlib -import sys - -config_path = pathlib.Path(sys.argv[1]) -config_path.write_text( - json.dumps( - { - "email": sys.argv[2], - "username": sys.argv[3], - "password": sys.argv[4], - "mode": sys.argv[5], - } - ), - encoding="utf-8", -) -PY - -cargo build -p burrow --bin burrow - -( - cd "$fallback_dir" - RUST_LOG="${BURROW_UI_TEST_RUST_LOG:-info,burrow=debug}" \ - BURROW_SOCKET_PATH="burrow.sock" \ - BURROW_TAILSCALE_STATE_ROOT="$tailnet_state_root" \ - "${repo_root}/target/debug/burrow" daemon >"$daemon_log" 2>&1 -) & -daemon_pid=$! - -for _ in $(seq 1 50); do - [[ -S "$socket_path" ]] && break - sleep 0.2 -done - -if [[ ! 
-S "$socket_path" ]]; then - echo "error: Burrow daemon did not create ${socket_path}" >&2 - [[ -f "$daemon_log" ]] && cat "$daemon_log" >&2 - exit 1 -fi - -common_xcodebuild_args=( - -quiet - -skipPackagePluginValidation - -project "${repo_root}/Apple/Burrow.xcodeproj" - -scheme App - -configuration Debug - -destination "$destination" - -derivedDataPath "$derived_data_path" - -clonedSourcePackagesDirPath "$source_packages_path" - -only-testing:BurrowUITests - -parallel-testing-enabled NO - -maximum-concurrent-test-simulator-destinations 1 - -maximum-parallel-testing-workers 1 - CODE_SIGNING_ALLOWED=NO -) - -xcodebuild \ - "${common_xcodebuild_args[@]}" \ - build-for-testing - -BURROW_UI_TEST_EMAIL="$ui_test_email" \ -BURROW_UI_TEST_USERNAME="$ui_test_username" \ -BURROW_UI_TEST_PASSWORD="$ui_test_password" \ -BURROW_UI_TEST_CONFIG_PATH="$ui_test_config_path" \ -BURROW_UI_TEST_EPHEMERAL_AUTH=1 \ -xcodebuild \ - "${common_xcodebuild_args[@]}" \ - test-without-building diff --git a/Scripts/run-tailnet-connectivity-smoke.sh b/Scripts/run-tailnet-connectivity-smoke.sh deleted file mode 100755 index f3053d3..0000000 --- a/Scripts/run-tailnet-connectivity-smoke.sh +++ /dev/null @@ -1,186 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" -bundle_id="${BURROW_UI_TEST_APP_BUNDLE_ID:-com.hackclub.burrow}" -smoke_root="${BURROW_TAILNET_SMOKE_ROOT:-/tmp/burrow-tailnet-connectivity}" -socket_path="${smoke_root}/burrow.sock" -db_path="${smoke_root}/burrow.db" -daemon_log="${BURROW_TAILNET_SMOKE_DAEMON_LOG:-${smoke_root}/daemon.log}" -payload_path="${smoke_root}/tailnet.json" -authority="${BURROW_TAILNET_SMOKE_AUTHORITY:-https://ts.burrow.net}" -account_name="${BURROW_TAILNET_SMOKE_ACCOUNT:-ui-test}" -identity_name="${BURROW_TAILNET_SMOKE_IDENTITY:-apple}" -hostname="${BURROW_TAILNET_SMOKE_HOSTNAME:-burrow-apple}" -message="${BURROW_TAILNET_SMOKE_MESSAGE:-burrow-tailnet-smoke}" -timeout_ms="${BURROW_TAILNET_SMOKE_TIMEOUT_MS:-8000}" -remote_ip="${BURROW_TAILNET_SMOKE_REMOTE_IP:-}" -remote_port="${BURROW_TAILNET_SMOKE_REMOTE_PORT:-18081}" -remote_hostname="${BURROW_TAILNET_SMOKE_REMOTE_HOSTNAME:-burrow-echo}" -remote_authkey="${BURROW_TAILNET_SMOKE_REMOTE_AUTHKEY:-}" -helper_bin="${BURROW_TAILNET_SMOKE_HELPER_BIN:-${smoke_root}/tailscale-login-bridge}" -remote_state_root="${BURROW_TAILNET_SMOKE_REMOTE_STATE_ROOT:-${smoke_root}/remote-state}" -remote_stdout="${smoke_root}/remote-helper.stdout" -remote_stderr="${BURROW_TAILNET_SMOKE_REMOTE_LOG:-${smoke_root}/remote-helper.log}" - -if [[ -n "${TS_AUTHKEY:-}" ]]; then - default_tailnet_state_root="${smoke_root}/local-state" -else - default_tailnet_state_root="/tmp/${bundle_id}/SimulatorTailnetState" -fi -tailnet_state_root="${BURROW_TAILNET_STATE_ROOT:-${default_tailnet_state_root}}" - -need_login=0 -if [[ -z "${TS_AUTHKEY:-}" ]] && { [[ ! -d "$tailnet_state_root" ]] || [[ -z "$(find "$tailnet_state_root" -mindepth 1 -maxdepth 2 -print -quit 2>/dev/null)" ]]; }; then - need_login=1 -fi - -if [[ "$need_login" -eq 1 ]]; then - echo "Tailnet state root is empty; running iOS login bootstrap first..." 
- "${repo_root}/Scripts/run-ios-tailnet-ui-tests.sh" -fi - -rm -rf "$smoke_root" -mkdir -p "$smoke_root" - -cleanup() { - rm -f "$payload_path" - if [[ -n "${daemon_pid:-}" ]]; then - kill "$daemon_pid" >/dev/null 2>&1 || true - wait "$daemon_pid" >/dev/null 2>&1 || true - fi - if [[ -n "${remote_pid:-}" ]]; then - kill "$remote_pid" >/dev/null 2>&1 || true - wait "$remote_pid" >/dev/null 2>&1 || true - fi -} -trap cleanup EXIT - -wait_for_helper_listen() { - python3 - <<'PY' "$1" -import json -import pathlib -import sys -import time - -path = pathlib.Path(sys.argv[1]) -deadline = time.time() + 20 -while time.time() < deadline: - if path.exists(): - with path.open("r", encoding="utf-8") as handle: - line = handle.readline().strip() - if line: - hello = json.loads(line) - print(hello["listen_addr"]) - raise SystemExit(0) - time.sleep(0.1) -raise SystemExit("timed out waiting for helper startup line") -PY -} - -wait_for_helper_ip() { - python3 - <<'PY' "$1" -import json -import sys -import time -import urllib.request - -url = sys.argv[1] -deadline = time.time() + 30 -while time.time() < deadline: - with urllib.request.urlopen(url, timeout=5) as response: - status = json.load(response) - if status.get("running") and status.get("tailscale_ips"): - print(status["tailscale_ips"][0]) - raise SystemExit(0) - time.sleep(0.25) -raise SystemExit("timed out waiting for helper to become ready") -PY -} - -python3 - <<'PY' "$payload_path" "$authority" "$account_name" "$identity_name" "$hostname" -import json -import pathlib -import sys - -path = pathlib.Path(sys.argv[1]) -payload = { - "authority": sys.argv[2], - "account": sys.argv[3], - "identity": sys.argv[4], - "hostname": sys.argv[5], -} -path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8") -PY - -cargo build -p burrow --bin burrow -( - cd "${repo_root}/Tools/tailscale-login-bridge" - GOWORK=off go build -o "$helper_bin" . -) - -if [[ -z "$remote_ip" ]]; then - if [[ -z "$remote_authkey" ]] && { [[ ! 
-d "$remote_state_root" ]] || [[ -z "$(find "$remote_state_root" -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null)" ]]; }; then - echo "error: set BURROW_TAILNET_SMOKE_REMOTE_IP, BURROW_TAILNET_SMOKE_REMOTE_AUTHKEY, or BURROW_TAILNET_SMOKE_REMOTE_STATE_ROOT to an existing logged-in helper state" >&2 - exit 1 - fi - - if [[ -n "$remote_authkey" ]]; then - rm -rf "$remote_state_root" - mkdir -p "$remote_state_root" - fi - - ( - cd "$repo_root" - if [[ -n "$remote_authkey" ]]; then - export TS_AUTHKEY="$remote_authkey" - fi - "$helper_bin" \ - --listen 127.0.0.1:0 \ - --state-dir "$remote_state_root" \ - --hostname "$remote_hostname" \ - --control-url "$authority" \ - --udp-echo-port "$remote_port" \ - >"$remote_stdout" 2>"$remote_stderr" - ) & - remote_pid=$! - - remote_listen_addr="$(wait_for_helper_listen "$remote_stdout")" - remote_ip="$(wait_for_helper_ip "http://${remote_listen_addr}/status")" -fi - -( - cd "$smoke_root" - RUST_LOG="${BURROW_TAILNET_SMOKE_RUST_LOG:-info,burrow=debug}" \ - BURROW_SOCKET_PATH="$socket_path" \ - BURROW_TAILSCALE_STATE_ROOT="$tailnet_state_root" \ - "${repo_root}/target/debug/burrow" daemon >"$daemon_log" 2>&1 -) & -daemon_pid=$! - -for _ in $(seq 1 50); do - [[ -S "$socket_path" ]] && break - sleep 0.2 -done - -if [[ ! -S "$socket_path" ]]; then - echo "error: Burrow daemon did not create ${socket_path}" >&2 - [[ -f "$daemon_log" ]] && cat "$daemon_log" >&2 - exit 1 -fi - -run_burrow() { - BURROW_SOCKET_PATH="$socket_path" \ - BURROW_TAILSCALE_STATE_ROOT="$tailnet_state_root" \ - "${repo_root}/target/debug/burrow" "$@" -} - -run_burrow network-add 1 1 "$payload_path" -run_burrow start -run_burrow tunnel-config -run_burrow tailnet-udp-echo "${remote_ip}:${remote_port}" --message "$message" --timeout-ms "$timeout_ms" - -echo -echo "Tailnet connectivity smoke passed." 
-echo "State root: $tailnet_state_root" -echo "Remote: ${remote_ip}:${remote_port}" diff --git a/Scripts/seal-forgejo-nsc-secrets.sh b/Scripts/seal-forgejo-nsc-secrets.sh deleted file mode 100755 index a6b3918..0000000 --- a/Scripts/seal-forgejo-nsc-secrets.sh +++ /dev/null @@ -1,112 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" - -usage() { - cat <<'EOF' -Usage: Scripts/seal-forgejo-nsc-secrets.sh [options] - -Encrypt Burrow forgejo-nsc runtime inputs from intake/ into the agenix secrets -consumed by burrow-forge. - -Options: - --provision Re-render the local intake files before sealing. - --host SSH target forwarded to provision-forgejo-nsc.sh. - --ssh-key SSH private key forwarded to provision-forgejo-nsc.sh. - --nsc-bin Override the nsc binary for provisioning. - -h, --help Show this help text. -EOF -} - -PROVISION=0 -HOST="${BURROW_FORGE_HOST:-root@git.burrow.net}" -SSH_KEY="${BURROW_FORGE_SSH_KEY:-${REPO_ROOT}/intake/agent_at_burrow_net_ed25519}" -NSC_BIN="${NSC_BIN:-}" - -while [[ $# -gt 0 ]]; do - case "$1" in - --provision) - PROVISION=1 - shift - ;; - --host) - HOST="${2:?missing value for --host}" - shift 2 - ;; - --ssh-key) - SSH_KEY="${2:?missing value for --ssh-key}" - shift 2 - ;; - --nsc-bin) - NSC_BIN="${2:?missing value for --nsc-bin}" - shift 2 - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "unknown option: $1" >&2 - usage >&2 - exit 64 - ;; - esac -done - -require_cmd() { - if ! 
command -v "$1" >/dev/null 2>&1; then - echo "missing required command: $1" >&2 - exit 1 - fi -} - -require_cmd age -require_cmd nix -require_cmd python3 - -if [[ "${PROVISION}" -eq 1 ]]; then - provision_args=(--host "${HOST}" --ssh-key "${SSH_KEY}") - if [[ -n "${NSC_BIN}" ]]; then - provision_args+=(--nsc-bin "${NSC_BIN}") - fi - "${SCRIPT_DIR}/provision-forgejo-nsc.sh" "${provision_args[@]}" -fi - -tmpdir="$(mktemp -d)" -cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -seal_secret() { - local target="$1" - local source_path="$2" - recipients_file="${tmpdir}/$(basename "${target}").recipients" - if [[ ! -s "${source_path}" ]]; then - echo "required runtime input missing or empty: ${source_path}" >&2 - exit 1 - fi - nix eval --impure --json --expr "let s = import ${REPO_ROOT}/secrets.nix; in s.\"${target}\".publicKeys" \ - | python3 -c 'import json, sys; [print(item) for item in json.load(sys.stdin)]' \ - > "${recipients_file}" - - age -R "${recipients_file}" -o "${REPO_ROOT}/${target}" "${source_path}" -} - -seal_secret "secrets/infra/forgejo-nsc-token.age" "${REPO_ROOT}/intake/forgejo_nsc_token.txt" -seal_secret "secrets/infra/forgejo-nsc-dispatcher-config.age" "${REPO_ROOT}/intake/forgejo_nsc_dispatcher.yaml" -seal_secret "secrets/infra/forgejo-nsc-autoscaler-config.age" "${REPO_ROOT}/intake/forgejo_nsc_autoscaler.yaml" - -chmod 600 \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-token.age" \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-dispatcher-config.age" \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-autoscaler-config.age" - -echo "Sealed forgejo-nsc runtime inputs into:" -printf ' %s\n' \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-token.age" \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-dispatcher-config.age" \ - "${REPO_ROOT}/secrets/infra/forgejo-nsc-autoscaler-config.age" -echo "Deploy burrow-forge to apply the new CI credentials." 
diff --git a/Scripts/sync-forgejo-nsc-config.sh b/Scripts/sync-forgejo-nsc-config.sh deleted file mode 100755 index 2ce7114..0000000 --- a/Scripts/sync-forgejo-nsc-config.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -echo "Scripts/sync-forgejo-nsc-config.sh is obsolete." >&2 -echo "Burrow forgejo-nsc now consumes agenix-backed secrets instead of host-local intake files." >&2 -echo "Use Scripts/seal-forgejo-nsc-secrets.sh and deploy burrow-forge." >&2 -exit 1 diff --git a/Tools/forwardemail-custom-s3.sh b/Tools/forwardemail-custom-s3.sh deleted file mode 100755 index 5f39ddd..0000000 --- a/Tools/forwardemail-custom-s3.sh +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail -umask 077 - -usage() { - cat <<'EOF' -Usage: - Tools/forwardemail-custom-s3.sh \ - --domain burrow.net \ - --api-token-file intake/forwardemail_api_token.txt \ - --s3-endpoint https:// \ - --s3-region \ - --s3-bucket \ - --s3-access-key-file intake/hetzner-s3-user.txt \ - --s3-secret-key-file intake/hetzner-s3-secret.txt - -Options: - --domain Forward Email domain to update. - --api-token-file File containing the Forward Email API token. - --s3-endpoint S3-compatible endpoint URL. - --s3-region S3 region string expected by Forward Email. - --s3-bucket Bucket used for alias backup uploads. - --s3-access-key-file File containing the S3 access key id. - --s3-secret-key-file File containing the S3 secret access key. - --test-only Skip the update call and only test the saved connection. - --help Show this help text. - -Notes: - - Secrets are passed to curl through a temporary config file to avoid putting - them in the process list. - - By default the script updates the domain settings and then calls - /test-s3-connection. - - For Hetzner Object Storage, use the regional S3 endpoint such as - https://hel1.your-objectstorage.com, not an account alias endpoint. 
-EOF -} - -fail() { - printf 'error: %s\n' "$*" >&2 - exit 1 -} - -require_file() { - local path="$1" - [[ -f "$path" ]] || fail "missing file: $path" -} - -read_secret() { - local path="$1" - local value - value="$(tr -d '\r\n' < "$path")" - [[ -n "$value" ]] || fail "empty secret file: $path" - printf '%s' "$value" -} - -domain="" -api_token_file="" -s3_endpoint="" -s3_region="" -s3_bucket="" -s3_access_key_file="" -s3_secret_key_file="" -test_only=false - -while [[ $# -gt 0 ]]; do - case "$1" in - --domain) - domain="${2:-}" - shift 2 - ;; - --api-token-file) - api_token_file="${2:-}" - shift 2 - ;; - --s3-endpoint) - s3_endpoint="${2:-}" - shift 2 - ;; - --s3-region) - s3_region="${2:-}" - shift 2 - ;; - --s3-bucket) - s3_bucket="${2:-}" - shift 2 - ;; - --s3-access-key-file) - s3_access_key_file="${2:-}" - shift 2 - ;; - --s3-secret-key-file) - s3_secret_key_file="${2:-}" - shift 2 - ;; - --test-only) - test_only=true - shift - ;; - --help|-h) - usage - exit 0 - ;; - *) - fail "unknown argument: $1" - ;; - esac -done - -[[ -n "$domain" ]] || fail "--domain is required" -[[ -n "$api_token_file" ]] || fail "--api-token-file is required" -[[ -n "$s3_endpoint" || "$test_only" == true ]] || fail "--s3-endpoint is required unless --test-only is set" -[[ -n "$s3_region" || "$test_only" == true ]] || fail "--s3-region is required unless --test-only is set" -[[ -n "$s3_bucket" || "$test_only" == true ]] || fail "--s3-bucket is required unless --test-only is set" -[[ -n "$s3_access_key_file" || "$test_only" == true ]] || fail "--s3-access-key-file is required unless --test-only is set" -[[ -n "$s3_secret_key_file" || "$test_only" == true ]] || fail "--s3-secret-key-file is required unless --test-only is set" - -require_file "$api_token_file" -api_token="$(read_secret "$api_token_file")" - -if [[ "$test_only" == false ]]; then - require_file "$s3_access_key_file" - require_file "$s3_secret_key_file" - s3_access_key_id="$(read_secret "$s3_access_key_file")" - 
s3_secret_access_key="$(read_secret "$s3_secret_key_file")" - - case "$s3_endpoint" in - http://*|https://*) - ;; - *) - fail "--s3-endpoint must start with http:// or https://" - ;; - esac -fi - -curl_config="$(mktemp)" -trap 'rm -f "$curl_config"' EXIT - -if [[ "$test_only" == false ]]; then - cat >"$curl_config" <&2 - curl --config "$curl_config" - printf '\n' >&2 -fi - -cat >"$curl_config" <&2 -curl --config "$curl_config" -printf '\n' >&2 diff --git a/Tools/forwardemail-hetzner-storage.py b/Tools/forwardemail-hetzner-storage.py deleted file mode 100755 index 3a2a941..0000000 --- a/Tools/forwardemail-hetzner-storage.py +++ /dev/null @@ -1,261 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import annotations - -import argparse -import datetime as dt -import hashlib -import hmac -import sys -import textwrap -from pathlib import Path -from urllib.parse import urlencode, urlparse - -import requests - - -def read_secret(path: str) -> str: - value = Path(path).read_text(encoding="utf-8").strip() - if not value: - raise SystemExit(f"error: empty secret file: {path}") - return value - - -def sign(key: bytes, msg: str) -> bytes: - return hmac.new(key, msg.encode("utf-8"), hashlib.sha256).digest() - - -def request( - *, - method: str, - endpoint: str, - region: str, - access_key: str, - secret_key: str, - bucket: str, - query: dict[str, str] | None = None, - body: bytes = b"", - content_type: str | None = None, -) -> requests.Response: - parsed = urlparse(endpoint) - if parsed.scheme != "https": - raise SystemExit("error: endpoint must use https") - - host = parsed.netloc - canonical_uri = f"/{bucket}" - query = query or {} - canonical_querystring = urlencode(sorted(query.items()), doseq=True, safe="~") - - now = dt.datetime.now(dt.timezone.utc) - amz_date = now.strftime("%Y%m%dT%H%M%SZ") - date_stamp = now.strftime("%Y%m%d") - payload_hash = hashlib.sha256(body).hexdigest() - - headers = { - "host": host, - "x-amz-content-sha256": payload_hash, - "x-amz-date": 
amz_date, - } - if content_type: - headers["content-type"] = content_type - - signed_headers = ";".join(sorted(headers.keys())) - canonical_headers = "".join(f"{name}:{headers[name]}\n" for name in sorted(headers.keys())) - canonical_request = "\n".join( - [ - method, - canonical_uri, - canonical_querystring, - canonical_headers, - signed_headers, - payload_hash, - ] - ) - - algorithm = "AWS4-HMAC-SHA256" - credential_scope = f"{date_stamp}/{region}/s3/aws4_request" - string_to_sign = "\n".join( - [ - algorithm, - amz_date, - credential_scope, - hashlib.sha256(canonical_request.encode("utf-8")).hexdigest(), - ] - ) - - k_date = sign(("AWS4" + secret_key).encode("utf-8"), date_stamp) - k_region = sign(k_date, region) - k_service = sign(k_region, "s3") - signing_key = sign(k_service, "aws4_request") - signature = hmac.new(signing_key, string_to_sign.encode("utf-8"), hashlib.sha256).hexdigest() - - auth_header = ( - f"{algorithm} Credential={access_key}/{credential_scope}, " - f"SignedHeaders={signed_headers}, Signature={signature}" - ) - - url = f"{parsed.scheme}://{host}{canonical_uri}" - if canonical_querystring: - url = f"{url}?{canonical_querystring}" - - response = requests.request( - method, - url, - headers={**headers, "Authorization": auth_header}, - data=body, - timeout=30, - ) - return response - - -def ensure_bucket(args: argparse.Namespace, bucket: str) -> None: - head = request( - method="HEAD", - endpoint=args.endpoint, - region=args.region, - access_key=args.access_key, - secret_key=args.secret_key, - bucket=bucket, - ) - if head.status_code == 200: - print(f"{bucket}: exists") - return - if head.status_code != 404: - raise SystemExit(f"error: HEAD {bucket} returned {head.status_code}: {head.text[:200]}") - - body = textwrap.dedent( - f"""\ - - - {args.region} - - """ - ).encode("utf-8") - create = request( - method="PUT", - endpoint=args.endpoint, - region=args.region, - access_key=args.access_key, - secret_key=args.secret_key, - bucket=bucket, - 
body=body, - content_type="application/xml", - ) - if create.status_code not in (200, 204): - raise SystemExit(f"error: PUT {bucket} returned {create.status_code}: {create.text[:200]}") - print(f"{bucket}: created") - - -def put_lifecycle(args: argparse.Namespace, bucket: str) -> None: - body = textwrap.dedent( - f"""\ - - - - expire-forwardemail-backups-after-{args.expire_days}-days - Enabled - - - - - {args.expire_days} - - - - """ - ).encode("utf-8") - response = request( - method="PUT", - endpoint=args.endpoint, - region=args.region, - access_key=args.access_key, - secret_key=args.secret_key, - bucket=bucket, - query={"lifecycle": ""}, - body=body, - content_type="application/xml", - ) - if response.status_code not in (200, 204): - raise SystemExit( - f"error: PUT lifecycle for {bucket} returned {response.status_code}: {response.text[:200]}" - ) - print(f"{bucket}: lifecycle set to {args.expire_days} days") - - -def get_lifecycle(args: argparse.Namespace, bucket: str) -> None: - response = request( - method="GET", - endpoint=args.endpoint, - region=args.region, - access_key=args.access_key, - secret_key=args.secret_key, - bucket=bucket, - query={"lifecycle": ""}, - ) - if response.status_code != 200: - raise SystemExit( - f"error: GET lifecycle for {bucket} returned {response.status_code}: {response.text[:200]}" - ) - print(f"=== {bucket} lifecycle ===") - print(response.text.strip()) - - -def parse_args() -> argparse.Namespace: - parser = argparse.ArgumentParser( - description="Provision Hetzner object-storage buckets for Forward Email backups." - ) - parser.add_argument( - "--endpoint", - default="https://hel1.your-objectstorage.com", - help="Public S3-compatible endpoint URL. 
For Hetzner, use the regional endpoint, not the account alias.", - ) - parser.add_argument("--region", default="hel1", help="S3 region.") - parser.add_argument( - "--access-key-file", - default="intake/hetzner-s3-user.txt", - help="File containing the S3 access key id.", - ) - parser.add_argument( - "--secret-key-file", - default="intake/hetzner-s3-secret.txt", - help="File containing the S3 secret key.", - ) - parser.add_argument( - "--bucket", - action="append", - required=True, - help="Bucket to provision. Repeat for multiple buckets.", - ) - parser.add_argument( - "--expire-days", - type=int, - default=90, - help="Lifecycle expiry window in days.", - ) - parser.add_argument( - "--verify-only", - action="store_true", - help="Skip create/update and only read the current lifecycle.", - ) - return parser.parse_args() - - -def main() -> None: - args = parse_args() - args.access_key = read_secret(args.access_key_file) - args.secret_key = read_secret(args.secret_key_file) - - for bucket in args.bucket: - if args.verify_only: - get_lifecycle(args, bucket) - continue - ensure_bucket(args, bucket) - put_lifecycle(args, bucket) - get_lifecycle(args, bucket) - - -if __name__ == "__main__": - try: - main() - except requests.RequestException as err: - raise SystemExit(f"error: request failed: {err}") from err diff --git a/Tools/tailscale-login-bridge/go.mod b/Tools/tailscale-login-bridge/go.mod deleted file mode 100644 index 0e19f33..0000000 --- a/Tools/tailscale-login-bridge/go.mod +++ /dev/null @@ -1,66 +0,0 @@ -module burrow.dev/tailscale-login-bridge - -go 1.26.1 - -require tailscale.com v1.96.5 - -require ( - filippo.io/edwards25519 v1.2.0 // indirect - github.com/akutz/memconn v0.1.0 // indirect - github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa // indirect - github.com/aws/aws-sdk-go-v2 v1.41.0 // indirect - github.com/aws/aws-sdk-go-v2/config v1.29.5 // indirect - github.com/aws/aws-sdk-go-v2/credentials v1.17.58 // indirect - 
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.27 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.16 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.16 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.16 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.24.14 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.13 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.41.5 // indirect - github.com/aws/smithy-go v1.24.0 // indirect - github.com/coder/websocket v1.8.12 // indirect - github.com/creachadair/msync v0.7.1 // indirect - github.com/dblohm7/wingoes v0.0.0-20240119213807-a09d6be7affa // indirect - github.com/fxamacker/cbor/v2 v2.9.0 // indirect - github.com/gaissmai/bart v0.26.1 // indirect - github.com/go-json-experiment/json v0.0.0-20250813024750-ebf49471dced // indirect - github.com/godbus/dbus/v5 v5.1.1-0.20230522191255-76236955d466 // indirect - github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect - github.com/google/btree v1.1.3 // indirect - github.com/google/go-cmp v0.7.0 // indirect - github.com/google/uuid v1.6.0 // indirect - github.com/hdevalence/ed25519consensus v0.2.0 // indirect - github.com/huin/goupnp v1.3.0 // indirect - github.com/jsimonetti/rtnetlink v1.4.0 // indirect - github.com/klauspost/compress v1.18.2 // indirect - github.com/mdlayher/netlink v1.7.3-0.20250113171957-fbb4dce95f42 // indirect - github.com/mdlayher/socket v0.5.0 // indirect - github.com/mitchellh/go-ps v1.0.0 // indirect - github.com/pires/go-proxyproto v0.8.1 // indirect - github.com/prometheus-community/pro-bing v0.4.0 // indirect - github.com/safchain/ethtool v0.3.0 // indirect - github.com/tailscale/certstore v0.1.1-0.20231202035212-d3fa0460f47e // indirect - github.com/tailscale/go-winio 
v0.0.0-20231025203758-c4f33415bf55 // indirect - github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a // indirect - github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc // indirect - github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976 // indirect - github.com/tailscale/wireguard-go v0.0.0-20250716170648-1d0488a3d7da // indirect - github.com/x448/float16 v0.8.4 // indirect - go4.org/mem v0.0.0-20240501181205-ae6ca9944745 // indirect - go4.org/netipx v0.0.0-20231129151722-fdeea329fbba // indirect - golang.org/x/crypto v0.46.0 // indirect - golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect - golang.org/x/net v0.48.0 // indirect - golang.org/x/oauth2 v0.33.0 // indirect - golang.org/x/sync v0.19.0 // indirect - golang.org/x/sys v0.40.0 // indirect - golang.org/x/term v0.38.0 // indirect - golang.org/x/text v0.32.0 // indirect - golang.org/x/time v0.12.0 // indirect - golang.zx2c4.com/wintun v0.0.0-20230126152724-0fa3db229ce2 // indirect - golang.zx2c4.com/wireguard/windows v0.5.3 // indirect - gvisor.dev/gvisor v0.0.0-20260224225140-573d5e7127a8 // indirect -) diff --git a/Tools/tailscale-login-bridge/go.sum b/Tools/tailscale-login-bridge/go.sum deleted file mode 100644 index 5393a62..0000000 --- a/Tools/tailscale-login-bridge/go.sum +++ /dev/null @@ -1,229 +0,0 @@ -9fans.net/go v0.0.8-0.20250307142834-96bdba94b63f h1:1C7nZuxUMNz7eiQALRfiqNOm04+m3edWlRff/BYHf0Q= -9fans.net/go v0.0.8-0.20250307142834-96bdba94b63f/go.mod h1:hHyrZRryGqVdqrknjq5OWDLGCTJ2NeEvtrpR96mjraM= -filippo.io/edwards25519 v1.2.0 h1:crnVqOiS4jqYleHd9vaKZ+HKtHfllngJIiOpNpoJsjo= -filippo.io/edwards25519 v1.2.0/go.mod h1:xzAOLCNug/yB62zG1bQ8uziwrIqIuxhctzJT18Q77mc= -filippo.io/mkcert v1.4.4 h1:8eVbbwfVlaqUM7OwuftKc2nuYOoTDQWqsoXmzoXZdbc= -filippo.io/mkcert v1.4.4/go.mod h1:VyvOchVuAye3BoUsPUOOofKygVwLV2KQMVFJNRq+1dA= -github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= -github.com/BurntSushi/toml v1.5.0/go.mod 
h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= -github.com/akutz/memconn v0.1.0 h1:NawI0TORU4hcOMsMr11g7vwlCdkYeLKXBcxWu2W/P8A= -github.com/akutz/memconn v0.1.0/go.mod h1:Jo8rI7m0NieZyLI5e2CDlRdRqRRB4S7Xp77ukDjH+Fw= -github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI= -github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= -github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/aws/aws-sdk-go-v2 v1.41.0 h1:tNvqh1s+v0vFYdA1xq0aOJH+Y5cRyZ5upu6roPgPKd4= -github.com/aws/aws-sdk-go-v2 v1.41.0/go.mod h1:MayyLB8y+buD9hZqkCW3kX1AKq07Y5pXxtgB+rRFhz0= -github.com/aws/aws-sdk-go-v2/config v1.29.5 h1:4lS2IB+wwkj5J43Tq/AwvnscBerBJtQQ6YS7puzCI1k= -github.com/aws/aws-sdk-go-v2/config v1.29.5/go.mod h1:SNzldMlDVbN6nWxM7XsUiNXPSa1LWlqiXtvh/1PrJGg= -github.com/aws/aws-sdk-go-v2/credentials v1.17.58 h1:/d7FUpAPU8Lf2KUdjniQvfNdlMID0Sd9pS23FJ3SS9Y= -github.com/aws/aws-sdk-go-v2/credentials v1.17.58/go.mod h1:aVYW33Ow10CyMQGFgC0ptMRIqJWvJ4nxZb0sUiuQT/A= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.27 h1:7lOW8NUwE9UZekS1DYoiPdVAqZ6A+LheHWb+mHbNOq8= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.27/go.mod h1:w1BASFIPOPUae7AgaH4SbjNbfdkxuggLyGfNFTn8ITY= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.16 h1:rgGwPzb82iBYSvHMHXc8h9mRoOUBZIGFgKb9qniaZZc= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.16/go.mod h1:L/UxsGeKpGoIj6DxfhOWHWQ/kGKcd4I1VncE4++IyKA= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.16 h1:1jtGzuV7c82xnqOVfx2F0xmJcOw5374L7N6juGW6x6U= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.16/go.mod h1:M2E5OQf+XLe+SZGmmpaI2yy+J326aFf6/+54PoxSANc= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2 
h1:Pg9URiobXy85kgFev3og2CuOZ8JZUBENF+dcgWBaYNk= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 h1:0ryTNEdJbzUCEWkVXEXoqlXV72J5keC1GvILMOuD00E= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4/go.mod h1:HQ4qwNZh32C3CBeO6iJLQlgtMzqeG17ziAA/3KDJFow= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.16 h1:oHjJHeUy0ImIV0bsrX0X91GkV5nJAyv1l1CC9lnO0TI= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.16/go.mod h1:iRSNGgOYmiYwSCXxXaKb9HfOEj40+oTKn8pTxMlYkRM= -github.com/aws/aws-sdk-go-v2/service/ssm v1.44.7 h1:a8HvP/+ew3tKwSXqL3BCSjiuicr+XTU2eFYeogV9GJE= -github.com/aws/aws-sdk-go-v2/service/ssm v1.44.7/go.mod h1:Q7XIWsMo0JcMpI/6TGD6XXcXcV1DbTj6e9BKNntIMIM= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.14 h1:c5WJ3iHz7rLIgArznb3JCSQT3uUMiz9DLZhIX+1G8ok= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.14/go.mod h1:+JJQTxB6N4niArC14YNtxcQtwEqzS3o9Z32n7q33Rfs= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.13 h1:f1L/JtUkVODD+k1+IiSJUUv8A++2qVr+Xvb3xWXETMU= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.13/go.mod h1:tvqlFoja8/s0o+UruA1Nrezo/df0PzdunMDDurUfg6U= -github.com/aws/aws-sdk-go-v2/service/sts v1.41.5 h1:SciGFVNZ4mHdm7gpD1dgZYnCuVdX1s+lFTg4+4DOy70= -github.com/aws/aws-sdk-go-v2/service/sts v1.41.5/go.mod h1:iW40X4QBmUxdP+fZNOpfmkdMZqsovezbAeO+Ubiv2pk= -github.com/aws/smithy-go v1.24.0 h1:LpilSUItNPFr1eY85RYgTIg5eIEPtvFbskaFcmmIUnk= -github.com/aws/smithy-go v1.24.0/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= -github.com/axiomhq/hyperloglog v0.0.0-20240319100328-84253e514e02 h1:bXAPYSbdYbS5VTy92NIUbeDI1qyggi+JYh5op9IFlcQ= -github.com/axiomhq/hyperloglog v0.0.0-20240319100328-84253e514e02/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c= -github.com/cilium/ebpf v0.16.0 h1:+BiEnHL6Z7lXnlGUsXQPPAE7+kenAd4ES8MQ5min0Ok= -github.com/cilium/ebpf v0.16.0/go.mod 
h1:L7u2Blt2jMM/vLAVgjxluxtBKlz3/GWjB0dMOEngfwE= -github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo= -github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= -github.com/coreos/go-iptables v0.7.1-0.20240112124308-65c67c9f46e6 h1:8h5+bWd7R6AYUslN6c6iuZWTKsKxUFDlpnmilO6R2n0= -github.com/coreos/go-iptables v0.7.1-0.20240112124308-65c67c9f46e6/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= -github.com/creachadair/mds v0.25.9 h1:080Hr8laN2h+l3NeVCGMBpXtIPnl9mz8e4HLraGPqtA= -github.com/creachadair/mds v0.25.9/go.mod h1:4hatI3hRM+qhzuAmqPRFvaBM8mONkS7nsLxkcuTYUIs= -github.com/creachadair/msync v0.7.1 h1:SeZmuEBXQPe5GqV/C94ER7QIZPwtvFbeQiykzt/7uho= -github.com/creachadair/msync v0.7.1/go.mod h1:8CcFlLsSujfHE5wWm19uUBLHIPDAUr6LXDwneVMO008= -github.com/creachadair/taskgroup v0.13.2 h1:3KyqakBuFsm3KkXi/9XIb0QcA8tEzLHLgaoidf0MdVc= -github.com/creachadair/taskgroup v0.13.2/go.mod h1:i3V1Zx7H8RjwljUEeUWYT30Lmb9poewSb2XI1yTwD0g= -github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s= -github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE= -github.com/dblohm7/wingoes v0.0.0-20240119213807-a09d6be7affa h1:h8TfIT1xc8FWbwwpmHn1J5i43Y0uZP97GqasGCzSRJk= -github.com/dblohm7/wingoes v0.0.0-20240119213807-a09d6be7affa/go.mod h1:Nx87SkVqTKd8UtT+xu7sM/l+LgXs6c0aHrlKusR+2EQ= -github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8= -github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw= -github.com/digitalocean/go-smbios v0.0.0-20180907143718-390a4f403a8e h1:vUmf0yezR0y7jJ5pceLHthLaYf4bA5T14B6q39S4q2Q= -github.com/digitalocean/go-smbios v0.0.0-20180907143718-390a4f403a8e/go.mod h1:YTIHhz/QFSYnu/EhlF2SpU2Uk+32abacUYA5ZPljz1A= -github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c= -github.com/djherbis/times v1.6.0/go.mod 
h1:gOHeRAz2h+VJNZ5Gmc/o7iD9k4wW7NMVqieYCY99oc0= -github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= -github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM= -github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ= -github.com/gaissmai/bart v0.26.1 h1:+w4rnLGNlA2GDVn382Tfe3jOsK5vOr5n4KmigJ9lbTo= -github.com/gaissmai/bart v0.26.1/go.mod h1:GREWQfTLRWz/c5FTOsIw+KkscuFkIV5t8Rp7Nd1Td5c= -github.com/github/fakeca v0.1.0 h1:Km/MVOFvclqxPM9dZBC4+QE564nU4gz4iZ0D9pMw28I= -github.com/github/fakeca v0.1.0/go.mod h1:+bormgoGMMuamOscx7N91aOuUST7wdaJ2rNjeohylyo= -github.com/go-json-experiment/json v0.0.0-20250813024750-ebf49471dced h1:Q311OHjMh/u5E2TITc++WlTP5We0xNseRMkHDyvhW7I= -github.com/go-json-experiment/json v0.0.0-20250813024750-ebf49471dced/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= -github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= -github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= -github.com/go4org/plan9netshell v0.0.0-20250324183649-788daa080737 h1:cf60tHxREO3g1nroKr2osU3JWZsJzkfi7rEg+oAB0Lo= -github.com/go4org/plan9netshell v0.0.0-20250324183649-788daa080737/go.mod h1:MIS0jDzbU/vuM9MC4YnBITCv+RYuTRq8dJzmCrFsK9g= -github.com/godbus/dbus/v5 v5.1.1-0.20230522191255-76236955d466 h1:sQspH8M4niEijh3PFscJRLDnkL547IeP7kpPe3uUhEg= -github.com/godbus/dbus/v5 v5.1.1-0.20230522191255-76236955d466/go.mod h1:ZiQxhyQ+bbbfxUKVvjfO498oPYvtYhZzycal3G/NHmU= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= -github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= -github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg= -github.com/google/btree v1.1.3/go.mod 
h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/go-tpm v0.9.4 h1:awZRf9FwOeTunQmHoDYSHJps3ie6f1UlhS1fOdPEt1I= -github.com/google/go-tpm v0.9.4/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY= -github.com/google/nftables v0.2.1-0.20240414091927-5e242ec57806 h1:wG8RYIyctLhdFk6Vl1yPGtSRtwGpVkWyZww1OCil2MI= -github.com/google/nftables v0.2.1-0.20240414091927-5e242ec57806/go.mod h1:Beg6V6zZ3oEn0JuiUQ4wqwuyqqzasOltcoXPtgLbFp4= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/hdevalence/ed25519consensus v0.2.0 h1:37ICyZqdyj0lAZ8P4D1d1id3HqbbG1N3iBb1Tb4rdcU= -github.com/hdevalence/ed25519consensus v0.2.0/go.mod h1:w3BHWjwJbFU29IRHL1Iqkw3sus+7FctEyM4RqDxYNzo= -github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= -github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= -github.com/illarion/gonotify/v3 v3.0.2 h1:O7S6vcopHexutmpObkeWsnzMJt/r1hONIEogeVNmJMk= -github.com/illarion/gonotify/v3 v3.0.2/go.mod h1:HWGPdPe817GfvY3w7cx6zkbzNZfi3QjcBm/wgVvEL1U= -github.com/insomniacslk/dhcp v0.0.0-20231206064809-8c70d406f6d2 h1:9K06NfxkBh25x56yVhWWlKFE8YpicaSfHwoV8SFbueA= -github.com/insomniacslk/dhcp v0.0.0-20231206064809-8c70d406f6d2/go.mod h1:3A9PQ1cunSDF/1rbTq99Ts4pVnycWg+vlPkfeD2NLFI= -github.com/jellydator/ttlcache/v3 v3.1.0 h1:0gPFG0IHHP6xyUyXq+JaD8fwkDCqgqwohXNJBcYE71g= -github.com/jellydator/ttlcache/v3 v3.1.0/go.mod h1:hi7MGFdMAwZna5n2tuvh63DvFLzVKySzCVW6+0gA2n4= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jsimonetti/rtnetlink v1.4.0 
h1:Z1BF0fRgcETPEa0Kt0MRk3yV5+kF1FWTni6KUFKrq2I= -github.com/jsimonetti/rtnetlink v1.4.0/go.mod h1:5W1jDvWdnthFJ7fxYX1GMK07BUpI4oskfOqvPteYS6E= -github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk= -github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4= -github.com/kortschak/wol v0.0.0-20200729010619-da482cc4850a h1:+RR6SqnTkDLWyICxS1xpjCi/3dhyV+TgZwA6Ww3KncQ= -github.com/kortschak/wol v0.0.0-20200729010619-da482cc4850a/go.mod h1:YTtCCM3ryyfiu4F7t8HQ1mxvp1UBdWM2r6Xa+nGWvDk= -github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/mdlayher/genetlink v1.3.2 h1:KdrNKe+CTu+IbZnm/GVUMXSqBBLqcGpRDa0xkQy56gw= -github.com/mdlayher/genetlink v1.3.2/go.mod h1:tcC3pkCrPUGIKKsCsp0B3AdaaKuHtaxoJRz3cc+528o= -github.com/mdlayher/netlink v1.7.3-0.20250113171957-fbb4dce95f42 h1:A1Cq6Ysb0GM0tpKMbdCXCIfBclan4oHk1Jb+Hrejirg= -github.com/mdlayher/netlink v1.7.3-0.20250113171957-fbb4dce95f42/go.mod h1:BB4YCPDOzfy7FniQ/lxuYQ3dgmM2cZumHbK8RpTjN2o= -github.com/mdlayher/sdnotify v1.0.0 h1:Ma9XeLVN/l0qpyx1tNeMSeTjCPH6NtuD6/N9XdTlQ3c= -github.com/mdlayher/sdnotify v1.0.0/go.mod h1:HQUmpM4XgYkhDLtd+Uad8ZFK1T9D5+pNxnXQjCeJlGE= -github.com/mdlayher/socket v0.5.0 h1:ilICZmJcQz70vrWVes1MFera4jGiWNocSkykwwoy3XI= -github.com/mdlayher/socket v0.5.0/go.mod h1:WkcBFfvyG8QENs5+hfQPl1X6Jpd2yeLIYgrGFmJiJxI= -github.com/miekg/dns v1.1.58 h1:ca2Hdkz+cDg/7eNF6V56jjzuZ4aCAE+DbVkILdQWG/4= -github.com/miekg/dns v1.1.58/go.mod h1:Ypv+3b/KadlvW9vJfXOTf300O4UqaHFzFCuHz+rPkBY= -github.com/mitchellh/go-ps v1.0.0 
h1:i6ampVEEF4wQFF+bkYfwYgY+F/uYJDktmvLPf7qIgjc= -github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= -github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= -github.com/pierrec/lz4/v4 v4.1.25 h1:kocOqRffaIbU5djlIBr7Wh+cx82C0vtFb0fOurZHqD0= -github.com/pierrec/lz4/v4 v4.1.25/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4= -github.com/pires/go-proxyproto v0.8.1 h1:9KEixbdJfhrbtjpz/ZwCdWDD2Xem0NZ38qMYaASJgp0= -github.com/pires/go-proxyproto v0.8.1/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU= -github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo= -github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk= -github.com/prometheus-community/pro-bing v0.4.0 h1:YMbv+i08gQz97OZZBwLyvmmQEEzyfyrrjEaAchdy3R4= -github.com/prometheus-community/pro-bing v0.4.0/go.mod h1:b7wRYZtCcPmt4Sz319BykUU241rWLe1VFXyiyWK/dH4= -github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= -github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= -github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= -github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= -github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= -github.com/safchain/ethtool v0.3.0 h1:gimQJpsI6sc1yIqP/y8GYgiXn/NjgvpM0RNoWLVVmP0= -github.com/safchain/ethtool v0.3.0/go.mod 
h1:SA9BwrgyAqNo7M+uaL6IYbxpm5wk3L7Mm6ocLW+CJUs= -github.com/tailscale/certstore v0.1.1-0.20231202035212-d3fa0460f47e h1:PtWT87weP5LWHEY//SWsYkSO3RWRZo4OSWagh3YD2vQ= -github.com/tailscale/certstore v0.1.1-0.20231202035212-d3fa0460f47e/go.mod h1:XrBNfAFN+pwoWuksbFS9Ccxnopa15zJGgXRFN90l3K4= -github.com/tailscale/go-winio v0.0.0-20231025203758-c4f33415bf55 h1:Gzfnfk2TWrk8Jj4P4c1a3CtQyMaTVCznlkLZI++hok4= -github.com/tailscale/go-winio v0.0.0-20231025203758-c4f33415bf55/go.mod h1:4k4QO+dQ3R5FofL+SanAUZe+/QfeK0+OIuwDIRu2vSg= -github.com/tailscale/golang-x-crypto v0.0.0-20250404221719-a5573b049869 h1:SRL6irQkKGQKKLzvQP/ke/2ZuB7Py5+XuqtOgSj+iMM= -github.com/tailscale/golang-x-crypto v0.0.0-20250404221719-a5573b049869/go.mod h1:ikbF+YT089eInTp9f2vmvy4+ZVnW5hzX1q2WknxSprQ= -github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a h1:SJy1Pu0eH1C29XwJucQo73FrleVK6t4kYz4NVhp34Yw= -github.com/tailscale/hujson v0.0.0-20221223112325-20486734a56a/go.mod h1:DFSS3NAGHthKo1gTlmEcSBiZrRJXi28rLNd/1udP1c8= -github.com/tailscale/netlink v1.1.1-0.20240822203006-4d49adab4de7 h1:uFsXVBE9Qr4ZoF094vE6iYTLDl0qCiKzYXlL6UeWObU= -github.com/tailscale/netlink v1.1.1-0.20240822203006-4d49adab4de7/go.mod h1:NzVQi3Mleb+qzq8VmcWpSkcSYxXIg0DkI6XDzpVkhJ0= -github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc h1:24heQPtnFR+yfntqhI3oAu9i27nEojcQ4NuBQOo5ZFA= -github.com/tailscale/peercred v0.0.0-20250107143737-35a0c7bd7edc/go.mod h1:f93CXfllFsO9ZQVq+Zocb1Gp4G5Fz0b0rXHLOzt/Djc= -github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976 h1:UBPHPtv8+nEAy2PD8RyAhOYvau1ek0HDJqLS/Pysi14= -github.com/tailscale/web-client-prebuilt v0.0.0-20250124233751-d4cd19a26976/go.mod h1:agQPE6y6ldqCOui2gkIh7ZMztTkIQKH049tv8siLuNQ= -github.com/tailscale/wf v0.0.0-20240214030419-6fbb0a674ee6 h1:l10Gi6w9jxvinoiq15g8OToDdASBni4CyJOdHY1Hr8M= -github.com/tailscale/wf v0.0.0-20240214030419-6fbb0a674ee6/go.mod h1:ZXRML051h7o4OcI0d3AaILDIad/Xw0IkXaHM17dic1Y= -github.com/tailscale/wireguard-go 
v0.0.0-20250716170648-1d0488a3d7da h1:jVRUZPRs9sqyKlYHHzHjAqKN+6e/Vog6NpHYeNPJqOw= -github.com/tailscale/wireguard-go v0.0.0-20250716170648-1d0488a3d7da/go.mod h1:BOm5fXUBFM+m9woLNBoxI9TaBXXhGNP50LX/TGIvGb4= -github.com/tailscale/xnet v0.0.0-20240729143630-8497ac4dab2e h1:zOGKqN5D5hHhiYUp091JqK7DPCqSARyUfduhGUY8Bek= -github.com/tailscale/xnet v0.0.0-20240729143630-8497ac4dab2e/go.mod h1:orPd6JZXXRyuDusYilywte7k094d7dycXXU5YnWsrwg= -github.com/tc-hib/winres v0.2.1 h1:YDE0FiP0VmtRaDn7+aaChp1KiF4owBiJa5l964l5ujA= -github.com/tc-hib/winres v0.2.1/go.mod h1:C/JaNhH3KBvhNKVbvdlDWkbMDO9H4fKKDaN7/07SSuk= -github.com/u-root/u-root v0.14.0 h1:Ka4T10EEML7dQ5XDvO9c3MBN8z4nuSnGjcd1jmU2ivg= -github.com/u-root/u-root v0.14.0/go.mod h1:hAyZorapJe4qzbLWlAkmSVCJGbfoU9Pu4jpJ1WMluqE= -github.com/u-root/uio v0.0.0-20240224005618-d2acac8f3701 h1:pyC9PaHYZFgEKFdlp3G8RaCKgVpHZnecvArXvPXcFkM= -github.com/u-root/uio v0.0.0-20240224005618-d2acac8f3701/go.mod h1:P3a5rG4X7tI17Nn3aOIAYr5HbIMukwXG0urG0WuL8OA= -github.com/vishvananda/netns v0.0.5 h1:DfiHV+j8bA32MFM7bfEunvT8IAqQ/NzSJHtcmW5zdEY= -github.com/vishvananda/netns v0.0.5/go.mod h1:SpkAiCQRtJ6TvvxPnOSyH3BMl6unz3xZlaprSwhNNJM= -github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= -github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -go4.org/mem v0.0.0-20240501181205-ae6ca9944745 h1:Tl++JLUCe4sxGu8cTpDzRLd3tN7US4hOxG5YpKCzkek= -go4.org/mem v0.0.0-20240501181205-ae6ca9944745/go.mod h1:reUoABIJ9ikfM5sgtSF3Wushcza7+WeD01VB9Lirh3g= -go4.org/netipx v0.0.0-20231129151722-fdeea329fbba h1:0b9z3AuHCjxk0x/opv64kcgZLBseWJUpBw5I82+2U4M= -go4.org/netipx v0.0.0-20231129151722-fdeea329fbba/go.mod h1:PLyyIXexvUFg3Owu6p/WfdlivPbZJsZdgWZlrGope/Y= -golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU= -golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0= -golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b 
h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= -golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/image v0.27.0 h1:C8gA4oWU/tKkdCfYT6T2u4faJu3MeNS5O8UPWlPF61w= -golang.org/x/image v0.27.0/go.mod h1:xbdrClrAUway1MUTEZDq9mz/UpRwYAkFFNUslZtcB+g= -golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk= -golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc= -golang.org/x/net v0.48.0 h1:zyQRTTrjc33Lhh0fBgT/H3oZq9WuvRR5gPC70xpDiQU= -golang.org/x/net v0.48.0/go.mod h1:+ndRgGjkh8FGtu1w1FGbEC31if4VrNVMuKTgcAAnQRY= -golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo= -golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= -golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= -golang.org/x/sys v0.0.0-20220817070843-5a390386f1f2/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= -golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q= -golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg= -golang.org/x/text v0.32.0 h1:ZD01bjUt1FQ9WJ0ClOL5vxgxOI/sVCNgX1YtKwcY0mU= -golang.org/x/text v0.32.0/go.mod h1:o/rUWzghvpD5TXrTIBuJU77MTaN0ljMWE47kxGJQ7jY= -golang.org/x/time v0.12.0 
h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= -golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= -golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ= -golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ= -golang.zx2c4.com/wintun v0.0.0-20230126152724-0fa3db229ce2 h1:B82qJJgjvYKsXS9jeunTOisW56dUokqW/FOteYJJ/yg= -golang.zx2c4.com/wintun v0.0.0-20230126152724-0fa3db229ce2/go.mod h1:deeaetjYA+DHMHg+sMSMI58GrEteJUUzzw7en6TJQcI= -golang.zx2c4.com/wireguard/windows v0.5.3 h1:On6j2Rpn3OEMXqBq00QEDC7bWSZrPIHKIus8eIuExIE= -golang.zx2c4.com/wireguard/windows v0.5.3/go.mod h1:9TEe8TJmtwyQebdFwAkEWOPr3prrtqm+REGFifP60hI= -google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= -google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= -gvisor.dev/gvisor v0.0.0-20260224225140-573d5e7127a8 h1:Zy8IV/+FMLxy6j6p87vk/vQGKcdnbprwjTxc8UiUtsA= -gvisor.dev/gvisor v0.0.0-20260224225140-573d5e7127a8/go.mod h1:QkHjoMIBaYtpVufgwv3keYAbln78mBoCuShZrPrer1Q= -honnef.co/go/tools v0.7.0-0.dev.0.20251022135355-8273271481d0 h1:5SXjd4ET5dYijLaf0O3aOenC0Z4ZafIWSpjUzsQaNho= -honnef.co/go/tools v0.7.0-0.dev.0.20251022135355-8273271481d0/go.mod h1:EPDDhEZqVHhWuPI5zPAsjU0U7v9xNIWjoOVyZ5ZcniQ= -howett.net/plist v1.0.0 h1:7CrbWYbPPO/PyNy38b2EB/+gYbjCe2DXBxgtOOZbSQM= -howett.net/plist v1.0.0/go.mod h1:lqaXoTrLY4hg8tnEzNru53gicrbv7rrk+2xJA/7hw9g= -software.sslmate.com/src/go-pkcs12 v0.4.0 h1:H2g08FrTvSFKUj+D309j1DPfk5APnIdAQAB8aEykJ5k= -software.sslmate.com/src/go-pkcs12 v0.4.0/go.mod h1:Qiz0EyvDRJjjxGyUQa2cCNZn/wMyzrRJ/qcDXOQazLI= -tailscale.com v1.96.5 h1:gNkfA/KSZAl6jCH9cj8urq00HRWItDDTtGsyATI89jA= -tailscale.com v1.96.5/go.mod h1:/3lnZBYb2UEwnN0MNu2SDXUtT06AGd5k0s+OWx3WmcY= diff --git a/Tools/tailscale-login-bridge/main.go b/Tools/tailscale-login-bridge/main.go deleted file mode 100644 index 877d0e4..0000000 --- a/Tools/tailscale-login-bridge/main.go +++ 
/dev/null @@ -1,523 +0,0 @@ -package main - -import ( - "context" - "encoding/binary" - "encoding/json" - "errors" - "flag" - "fmt" - "io" - "log" - "net" - "net/netip" - "net/http" - "os" - "strconv" - "sync" - "time" - - "github.com/tailscale/wireguard-go/tun" - "tailscale.com/client/local" - "tailscale.com/ipn" - "tailscale.com/ipn/ipnstate" - "tailscale.com/tailcfg" - "tailscale.com/tsnet" -) - -type statusResponse struct { - BackendState string `json:"backend_state"` - AuthURL string `json:"auth_url,omitempty"` - Running bool `json:"running"` - NeedsLogin bool `json:"needs_login"` - TailnetName string `json:"tailnet_name,omitempty"` - MagicDNSSuffix string `json:"magic_dns_suffix,omitempty"` - SelfDNSName string `json:"self_dns_name,omitempty"` - TailscaleIPs []string `json:"tailscale_ips,omitempty"` - Health []string `json:"health,omitempty"` - Peers []peerSummary `json:"peers,omitempty"` -} - -type peerSummary struct { - Name string `json:"name,omitempty"` - DNSName string `json:"dns_name,omitempty"` - TailscaleIPs []string `json:"tailscale_ips,omitempty"` - Online bool `json:"online"` - Active bool `json:"active"` - Relay string `json:"relay,omitempty"` - CurAddr string `json:"cur_addr,omitempty"` - LastSeenUnix int64 `json:"last_seen_unix,omitempty"` -} - -type pingResponse struct { - Result *ipnstate.PingResult `json:"result,omitempty"` -} - -type helperHello struct { - ListenAddr string `json:"listen_addr"` - PacketSocket string `json:"packet_socket,omitempty"` -} - -type helperState struct { - mu sync.RWMutex - authURL string -} - -func (s *helperState) authURLSnapshot() string { - s.mu.RLock() - defer s.mu.RUnlock() - return s.authURL -} - -func (s *helperState) setAuthURL(url string) { - s.mu.Lock() - defer s.mu.Unlock() - s.authURL = url -} - -func (s *helperState) clearAuthURL() { - s.setAuthURL("") -} - -// chanTUN is a tun.Device backed by channels so another process can feed and -// consume raw IP packets while tsnet handles the Tailnet 
control/data plane. -type chanTUN struct { - Inbound chan []byte - Outbound chan []byte - closed chan struct{} - events chan tun.Event -} - -func newChanTUN() *chanTUN { - t := &chanTUN{ - Inbound: make(chan []byte, 1024), - Outbound: make(chan []byte, 1024), - closed: make(chan struct{}), - events: make(chan tun.Event, 1), - } - t.events <- tun.EventUp - return t -} - -func (t *chanTUN) File() *os.File { return nil } - -func (t *chanTUN) Close() error { - select { - case <-t.closed: - default: - close(t.closed) - close(t.Inbound) - } - return nil -} - -func (t *chanTUN) Read(bufs [][]byte, sizes []int, offset int) (int, error) { - select { - case <-t.closed: - return 0, io.EOF - case pkt, ok := <-t.Outbound: - if !ok { - return 0, io.EOF - } - sizes[0] = copy(bufs[0][offset:], pkt) - return 1, nil - } -} - -func (t *chanTUN) Write(bufs [][]byte, offset int) (int, error) { - for _, buf := range bufs { - pkt := buf[offset:] - if len(pkt) == 0 { - continue - } - select { - case <-t.closed: - return 0, errors.New("closed") - case t.Inbound <- append([]byte(nil), pkt...): - default: - } - } - return len(bufs), nil -} - -func (t *chanTUN) MTU() (int, error) { return 1280, nil } -func (t *chanTUN) Name() (string, error) { return "burrow-tailnet", nil } -func (t *chanTUN) Events() <-chan tun.Event { return t.events } -func (t *chanTUN) BatchSize() int { return 1 } - -func main() { - listen := flag.String("listen", "127.0.0.1:0", "local listen address") - stateDir := flag.String("state-dir", "", "persistent state directory") - hostname := flag.String("hostname", "burrow-apple", "tailnet hostname") - controlURL := flag.String("control-url", "", "optional control URL") - packetSocket := flag.String("packet-socket", "", "optional unix socket path for raw packet bridging") - udpEchoPort := flag.Int("udp-echo-port", 0, "optional tailnet UDP echo port") - flag.Parse() - - if *stateDir == "" { - log.Fatal("--state-dir is required") - } - - if err := os.MkdirAll(*stateDir, 0o755); 
err != nil { - log.Fatalf("create state dir: %v", err) - } - - server := &tsnet.Server{ - Dir: *stateDir, - Hostname: *hostname, - UserLogf: log.Printf, - } - - var tunDevice *chanTUN - var packetListener net.Listener - if *packetSocket != "" { - _ = os.Remove(*packetSocket) - ln, err := net.Listen("unix", *packetSocket) - if err != nil { - log.Fatalf("packet listen: %v", err) - } - packetListener = ln - defer func() { - packetListener.Close() - _ = os.Remove(*packetSocket) - }() - - tunDevice = newChanTUN() - server.Tun = tunDevice - } - if *controlURL != "" { - server.ControlURL = *controlURL - } - defer server.Close() - - if err := server.Start(); err != nil { - log.Fatalf("start tsnet: %v", err) - } - - localClient, err := server.LocalClient() - if err != nil { - log.Fatalf("local client: %v", err) - } - state := &helperState{} - - ln, err := net.Listen("tcp", *listen) - if err != nil { - log.Fatalf("listen: %v", err) - } - defer ln.Close() - - if packetListener != nil { - go servePacketBridge(packetListener, tunDevice) - } - if *udpEchoPort > 0 { - go serveUDPEcho(context.Background(), server, localClient, *udpEchoPort) - } - - hello := helperHello{ - ListenAddr: ln.Addr().String(), - } - if *packetSocket != "" { - hello.PacketSocket = *packetSocket - } - if err := json.NewEncoder(os.Stdout).Encode(hello); err != nil { - log.Fatalf("write hello: %v", err) - } - _ = os.Stdout.Sync() - - mux := http.NewServeMux() - mux.HandleFunc("/status", func(w http.ResponseWriter, r *http.Request) { - status, err := snapshot(r.Context(), localClient, state) - if err != nil { - http.Error(w, err.Error(), http.StatusBadGateway) - return - } - w.Header().Set("content-type", "application/json") - _ = json.NewEncoder(w).Encode(status) - }) - mux.HandleFunc("/ping", func(w http.ResponseWriter, r *http.Request) { - ip := r.URL.Query().Get("ip") - if ip == "" { - http.Error(w, "missing ip", http.StatusBadRequest) - return - } - target, err := netip.ParseAddr(ip) - if err != nil { - 
http.Error(w, fmt.Sprintf("invalid ip: %v", err), http.StatusBadRequest) - return - } - - pingType := tailcfg.PingTSMP - switch r.URL.Query().Get("type") { - case "", "tsmp", "TSMP": - pingType = tailcfg.PingTSMP - case "icmp", "ICMP": - pingType = tailcfg.PingICMP - case "peerapi": - pingType = tailcfg.PingPeerAPI - default: - http.Error(w, "unsupported ping type", http.StatusBadRequest) - return - } - - result, err := localClient.Ping(r.Context(), target, pingType) - if err != nil { - http.Error(w, err.Error(), http.StatusBadGateway) - return - } - - w.Header().Set("content-type", "application/json") - _ = json.NewEncoder(w).Encode(&pingResponse{Result: result}) - }) - mux.HandleFunc("/shutdown", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNoContent) - go func() { - _ = server.Close() - time.Sleep(100 * time.Millisecond) - os.Exit(0) - }() - }) - - httpServer := &http.Server{ - Handler: mux, - } - log.Fatal(httpServer.Serve(ln)) -} - -func servePacketBridge(listener net.Listener, device *chanTUN) { - for { - conn, err := listener.Accept() - if err != nil { - if errors.Is(err, net.ErrClosed) { - return - } - log.Printf("packet accept: %v", err) - continue - } - log.Printf("packet bridge connected") - if err := bridgePacketConn(conn, device); err != nil && !errors.Is(err, io.EOF) { - log.Printf("packet bridge error: %v", err) - } - _ = conn.Close() - log.Printf("packet bridge disconnected") - } -} - -func bridgePacketConn(conn net.Conn, device *chanTUN) error { - errCh := make(chan error, 2) - - go func() { - for { - pkt, err := readFrame(conn) - if err != nil { - errCh <- err - return - } - select { - case <-device.closed: - errCh <- io.EOF - return - case device.Outbound <- pkt: - } - } - }() - - go func() { - for { - select { - case <-device.closed: - errCh <- io.EOF - return - case pkt, ok := <-device.Inbound: - if !ok { - errCh <- io.EOF - return - } - if err := writeFrame(conn, pkt); err != nil { - errCh <- err - return - } - } - 
} - }() - - return <-errCh -} - -func readFrame(r io.Reader) ([]byte, error) { - var size [4]byte - if _, err := io.ReadFull(r, size[:]); err != nil { - return nil, err - } - length := binary.BigEndian.Uint32(size[:]) - if length == 0 { - return []byte{}, nil - } - packet := make([]byte, length) - if _, err := io.ReadFull(r, packet); err != nil { - return nil, err - } - return packet, nil -} - -func writeFrame(w io.Writer, packet []byte) error { - var size [4]byte - binary.BigEndian.PutUint32(size[:], uint32(len(packet))) - if _, err := w.Write(size[:]); err != nil { - return err - } - if len(packet) == 0 { - return nil - } - _, err := w.Write(packet) - return err -} - -func snapshot(ctx context.Context, localClient *local.Client, state *helperState) (*statusResponse, error) { - status, err := localClient.Status(ctx) - if err != nil { - return nil, err - } - - authURL := status.AuthURL - if authURL == "" { - authURL = state.authURLSnapshot() - } - if status.BackendState == ipn.Running.String() { - state.clearAuthURL() - authURL = "" - } else if (status.BackendState == ipn.NeedsLogin.String() || status.BackendState == ipn.NoState.String()) && authURL == "" { - authURL, err = awaitAuthURL(ctx, localClient, state) - if err != nil { - return nil, err - } - } - - response := &statusResponse{ - BackendState: status.BackendState, - AuthURL: authURL, - Running: status.BackendState == ipn.Running.String(), - NeedsLogin: status.BackendState == ipn.NeedsLogin.String(), - Health: append([]string(nil), status.Health...), - } - - if status.CurrentTailnet != nil { - response.TailnetName = status.CurrentTailnet.Name - response.MagicDNSSuffix = status.CurrentTailnet.MagicDNSSuffix - } - if status.Self != nil { - response.SelfDNSName = status.Self.DNSName - } - for _, ip := range status.TailscaleIPs { - response.TailscaleIPs = append(response.TailscaleIPs, ip.String()) - } - for _, key := range status.Peers() { - peer := status.Peer[key] - if peer == nil { - continue - } - summary 
:= peerSummary{ - Name: peer.HostName, - DNSName: peer.DNSName, - Online: peer.Online, - Active: peer.Active, - Relay: peer.Relay, - CurAddr: peer.CurAddr, - LastSeenUnix: peer.LastSeen.Unix(), - } - for _, ip := range peer.TailscaleIPs { - summary.TailscaleIPs = append(summary.TailscaleIPs, ip.String()) - } - response.Peers = append(response.Peers, summary) - } - return response, nil -} - -func serveUDPEcho(ctx context.Context, server *tsnet.Server, localClient *local.Client, port int) { - ip, err := awaitTailscaleIP(ctx, localClient) - if err != nil { - log.Printf("udp echo setup failed: %v", err) - return - } - - listenAddr := net.JoinHostPort(ip.String(), strconv.Itoa(port)) - pc, err := server.ListenPacket("udp", listenAddr) - if err != nil { - log.Printf("udp echo listen failed on %s: %v", listenAddr, err) - return - } - defer pc.Close() - - log.Printf("udp echo listening on %s", pc.LocalAddr()) - buf := make([]byte, 64<<10) - for { - n, addr, err := pc.ReadFrom(buf) - if err != nil { - if errors.Is(err, net.ErrClosed) || errors.Is(err, io.EOF) { - return - } - log.Printf("udp echo read failed: %v", err) - return - } - if _, err := pc.WriteTo(buf[:n], addr); err != nil { - log.Printf("udp echo write failed: %v", err) - return - } - } -} - -func awaitTailscaleIP(ctx context.Context, localClient *local.Client) (netip.Addr, error) { - for range 60 { - status, err := localClient.StatusWithoutPeers(ctx) - if err == nil { - for _, ip := range status.TailscaleIPs { - if ip.Is4() { - return ip, nil - } - } - for _, ip := range status.TailscaleIPs { - if ip.Is6() { - return ip, nil - } - } - } - select { - case <-ctx.Done(): - return netip.Addr{}, ctx.Err() - case <-time.After(250 * time.Millisecond): - } - } - return netip.Addr{}, errors.New("timed out waiting for tailscale IP") -} - -func awaitAuthURL(ctx context.Context, localClient *local.Client, state *helperState) (string, error) { - watchCtx, cancel := context.WithTimeout(ctx, 8*time.Second) - defer cancel() - 
- watcher, err := localClient.WatchIPNBus(watchCtx, ipn.NotifyInitialState) - if err != nil { - return "", err - } - defer watcher.Close() - - if err := localClient.StartLoginInteractive(ctx); err != nil { - return "", err - } - - for { - notify, err := watcher.Next() - if err != nil { - if errors.Is(err, context.DeadlineExceeded) || errors.Is(err, context.Canceled) { - return state.authURLSnapshot(), nil - } - return "", err - } - if notify.BrowseToURL != nil && *notify.BrowseToURL != "" { - state.setAuthURL(*notify.BrowseToURL) - return *notify.BrowseToURL, nil - } - if notify.State != nil && *notify.State == ipn.Running { - state.clearAuthURL() - return "", nil - } - } -} diff --git a/Tools/version.sh b/Tools/version.sh deleted file mode 100755 index fcb3f00..0000000 --- a/Tools/version.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash - -export PATH="$PATH:/opt/homebrew/bin:/usr/local/bin:/etc/profiles/per-user/$USER/bin" - -set -euo pipefail - -cd "$(dirname "${BASH_SOURCE[0]}")"/.. - -TAG_PREFIX="builds/" - -CURRENT_BUILD=$(git tag --points-at HEAD | tail -n 1) -LATEST_BUILD="$TAG_PREFIX$(git tag -l "builds/[0-9]*" | cut -d'/' -f 2 | sort -n | tail -n 1)" - -CURRENT_BUILD_NUMBER=${CURRENT_BUILD#$TAG_PREFIX} -LATEST_BUILD_NUMBER=${LATEST_BUILD#$TAG_PREFIX} -if [[ -z $LATEST_BUILD_NUMBER ]]; then - LATEST_BUILD_NUMBER="0" -fi - -if [[ ! 
-z $LATEST_BUILD && $(git merge-base --is-ancestor $LATEST_BUILD HEAD) -ne 0 ]]; then - echo "error: HEAD is not descended from build $LATEST_BUILD_NUMBER" >&2 - exit 1 -fi - -BUILD_NUMBER=$LATEST_BUILD_NUMBER - -if [[ $# -gt 0 && "$1" == "increment" ]]; then - NEW_BUILD_NUMBER=$((LATEST_BUILD_NUMBER + 1)) - NEW_TAG="$TAG_PREFIX$NEW_BUILD_NUMBER" - BUILD_NUMBER=$NEW_BUILD_NUMBER - - git tag $NEW_TAG - git push --quiet origin $NEW_TAG - gh release create "$NEW_TAG" -t "Build $BUILD_NUMBER" --verify-tag --generate-notes >/dev/null -fi - -if [[ -z $(grep $BUILD_NUMBER Apple/Configuration/Version.xcconfig 2>/dev/null) ]]; then - echo "CURRENT_PROJECT_VERSION = $BUILD_NUMBER" > Apple/Configuration/Version.xcconfig - git update-index --assume-unchanged Apple/Configuration/Version.xcconfig -fi - -if [[ $# -gt 0 && "$1" == "status" ]]; then - if [[ $CURRENT_BUILD_NUMBER -eq $LATEST_BUILD_NUMBER ]]; then - echo "clean" - else - echo "dirty" - fi - exit 0 -fi - -echo $BUILD_NUMBER diff --git a/burrow-gtk/Cargo.lock b/burrow-gtk/Cargo.lock index a1a9ebd..6721318 100644 --- a/burrow-gtk/Cargo.lock +++ b/burrow-gtk/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 4 +version = 3 [[package]] name = "addr2line" @@ -38,18 +38,6 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "ahash" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - [[package]] name = "aho-corasick" version = "1.1.2" @@ -126,49 +114,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-stream" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22068c0c19514942eefcfd4daf8976ef1aad84e61539f95cd200c35202f80af5" -dependencies = [ - "async-stream-impl 0.2.1", - "futures-core", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl 0.3.6", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25f9db3b38af870bf7e5cc649167533b493928e50744e2c30ae350230b414670" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "async-trait" version = "0.1.77" @@ -177,76 +122,15 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - [[package]] name = "autocfg" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "axum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" -dependencies = [ - "async-trait", - "axum-core", - "bytes", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "axum-core" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "backtrace" version = "0.3.69" @@ -268,12 +152,6 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - [[package]] name = "base64ct" version = "1.6.0" @@ -296,7 +174,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", "syn 1.0.109", "which", @@ -319,9 +197,9 @@ dependencies = [ 
"proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash", "shlex", - "syn 2.0.106", + "syn 2.0.48", "which", ] @@ -374,19 +252,15 @@ dependencies = [ "aead", "anyhow", "async-channel", - "async-stream 0.2.1", - "axum", - "base64 0.21.7", + "base64", "blake2", "caps", "chacha20poly1305", "clap", "console", - "dotenv", "fehler", "futures", "hmac", - "hyper-util", "ip_network", "ip_network_table", "libsystemd", @@ -394,23 +268,13 @@ dependencies = [ "nix 0.27.1", "once_cell", "parking_lot", - "prost", - "prost-types", - "rand 0.8.5", - "rand_core 0.6.4", - "reqwest 0.12.5", + "rand", + "rand_core", "ring", - "rusqlite", - "rust-ini", "schemars", "serde", "serde_json", "tokio", - "tokio-stream", - "toml", - "tonic", - "tonic-build", - "tower", "tracing", "tracing-journald", "tracing-log 0.1.4", @@ -440,9 +304,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "bzip2" @@ -476,7 +340,7 @@ dependencies = [ "glib", "libc", "once_cell", - "thiserror 1.0.56", + "thiserror", ] [[package]] @@ -497,7 +361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "190baaad529bcfbde9e1a19022c42781bdb6ff9de25721abdb8fd98c0807730b" dependencies = [ "libc", - "thiserror 1.0.56", + "thiserror", ] [[package]] @@ -535,12 +399,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - [[package]] name = "chacha20" 
version = "0.9.1" @@ -618,7 +476,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -655,26 +513,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "const-random" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" -dependencies = [ - "const-random-macro", -] - -[[package]] -name = "const-random-macro" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" -dependencies = [ - "getrandom 0.2.12", - "once_cell", - "tiny-keccak", -] - [[package]] name = "constant_time_eq" version = "0.1.5" @@ -721,12 +559,6 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - [[package]] name = "crypto-common" version = "0.1.6" @@ -734,7 +566,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "typenum", ] @@ -762,7 +594,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -785,21 +617,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "dlv-list" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" -dependencies = [ - "const-random", -] - -[[package]] -name = "dotenv" -version = "0.15.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" - [[package]] name = "dyn-clone" version = "1.0.16" @@ -864,18 +681,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "fallible-iterator" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" - -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - [[package]] name = "fastrand" version = "2.0.1" @@ -918,12 +723,6 @@ dependencies = [ "rustc_version", ] -[[package]] -name = "fixedbitset" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" - [[package]] name = "flate2" version = "1.0.28" @@ -1039,7 +838,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -1151,21 +950,7 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi", - "wasi 0.14.7+wasi-0.2.4", + "wasi", "wasm-bindgen", ] @@ -1212,7 +997,7 @@ dependencies = [ "once_cell", "pin-project-lite", "smallvec", - "thiserror 1.0.56", + "thiserror", ] [[package]] @@ -1248,7 +1033,7 @@ dependencies = [ "memchr", "once_cell", "smallvec", - "thiserror 1.0.56", + "thiserror", ] [[package]] @@ -1421,62 +1206,19 @@ dependencies = [ "futures-core", "futures-sink", 
"futures-util", - "http 0.2.11", - "indexmap 2.11.4", + "http", + "indexmap", "slab", "tokio", "tokio-util", "tracing", ] -[[package]] -name = "h2" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http 1.3.1", - "indexmap 2.11.4", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - [[package]] name = "hashbrown" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" -dependencies = [ - "ahash", -] - -[[package]] -name = "hashbrown" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" - -[[package]] -name = "hashlink" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" -dependencies = [ - "hashbrown 0.14.3", -] [[package]] name = "heck" @@ -1484,6 +1226,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "hermit-abi" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" + [[package]] name = "hex" version = "0.4.3" @@ -1519,17 +1267,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" version = "0.4.6" @@ -1537,38 +1274,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http 1.3.1", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http 1.3.1", - "http-body 1.0.1", + "http", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.10.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -1586,71 +1300,20 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.24", - "http 0.2.11", - "http-body 0.4.6", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.5", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "h2 0.4.12", - "http 1.3.1", - "http-body 1.0.1", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "smallvec", - 
"tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http 1.3.1", - "hyper 1.6.0", - "hyper-util", - "rustls", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", - "webpki-roots 1.0.2", -] - -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper 1.6.0", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - [[package]] name = "hyper-tls" version = "0.5.0" @@ -1658,33 +1321,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.28", + "hyper", "native-tls", "tokio", "tokio-native-tls", ] -[[package]] -name = "hyper-util" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "hyper 1.6.0", - "libc", - "pin-project-lite", - "socket2 0.5.10", - "tokio", - "tower-service", - "tracing", -] - [[package]] name = "idna" version = "0.5.0" @@ -1697,22 +1339,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.3" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] - -[[package]] -name = "indexmap" -version = "2.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown", ] [[package]] @@ -1724,17 +1356,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "io-uring" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" -dependencies = [ - "bitflags 2.4.2", - "cfg-if", - "libc", -] - [[package]] name = "ip_network" version = "0.4.1" @@ -1763,15 +1384,6 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.10" @@ -1789,11 +1401,10 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.80" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" dependencies = [ - "once_cell", "wasm-bindgen", ] @@ -1844,9 +1455,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.176" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "libloading" @@ -1868,17 +1479,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "libsqlite3-sys" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - [[package]] name = "libsystemd" version = "0.7.0" @@ -1893,7 +1493,7 @@ dependencies = [ "once_cell", "serde", "sha2", - "thiserror 1.0.56", + "thiserror", "uuid", ] @@ -1932,12 +1532,6 @@ version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" -[[package]] -name = "lru-slab" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - [[package]] name = "malloc_buf" version = "0.0.6" @@ -1956,12 +1550,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - [[package]] name = "memchr" version = "2.7.1" @@ -1994,7 +1582,7 @@ checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" dependencies = [ "miette-derive", "once_cell", - "thiserror 1.0.56", + "thiserror", "unicode-width", ] @@ -2006,7 +1594,7 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -2032,28 +1620,22 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.3" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi", + "windows-sys 0.48.0", ] -[[package]] -name = "multimap" -version = "0.10.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" - [[package]] name = "nanorand" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a51313c5820b0b02bd422f4b44776fbf47961755c74ce64afc73bfad10226c3" dependencies = [ - "getrandom 0.2.12", + "getrandom", ] [[package]] @@ -2119,6 +1701,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + [[package]] name = "objc" version = "0.2.7" @@ -2192,7 +1784,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -2213,16 +1805,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "ordered-multimap" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" -dependencies = [ - "dlv-list", - "hashbrown 0.14.3", -] - [[package]] name = "overload" version = "0.1.1" @@ -2291,7 +1873,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -2319,16 +1901,6 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" -[[package]] -name = "petgraph" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" -dependencies = [ - "fixedbitset", - "indexmap 2.11.4", -] - [[package]] name = 
"pin-project" version = "1.1.4" @@ -2346,7 +1918,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -2403,7 +1975,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5" dependencies = [ "proc-macro2", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -2442,120 +2014,13 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] -[[package]] -name = "prost" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-build" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" -dependencies = [ - "heck", - "itertools", - "log", - "multimap", - "once_cell", - "petgraph", - "prettyplease", - "prost", - "prost-types", - "regex", - "syn 2.0.106", - "tempfile", -] - -[[package]] -name = "prost-derive" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" -dependencies = [ - "anyhow", - "itertools", - "proc-macro2", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "prost-types" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" -dependencies = [ - "prost", 
-] - -[[package]] -name = "quinn" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" -dependencies = [ - "bytes", - "cfg_aliases", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash 2.1.1", - "rustls", - "socket2 0.5.10", - "thiserror 2.0.16", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-proto" -version = "0.11.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" -dependencies = [ - "bytes", - "getrandom 0.3.3", - "lru-slab", - "rand 0.9.2", - "ring", - "rustc-hash 2.1.1", - "rustls", - "rustls-pki-types", - "slab", - "thiserror 2.0.16", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2 0.5.10", - "tracing", - "windows-sys 0.52.0", -] - [[package]] name = "quote" version = "1.0.35" @@ -2565,12 +2030,6 @@ dependencies = [ "proc-macro2", ] -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - [[package]] name = "rand" version = "0.8.5" @@ -2578,18 +2037,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.3", + 
"rand_chacha", + "rand_core", ] [[package]] @@ -2599,17 +2048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.3", + "rand_core", ] [[package]] @@ -2618,16 +2057,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.12", -] - -[[package]] -name = "rand_core" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" -dependencies = [ - "getrandom 0.3.3", + "getrandom", ] [[package]] @@ -2709,7 +2139,7 @@ checksum = "9340e2553c0a184a80a0bfa1dcf73c47f3d48933aa6be90724b202f9fbd24735" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -2718,15 +2148,15 @@ version = "0.11.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" dependencies = [ - "base64 0.21.7", + "base64", "bytes", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.24", - "http 0.2.11", - "http-body 0.4.6", - "hyper 0.14.28", + "h2", + "http", + "http-body", + "hyper", "hyper-tls", "ipnet", "js-sys", @@ -2747,49 +2177,7 @@ dependencies = [ "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "winreg 0.50.0", -] - -[[package]] -name = "reqwest" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" -dependencies = [ 
- "base64 0.22.1", - "bytes", - "futures-core", - "futures-util", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-rustls", - "hyper-util", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls", - "rustls-pemfile", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tokio-rustls", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots 0.26.11", - "winreg 0.52.0", + "winreg", ] [[package]] @@ -2799,37 +2187,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", - "getrandom 0.2.12", + "getrandom", "libc", "spin", "untrusted", "windows-sys 0.48.0", ] -[[package]] -name = "rusqlite" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b838eba278d213a8beaf485bd313fd580ca4505a00d5871caeb1457c55322cae" -dependencies = [ - "bitflags 2.4.2", - "fallible-iterator", - "fallible-streaming-iterator", - "hashlink", - "libsqlite3-sys", - "smallvec", -] - -[[package]] -name = "rust-ini" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" -dependencies = [ - "cfg-if", - "ordered-multimap", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -2842,12 +2206,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" - [[package]] name = "rustc_version" version = "0.4.0" @@ -2870,56 +2228,6 @@ dependencies = 
[ "windows-sys 0.52.0", ] -[[package]] -name = "rustls" -version = "0.23.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" -dependencies = [ - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-pemfile" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "rustls-pki-types" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" -dependencies = [ - "web-time", - "zeroize", -] - -[[package]] -name = "rustls-webpki" -version = "0.103.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - [[package]] name = "ryu" version = "1.0.16" @@ -2996,32 +2304,22 @@ checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" [[package]] name = "serde" -version = "1.0.226" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.226" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4" +checksum = 
"63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.226" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33" +checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -3046,22 +2344,11 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" -dependencies = [ - "itoa", - "serde", - "serde_core", -] - [[package]] name = "serde_spanned" -version = "0.6.9" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" dependencies = [ "serde", ] @@ -3126,15 +2413,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" -[[package]] -name = "signal-hook-registry" -version = "1.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" -dependencies = [ - "libc", -] - [[package]] name = "slab" version = "0.4.9" @@ -3162,12 +2440,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.10" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 
0.48.0", ] [[package]] @@ -3185,13 +2463,13 @@ version = "9.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da7a2b3c2bc9693bcb40870c4e9b5bf0d79f9cb46273321bf855ec513e919082" dependencies = [ - "base64 0.21.7", + "base64", "digest", "hex", "miette", "sha-1", "sha2", - "thiserror 1.0.56", + "thiserror", "xxhash-rust", ] @@ -3220,21 +2498,15 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" - [[package]] name = "system-configuration" version = "0.5.1" @@ -3300,16 +2572,7 @@ version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ - "thiserror-impl 1.0.56", -] - -[[package]] -name = "thiserror" -version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" -dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl", ] [[package]] @@ -3320,18 +2583,7 @@ checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -3362,15 +2614,6 @@ 
version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - [[package]] name = "tinyvec" version = "1.6.0" @@ -3388,33 +2631,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.46.1" +version = "1.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" dependencies = [ "backtrace", "bytes", - "io-uring", "libc", "mio", + "num_cpus", "pin-project-lite", - "signal-hook-registry", - "slab", - "socket2 0.5.10", + "socket2 0.5.5", "tokio-macros", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -3427,27 +2668,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd" -dependencies = [ - "rustls", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" -dependencies = [ - 
"futures-core", - "pin-project-lite", - "tokio", -] - [[package]] name = "tokio-util" version = "0.7.10" @@ -3464,21 +2684,21 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.23" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.27", + "toml_edit 0.20.2", ] [[package]] name = "toml_datetime" -version = "0.6.11" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] @@ -3489,101 +2709,24 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.11.4", + "indexmap", "toml_datetime", - "winnow 0.5.34", + "winnow", ] [[package]] name = "toml_edit" -version = "0.22.27" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap 2.11.4", + "indexmap", "serde", "serde_spanned", "toml_datetime", - "toml_write", - "winnow 0.7.13", + "winnow", ] -[[package]] -name = "toml_write" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" - -[[package]] -name = "tonic" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" 
-dependencies = [ - "async-stream 0.3.6", - "async-trait", - "axum", - "base64 0.22.1", - "bytes", - "h2 0.4.12", - "http 1.3.1", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-timeout", - "hyper-util", - "percent-encoding", - "pin-project", - "prost", - "socket2 0.5.10", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic-build" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" -dependencies = [ - "prettyplease", - "proc-macro2", - "prost-build", - "prost-types", - "quote", - "syn 2.0.106", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "indexmap 1.9.3", - "pin-project", - "pin-project-lite", - "rand 0.8.5", - "slab", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - [[package]] name = "tower-service" version = "0.3.2" @@ -3596,7 +2739,6 @@ version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -3610,7 +2752,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] @@ -3709,10 +2851,10 @@ dependencies = [ "libloading 0.7.4", "log", "nix 0.26.4", - "reqwest 0.11.23", + "reqwest", "schemars", "serde", - "socket2 0.5.10", + "socket2 
0.4.10", "ssri", "tempfile", "tokio", @@ -3836,48 +2978,28 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -[[package]] -name = "wasi" -version = "0.14.7+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" -dependencies = [ - "wasip2", -] - -[[package]] -name = "wasip2" -version = "1.0.1+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" -dependencies = [ - "wit-bindgen", -] - [[package]] name = "wasm-bindgen" -version = "0.2.103" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" dependencies = [ "cfg-if", - "once_cell", - "rustversion", "wasm-bindgen-macro", - "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.103" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" dependencies = [ "bumpalo", "log", + "once_cell", "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", "wasm-bindgen-shared", ] @@ -3895,9 +3017,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.103" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3905,25 +3027,22 @@ 
dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.103" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.103" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" -dependencies = [ - "unicode-ident", -] +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "web-sys" @@ -3935,34 +3054,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-roots" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" -dependencies = [ - "webpki-roots 1.0.2", -] - -[[package]] -name = "webpki-roots" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" -dependencies = [ - "rustls-pki-types", -] - [[package]] name = "which" version = "4.4.2" @@ -4153,15 +3244,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winnow" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" -dependencies = [ - "memchr", -] - [[package]] name = "winreg" version = 
"0.50.0" @@ -4172,22 +3254,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "winreg" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "wit-bindgen" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" - [[package]] name = "x25519-dalek" version = "2.0.0" @@ -4195,7 +3261,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb66477291e7e8d2b0ff1bcb900bf29489a9692816d79874bea351e7a8b6de96" dependencies = [ "curve25519-dalek", - "rand_core 0.6.4", + "rand_core", "serde", "zeroize", ] @@ -4206,26 +3272,6 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53be06678ed9e83edb1745eb72efc0bbcd7b5c3c35711a860906aed827a13d61" -[[package]] -name = "zerocopy" -version = "0.8.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "zeroize" version = "1.7.0" @@ -4243,7 +3289,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.106", + "syn 2.0.48", ] [[package]] diff --git a/burrow-gtk/Cargo.toml b/burrow-gtk/Cargo.toml index b12577a..21cb52e 100644 --- a/burrow-gtk/Cargo.toml +++ b/burrow-gtk/Cargo.toml @@ -11,8 +11,6 @@ relm4 = { version = "0.6", features = ["libadwaita", 
"gnome_44"]} burrow = { version = "*", path = "../burrow/" } tokio = { version = "1.35.0", features = ["time", "sync"] } gettext-rs = { version = "0.7.0", features = ["gettext-system"] } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" [build-dependencies] anyhow = "1.0" diff --git a/burrow-gtk/build-aux/Dockerfile b/burrow-gtk/build-aux/Dockerfile index 834e450..df07c4a 100644 --- a/burrow-gtk/build-aux/Dockerfile +++ b/burrow-gtk/build-aux/Dockerfile @@ -4,7 +4,7 @@ ENV DEBIAN_FRONTEND=noninteractive RUN set -eux && \ dnf update -y && \ - dnf install -y clang ninja-build cmake meson gtk4-devel glib2-devel libadwaita-devel desktop-file-utils libappstream-glib util-linux wget fuse fuse-libs file sqlite sqlite-devel protobuf-compiler protobuf-devel + dnf install -y clang ninja-build cmake meson gtk4-devel glib2-devel libadwaita-devel desktop-file-utils libappstream-glib util-linux wget fuse fuse-libs file RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable --profile minimal ENV PATH="/root/.cargo/bin:${PATH}" @@ -12,8 +12,6 @@ ENV PATH="/root/.cargo/bin:${PATH}" WORKDIR /app COPY . 
/app -ENV SQLITE3_STATIC=1 - RUN cd /app/burrow-gtk/ && \ ./build-aux/build_appimage.sh diff --git a/burrow-gtk/build-aux/build_appimage.sh b/burrow-gtk/build-aux/build_appimage.sh index f054cd9..248cca7 100755 --- a/burrow-gtk/build-aux/build_appimage.sh +++ b/burrow-gtk/build-aux/build_appimage.sh @@ -5,7 +5,6 @@ set -ex BURROW_GTK_ROOT="$(readlink -f $(dirname -- "$(readlink -f -- "$BASH_SOURCE")")/..)" BURROW_GTK_BUILD="$BURROW_GTK_ROOT/build-appimage" LINUXDEPLOY_VERSION="${LINUXDEPLOY_VERSION:-"1-alpha-20240109-1"}" -BURROW_BUILD_TYPE="${BURROW_BUILD_TYPE:-"release"}" if [ "$BURROW_GTK_ROOT" != $(pwd) ]; then echo "Make sure to cd into burrow-gtk" @@ -22,11 +21,8 @@ elif [ "$ARCHITECTURE" == "aarch64" ]; then chmod a+x /tmp/linuxdeploy fi - -CFLAGS="-I/usr/local/include -I/usr/include/$MUSL_TARGET -fPIE" -meson setup $BURROW_GTK_BUILD --bindir bin --prefix /usr --buildtype $BURROW_BUILD_TYPE +meson setup $BURROW_GTK_BUILD --bindir bin --prefix /usr meson compile -C $BURROW_GTK_BUILD DESTDIR=AppDir meson install -C $BURROW_GTK_BUILD -cargo b --$BURROW_BUILD_TYPE --manifest-path=../Cargo.toml -/tmp/linuxdeploy --appimage-extract-and-run --appdir $BURROW_GTK_BUILD/AppDir -e $BURROW_GTK_BUILD/../../target/$BURROW_BUILD_TYPE/burrow --output appimage +/tmp/linuxdeploy --appimage-extract-and-run --appdir $BURROW_GTK_BUILD/AppDir --output appimage mv *.AppImage $BURROW_GTK_BUILD diff --git a/burrow-gtk/src/account_store.rs b/burrow-gtk/src/account_store.rs deleted file mode 100644 index 6aee78b..0000000 --- a/burrow-gtk/src/account_store.rs +++ /dev/null @@ -1,139 +0,0 @@ -use anyhow::{Context, Result}; -use serde::{Deserialize, Serialize}; -use std::{ - path::PathBuf, - time::{SystemTime, UNIX_EPOCH}, -}; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AccountRecord { - pub id: String, - pub kind: AccountKind, - pub title: String, - pub authority: Option, - pub account: String, - pub identity: String, - pub hostname: Option, - pub tailnet: Option, - 
pub note: Option, - pub created_at: u64, - pub updated_at: u64, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum AccountKind { - WireGuard, - Tor, - Tailnet, -} - -impl AccountKind { - pub fn title(self) -> &'static str { - match self { - Self::WireGuard => "WireGuard", - Self::Tor => "Tor", - Self::Tailnet => "Tailnet", - } - } - - fn sort_rank(self) -> u8 { - match self { - Self::Tailnet => 0, - Self::Tor => 1, - Self::WireGuard => 2, - } - } -} - -pub fn load() -> Result> { - let path = storage_path()?; - if !path.exists() { - return Ok(Vec::new()); - } - let data = - std::fs::read(&path).with_context(|| format!("failed to read {}", path.display()))?; - serde_json::from_slice(&data).with_context(|| format!("failed to parse {}", path.display())) -} - -pub fn upsert(mut record: AccountRecord) -> Result> { - let mut accounts = load()?; - let now = timestamp(); - record.updated_at = now; - if record.created_at == 0 { - record.created_at = now; - } - - if let Some(index) = accounts.iter().position(|account| account.id == record.id) { - accounts[index] = record; - } else { - accounts.push(record); - } - accounts.sort_by(|lhs, rhs| { - lhs.kind - .sort_rank() - .cmp(&rhs.kind.sort_rank()) - .then_with(|| lhs.title.to_lowercase().cmp(&rhs.title.to_lowercase())) - }); - persist(&accounts)?; - Ok(accounts) -} - -pub fn new_record( - kind: AccountKind, - title: String, - authority: Option, - account: String, - identity: String, - hostname: Option, - tailnet: Option, - note: Option, -) -> AccountRecord { - let now = timestamp(); - AccountRecord { - id: format!("{}-{now}", kind.title().to_ascii_lowercase()), - kind, - title, - authority, - account, - identity, - hostname, - tailnet, - note, - created_at: now, - updated_at: now, - } -} - -fn persist(accounts: &[AccountRecord]) -> Result<()> { - let path = storage_path()?; - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - 
.with_context(|| format!("failed to create {}", parent.display()))?; - } - let data = serde_json::to_vec_pretty(accounts).context("failed to encode account store")?; - std::fs::write(&path, data).with_context(|| format!("failed to write {}", path.display())) -} - -fn storage_path() -> Result { - if let Some(data_home) = std::env::var_os("XDG_DATA_HOME") { - return Ok(PathBuf::from(data_home) - .join("burrow") - .join("accounts.json")); - } - if let Some(home) = std::env::var_os("HOME") { - return Ok(PathBuf::from(home) - .join(".local") - .join("share") - .join("burrow") - .join("accounts.json")); - } - Ok(std::env::temp_dir().join("burrow-accounts.json")) -} - -fn timestamp() -> u64 { - SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|duration| duration.as_secs()) - .unwrap_or_default() -} diff --git a/burrow-gtk/src/components/app.rs b/burrow-gtk/src/components/app.rs index 7354825..57348ef 100644 --- a/burrow-gtk/src/components/app.rs +++ b/burrow-gtk/src/components/app.rs @@ -1,19 +1,24 @@ use super::*; use anyhow::Context; +use std::time::Duration; + +const RECONNECT_POLL_TIME: Duration = Duration::from_secs(5); pub struct App { - _home_screen: AsyncController, + daemon_client: Arc>>, + _settings_screen: Controller, + switch_screen: AsyncController, } #[derive(Debug)] pub enum AppMsg { None, + PostInit, } impl App { pub fn run() { let app = RelmApp::new(config::ID); - relm4::set_global_css(APP_CSS); Self::setup_gresources().unwrap(); Self::setup_i18n().unwrap(); @@ -44,7 +49,7 @@ impl AsyncComponent for App { view! 
{ adw::Window { set_title: Some("Burrow"), - set_default_size: (900, 760), + set_default_size: (640, 480), } } @@ -53,84 +58,91 @@ impl AsyncComponent for App { root: Self::Root, sender: AsyncComponentSender, ) -> AsyncComponentParts { - let home_screen = home_screen::HomeScreen::builder() - .launch(()) + let daemon_client = Arc::new(Mutex::new(DaemonClient::new().await.ok())); + + let switch_screen = switch_screen::SwitchScreen::builder() + .launch(switch_screen::SwitchScreenInit { + daemon_client: Arc::clone(&daemon_client), + }) + .forward(sender.input_sender(), |_| AppMsg::None); + + let settings_screen = settings_screen::SettingsScreen::builder() + .launch(settings_screen::SettingsScreenInit { + daemon_client: Arc::clone(&daemon_client), + }) .forward(sender.input_sender(), |_| AppMsg::None); let widgets = view_output!(); + let view_stack = adw::ViewStack::new(); + view_stack.add_titled(switch_screen.widget(), None, "Switch"); + view_stack.add_titled(settings_screen.widget(), None, "Settings"); + + let view_switcher_bar = adw::ViewSwitcherBar::builder().stack(&view_stack).build(); + view_switcher_bar.set_reveal(true); + + // When libadwaita 1.4 support becomes more avaliable, this approach is more appropriate + // + // let toolbar = adw::ToolbarView::new(); + // toolbar.add_top_bar( + // &adw::HeaderBar::builder() + // .title_widget(>k::Label::new(Some("Burrow"))) + // .build(), + // ); + // toolbar.add_bottom_bar(&view_switcher_bar); + // toolbar.set_content(Some(&view_stack)); + // root.set_content(Some(&toolbar)); + let content = gtk::Box::new(gtk::Orientation::Vertical, 0); content.append( &adw::HeaderBar::builder() .title_widget(>k::Label::new(Some("Burrow"))) .build(), ); - content.append(home_screen.widget()); + content.append(&view_stack); + content.append(&view_switcher_bar); root.set_content(Some(&content)); - let model = App { _home_screen: home_screen }; + sender.input(AppMsg::PostInit); + + let model = App { + daemon_client, + switch_screen, + 
_settings_screen: settings_screen, + }; AsyncComponentParts { model, widgets } } async fn update( &mut self, - msg: Self::Input, + _msg: Self::Input, _sender: AsyncComponentSender, _root: &Self::Root, ) { - match msg { - AppMsg::None => {} + loop { + tokio::time::sleep(RECONNECT_POLL_TIME).await; + { + let mut daemon_client = self.daemon_client.lock().await; + let mut disconnected_daemon_client = false; + + if let Some(daemon_client) = daemon_client.as_mut() { + if let Err(_e) = daemon_client.send_command(DaemonCommand::ServerInfo).await { + disconnected_daemon_client = true; + self.switch_screen + .emit(switch_screen::SwitchScreenMsg::DaemonDisconnect); + } + } + + if disconnected_daemon_client || daemon_client.is_none() { + *daemon_client = DaemonClient::new().await.ok(); + if daemon_client.is_some() { + self.switch_screen + .emit(switch_screen::SwitchScreenMsg::DaemonReconnect); + } + } + } } } } - -const APP_CSS: &str = r#" -.empty-state { - border-radius: 18px; - padding: 22px; - background: alpha(@card_bg_color, 0.72); -} - -.summary-card { - border-radius: 18px; - padding: 14px; - background: alpha(@card_bg_color, 0.72); -} - -.network-card { - border-radius: 10px; - padding: 16px; - box-shadow: 0 2px 6px alpha(black, 0.14); -} - -.wireguard-card { - background: linear-gradient(135deg, #3277d8, #174ea6); -} - -.tailnet-card { - background: linear-gradient(135deg, #31b891, #147d69); -} - -.network-card-kind, -.network-card-title, -.network-card-detail { - color: white; -} - -.network-card-kind { - opacity: 0.86; - font-weight: 700; -} - -.network-card-title { - font-size: 1.22em; - font-weight: 700; -} - -.network-card-detail { - opacity: 0.92; - font-family: monospace; -} -"#; diff --git a/burrow-gtk/src/components/home_screen.rs b/burrow-gtk/src/components/home_screen.rs deleted file mode 100644 index 0bfdda2..0000000 --- a/burrow-gtk/src/components/home_screen.rs +++ /dev/null @@ -1,1178 +0,0 @@ -use super::*; -use crate::account_store::{self, AccountKind, 
AccountRecord}; -use std::time::Duration; - -pub struct HomeScreen { - daemon_banner: adw::Banner, - network_status: gtk::Label, - network_cards: gtk::Box, - account_status: gtk::Label, - account_rows: gtk::Box, - tunnel_status: gtk::Label, - tunnel_button: gtk::Button, - tunnel_state: Option, - tailnet_session_id: Option, - tailnet_running: bool, -} - -#[derive(Debug)] -pub enum HomeScreenMsg { - EnsureDaemon, - Refresh, - TunnelAction, - OpenWireGuard, - OpenTor, - OpenTailnet, - AddWireGuard { - title: String, - account: String, - identity: String, - config: String, - }, - SaveTor { - title: String, - account: String, - identity: String, - note: String, - }, - DiscoverTailnet(String), - ProbeTailnet(String), - StartTailnetLogin { - authority: String, - account: String, - identity: String, - hostname: Option, - }, - PollTailnetLogin, - CancelTailnetLogin, - AddTailnet { - authority: String, - account: String, - identity: String, - hostname: Option, - tailnet: Option, - }, -} - -#[relm4::component(pub, async)] -impl AsyncComponent for HomeScreen { - type Init = (); - type Input = HomeScreenMsg; - type Output = (); - type CommandOutput = (); - - view! 
{ - gtk::ScrolledWindow { - set_vexpand: true, - - adw::Clamp { - set_maximum_size: 900, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 24, - set_margin_all: 24, - - gtk::Box { - set_orientation: gtk::Orientation::Horizontal, - set_spacing: 16, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 6, - set_hexpand: true, - - gtk::Label { - add_css_class: "title-1", - set_xalign: 0.0, - set_label: "Burrow", - }, - - gtk::Label { - add_css_class: "heading", - add_css_class: "dim-label", - set_xalign: 0.0, - set_label: "Networks and accounts", - }, - }, - - #[name(add_button)] - gtk::MenuButton { - add_css_class: "flat", - set_icon_name: "list-add-symbolic", - set_tooltip_text: Some("Add"), - set_valign: Align::Start, - }, - }, - - #[name(daemon_banner)] - adw::Banner { - set_title: "Starting Burrow daemon", - set_revealed: false, - }, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 12, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 4, - - gtk::Label { - add_css_class: "title-2", - set_xalign: 0.0, - set_label: "Networks", - }, - - #[name(network_status)] - gtk::Label { - add_css_class: "dim-label", - set_xalign: 0.0, - set_wrap: true, - set_label: "Stored daemon networks and their active account selectors", - }, - }, - - gtk::ScrolledWindow { - set_policy: (gtk::PolicyType::Automatic, gtk::PolicyType::Never), - set_min_content_height: 190, - - #[name(network_cards)] - gtk::Box { - set_orientation: gtk::Orientation::Horizontal, - set_spacing: 14, - }, - }, - }, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 12, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 4, - - gtk::Label { - add_css_class: "title-2", - set_xalign: 0.0, - set_label: "Accounts", - }, - - gtk::Label { - add_css_class: "dim-label", - set_xalign: 0.0, - set_wrap: true, - set_label: "Per-network identities and sign-in state", - }, - 
}, - - #[name(account_rows)] - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 8, - set_margin_all: 0, - set_valign: Align::Center, - }, - - #[name(account_status)] - gtk::Label { - add_css_class: "dim-label", - set_xalign: 0.0, - set_wrap: true, - set_label: "", - }, - }, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 8, - - gtk::Box { - set_orientation: gtk::Orientation::Vertical, - set_spacing: 4, - - gtk::Label { - add_css_class: "title-2", - set_xalign: 0.0, - set_label: "Tunnel", - }, - - gtk::Label { - add_css_class: "dim-label", - set_xalign: 0.0, - set_label: "Current daemon tunnel state", - }, - }, - - #[name(tunnel_status)] - gtk::Label { - set_xalign: 0.0, - set_label: "Checking daemon status", - }, - - #[name(tunnel_button)] - gtk::Button { - add_css_class: "suggested-action", - set_label: "Start", - set_halign: Align::Start, - connect_clicked => HomeScreenMsg::TunnelAction, - }, - }, - } - } - } - } - - async fn init( - _: Self::Init, - _root: Self::Root, - sender: AsyncComponentSender, - ) -> AsyncComponentParts { - let widgets = view_output!(); - configure_add_popover(&widgets.add_button, &sender); - - let refresh_sender = sender.input_sender().clone(); - relm4::spawn(async move { - loop { - tokio::time::sleep(Duration::from_secs(5)).await; - refresh_sender.emit(HomeScreenMsg::Refresh); - } - }); - - let model = HomeScreen { - daemon_banner: widgets.daemon_banner.clone(), - network_status: widgets.network_status.clone(), - network_cards: widgets.network_cards.clone(), - account_status: widgets.account_status.clone(), - account_rows: widgets.account_rows.clone(), - tunnel_status: widgets.tunnel_status.clone(), - tunnel_button: widgets.tunnel_button.clone(), - tunnel_state: None, - tailnet_session_id: None, - tailnet_running: false, - }; - - sender.input(HomeScreenMsg::EnsureDaemon); - - AsyncComponentParts { model, widgets } - } - - async fn update( - &mut self, - msg: Self::Input, - sender: 
AsyncComponentSender, - root: &Self::Root, - ) { - match msg { - HomeScreenMsg::EnsureDaemon => self.ensure_daemon().await, - HomeScreenMsg::Refresh => self.refresh().await, - HomeScreenMsg::TunnelAction => self.perform_tunnel_action().await, - HomeScreenMsg::OpenWireGuard => open_wireguard_window(root, &sender), - HomeScreenMsg::OpenTor => open_tor_window(root, &sender), - HomeScreenMsg::OpenTailnet => open_tailnet_window(root, &sender), - HomeScreenMsg::AddWireGuard { - title, - account, - identity, - config, - } => self.add_wireguard(title, account, identity, config).await, - HomeScreenMsg::SaveTor { title, account, identity, note } => { - self.save_tor(title, account, identity, note) - } - HomeScreenMsg::DiscoverTailnet(email) => self.discover_tailnet(email).await, - HomeScreenMsg::ProbeTailnet(authority) => self.probe_tailnet(authority).await, - HomeScreenMsg::StartTailnetLogin { - authority, - account, - identity, - hostname, - } => { - self.start_tailnet_login(authority, account, identity, hostname, sender) - .await; - } - HomeScreenMsg::PollTailnetLogin => self.poll_tailnet_login(sender).await, - HomeScreenMsg::CancelTailnetLogin => self.cancel_tailnet_login().await, - HomeScreenMsg::AddTailnet { - authority, - account, - identity, - hostname, - tailnet, - } => { - self.add_tailnet(authority, account, identity, hostname, tailnet) - .await; - } - } - } -} - -impl HomeScreen { - async fn ensure_daemon(&mut self) { - self.daemon_banner.set_title("Starting Burrow daemon"); - self.daemon_banner.set_revealed(true); - match daemon_api::ensure_daemon().await { - Ok(()) => { - self.daemon_banner.set_revealed(false); - self.refresh().await; - } - Err(error) => { - self.daemon_banner - .set_title(&format!("Burrow daemon is not reachable: {error}")); - self.daemon_banner.set_revealed(true); - self.tunnel_state = None; - self.tunnel_status.set_label("Daemon unavailable"); - self.tunnel_button.set_label("Enable"); - self.tunnel_button.set_sensitive(true); - 
self.network_status - .set_label("Stored daemon networks are unavailable until the daemon starts."); - self.render_networks(&[]); - } - } - } - - async fn refresh(&mut self) { - match daemon_api::tunnel_state().await { - Ok(state) => { - self.daemon_banner.set_revealed(false); - self.tunnel_state = Some(state); - match state { - daemon_api::TunnelState::Running => { - self.tunnel_status.set_label("Connected"); - self.tunnel_button.set_label("Stop"); - } - daemon_api::TunnelState::Stopped => { - self.tunnel_status.set_label("Disconnected"); - self.tunnel_button.set_label("Start"); - } - } - self.tunnel_button.set_sensitive(true); - } - Err(error) => { - self.tunnel_state = None; - self.daemon_banner - .set_title(&format!("Burrow daemon is not reachable: {error}")); - self.daemon_banner.set_revealed(true); - self.tunnel_status.set_label("Unknown"); - self.tunnel_button.set_label("Enable"); - self.tunnel_button.set_sensitive(true); - } - } - - match daemon_api::list_networks().await { - Ok(networks) => { - self.render_networks(&networks); - self.network_status.set_label(if networks.is_empty() { - "Stored daemon networks and their active account selectors" - } else { - "Stored daemon networks and their active account selectors" - }); - } - Err(error) => { - self.render_networks(&[]); - self.network_status - .set_label(&format!("Unable to read daemon networks: {error}")); - } - } - - match account_store::load() { - Ok(accounts) => { - self.account_status.set_label(""); - self.render_accounts(&accounts); - } - Err(error) => { - self.render_accounts(&[]); - self.account_status - .set_label(&format!("Unable to read account store: {error}")); - } - } - } - - async fn perform_tunnel_action(&mut self) { - match self.tunnel_state { - Some(daemon_api::TunnelState::Running) => { - self.tunnel_button.set_sensitive(false); - self.tunnel_status.set_label("Disconnecting..."); - if let Err(error) = daemon_api::stop_tunnel().await { - self.tunnel_status - .set_label(&format!("Stop 
failed: {error}")); - } - self.refresh().await; - } - Some(daemon_api::TunnelState::Stopped) => { - self.tunnel_button.set_sensitive(false); - self.tunnel_status.set_label("Connecting..."); - if let Err(error) = daemon_api::start_tunnel().await { - self.tunnel_status - .set_label(&format!("Start failed: {error}")); - } - self.refresh().await; - } - None => self.ensure_daemon().await, - } - } - - async fn add_wireguard( - &mut self, - title: String, - account: String, - identity: String, - config: String, - ) { - if config.trim().is_empty() { - self.network_status - .set_label("Paste a WireGuard configuration before adding a network."); - return; - } - match daemon_api::add_wireguard(config).await { - Ok(id) => { - let title = daemon_api::normalized(&title, &format!("WireGuard {id}")); - let record = account_store::new_record( - AccountKind::WireGuard, - title, - None, - daemon_api::normalized(&account, "default"), - daemon_api::normalized(&identity, &format!("network-{id}")), - None, - None, - Some(format!("Linked to daemon network #{id}.")), - ); - match account_store::upsert(record) { - Ok(accounts) => self.render_accounts(&accounts), - Err(error) => self - .account_status - .set_label(&format!("WireGuard account save failed: {error}")), - } - self.network_status - .set_label(&format!("Added WireGuard network #{id}.")); - self.refresh().await; - } - Err(error) => self - .network_status - .set_label(&format!("Unable to add WireGuard network: {error}")), - } - } - - fn save_tor(&mut self, title: String, account: String, identity: String, note: String) { - let record = account_store::new_record( - AccountKind::Tor, - daemon_api::normalized( - &title, - &format!("Tor {}", daemon_api::normalized(&identity, "linux")), - ), - Some("arti://local".to_owned()), - daemon_api::normalized(&account, "default"), - daemon_api::normalized(&identity, "linux"), - None, - None, - Some(note), - ); - match account_store::upsert(record) { - Ok(accounts) => { - 
self.account_status.set_label("Saved Tor account."); - self.render_accounts(&accounts); - } - Err(error) => self - .account_status - .set_label(&format!("Unable to save Tor account: {error}")), - } - } - - async fn discover_tailnet(&mut self, email: String) { - let Ok(email) = daemon_api::require_value(&email, "Email address") else { - self.account_status - .set_label("Enter an email address before Tailnet discovery."); - return; - }; - - self.account_status.set_label("Finding Tailnet server..."); - match daemon_api::discover_tailnet(email).await { - Ok(discovery) => { - let kind = if discovery.managed { - "managed authority" - } else { - "custom authority" - }; - let issuer = discovery - .oidc_issuer - .map(|issuer| format!(" OIDC: {issuer}.")) - .unwrap_or_default(); - self.account_status.set_label(&format!( - "Discovered {kind}: {}.{issuer}", - discovery.authority - )); - } - Err(error) => self - .account_status - .set_label(&format!("Tailnet discovery failed: {error}")), - } - } - - async fn probe_tailnet(&mut self, authority: String) { - let Ok(authority) = daemon_api::require_value(&authority, "Tailnet server URL") else { - self.account_status - .set_label("Enter a Tailnet server URL before checking it."); - return; - }; - - self.account_status.set_label("Checking Tailnet server..."); - match daemon_api::probe_tailnet(authority).await { - Ok(probe) => { - let detail = probe - .detail - .unwrap_or_else(|| format!("HTTP {}", probe.status_code)); - self.account_status - .set_label(&format!("{}: {detail}", probe.summary)); - } - Err(error) => self - .account_status - .set_label(&format!("Tailnet probe failed: {error}")), - } - } - - async fn start_tailnet_login( - &mut self, - authority: String, - account: String, - identity: String, - hostname: Option, - sender: AsyncComponentSender, - ) { - let Ok(authority) = daemon_api::require_value(&authority, "Tailnet server URL") else { - self.account_status - .set_label("Enter a Tailnet server URL before sign-in."); - 
return; - }; - - self.account_status.set_label("Starting Tailnet sign-in..."); - match daemon_api::start_tailnet_login(authority, account, identity, hostname).await { - Ok(status) => { - self.apply_login_status(&status); - if let Some(auth_url) = status.auth_url.as_deref() { - if let Err(error) = open_auth_url(auth_url) { - self.account_status.set_label(&format!( - "{} Open this URL manually: {auth_url}. Browser launch failed: {error}", - self.account_status.text() - )); - } - } - if !status.running { - sender.input(HomeScreenMsg::PollTailnetLogin); - } - } - Err(error) => self - .account_status - .set_label(&format!("Tailnet sign-in failed: {error}")), - } - } - - async fn poll_tailnet_login(&mut self, sender: AsyncComponentSender) { - let Some(session_id) = self.tailnet_session_id.clone() else { - return; - }; - if self.tailnet_running { - return; - } - - tokio::time::sleep(Duration::from_secs(1)).await; - match daemon_api::tailnet_login_status(session_id).await { - Ok(status) => { - self.apply_login_status(&status); - if !status.running { - sender.input(HomeScreenMsg::PollTailnetLogin); - } - } - Err(error) => { - self.account_status - .set_label(&format!("Tailnet sign-in status failed: {error}")); - self.tailnet_session_id = None; - } - } - } - - async fn cancel_tailnet_login(&mut self) { - let Some(session_id) = self.tailnet_session_id.clone() else { - self.account_status - .set_label("No Tailnet sign-in is active."); - return; - }; - match daemon_api::cancel_tailnet_login(session_id).await { - Ok(()) => { - self.tailnet_session_id = None; - self.tailnet_running = false; - self.account_status.set_label("Tailnet sign-in cancelled."); - } - Err(error) => self - .account_status - .set_label(&format!("Unable to cancel Tailnet sign-in: {error}")), - } - } - - async fn add_tailnet( - &mut self, - authority: String, - account: String, - identity: String, - hostname: Option, - tailnet: Option, - ) { - let Ok(authority) = daemon_api::require_value(&authority, "Tailnet 
server URL") else { - self.account_status - .set_label("Enter a Tailnet server URL before saving."); - return; - }; - if self.tailnet_session_id.is_some() && !self.tailnet_running { - self.account_status - .set_label("Finish browser sign-in before saving this Tailnet account."); - return; - } - - let stored_authority = daemon_api::normalized_optional(&authority) - .unwrap_or_else(|| daemon_api::default_tailnet_authority().to_owned()); - let stored_account = daemon_api::normalized(&account, "default"); - let stored_identity = daemon_api::normalized(&identity, "linux"); - let stored_hostname = hostname.clone(); - let stored_tailnet = tailnet.clone(); - - match daemon_api::add_tailnet(authority, account, identity, hostname, tailnet).await { - Ok(id) => { - let title = stored_tailnet - .clone() - .or(stored_hostname.clone()) - .unwrap_or_else(|| format!("Tailnet {id}")); - let record = account_store::new_record( - AccountKind::Tailnet, - title, - Some(stored_authority), - stored_account, - stored_identity, - stored_hostname, - stored_tailnet, - Some(format!("Linked to daemon network #{id}.")), - ); - match account_store::upsert(record) { - Ok(accounts) => self.render_accounts(&accounts), - Err(error) => self - .account_status - .set_label(&format!("Tailnet account save failed: {error}")), - } - self.account_status - .set_label(&format!("Saved Tailnet account and network #{id}.")); - self.refresh().await; - } - Err(error) => self - .account_status - .set_label(&format!("Unable to save Tailnet account: {error}")), - } - } - - fn apply_login_status(&mut self, status: &daemon_api::TailnetLoginStatus) { - self.tailnet_session_id = Some(status.session_id.clone()); - self.tailnet_running = status.running; - - let mut parts = Vec::new(); - if status.running { - parts.push("Signed In".to_owned()); - } else if status.needs_login { - parts.push("Browser Sign-In Required".to_owned()); - } else { - parts.push("Checking Sign-In".to_owned()); - } - if !status.backend_state.is_empty() 
{ - parts.push(format!("State: {}", status.backend_state)); - } - if let Some(tailnet_name) = &status.tailnet_name { - parts.push(format!("Tailnet: {tailnet_name}")); - } - if let Some(self_dns_name) = &status.self_dns_name { - parts.push(self_dns_name.clone()); - } - if !status.tailnet_ips.is_empty() { - parts.push(status.tailnet_ips.join(", ")); - } - if !status.health.is_empty() { - parts.push(status.health.join(" / ")); - } - self.account_status.set_label(&parts.join("\n")); - } - - fn render_networks(&self, networks: &[daemon_api::NetworkSummary]) { - while let Some(child) = self.network_cards.first_child() { - self.network_cards.remove(&child); - } - - if networks.is_empty() { - self.network_cards.append(&empty_networks_view()); - return; - } - - for network in networks { - self.network_cards.append(&network_card(network)); - } - } - - fn render_accounts(&self, accounts: &[AccountRecord]) { - while let Some(child) = self.account_rows.first_child() { - self.account_rows.remove(&child); - } - - if accounts.is_empty() { - self.account_rows.append(&empty_accounts_view()); - return; - } - - for account in accounts { - self.account_rows.append(&account_card(account)); - } - } -} - -fn configure_add_popover(button: >k::MenuButton, sender: &AsyncComponentSender) { - let popover = gtk::Popover::new(); - let box_ = gtk::Box::new(gtk::Orientation::Vertical, 4); - box_.set_margin_all(6); - - for (label, msg) in [ - ("Add WireGuard Network", HomeScreenMsg::OpenWireGuard), - ("Save Tor Account", HomeScreenMsg::OpenTor), - ("Add Tailnet Account", HomeScreenMsg::OpenTailnet), - ] { - let item = gtk::Button::with_label(label); - item.add_css_class("flat"); - item.set_halign(Align::Fill); - let input = sender.input_sender().clone(); - item.connect_clicked(move |_| input.emit(msg_from_template(&msg))); - box_.append(&item); - } - - popover.set_child(Some(&box_)); - button.set_popover(Some(&popover)); -} - -fn msg_from_template(msg: &HomeScreenMsg) -> HomeScreenMsg { - match msg 
{ - HomeScreenMsg::OpenWireGuard => HomeScreenMsg::OpenWireGuard, - HomeScreenMsg::OpenTor => HomeScreenMsg::OpenTor, - HomeScreenMsg::OpenTailnet => HomeScreenMsg::OpenTailnet, - _ => unreachable!(), - } -} - -fn network_card(network: &daemon_api::NetworkSummary) -> gtk::Box { - let card = gtk::Box::new(gtk::Orientation::Vertical, 10); - card.add_css_class("network-card"); - if network.title.to_ascii_lowercase().contains("wireguard") { - card.add_css_class("wireguard-card"); - } else { - card.add_css_class("tailnet-card"); - } - card.set_size_request(360, 175); - card.set_margin_bottom(8); - - let kind = if network.title.to_ascii_lowercase().contains("wireguard") { - "WireGuard" - } else { - "Tailnet" - }; - let kind_label = gtk::Label::new(Some(kind)); - kind_label.add_css_class("network-card-kind"); - kind_label.set_xalign(0.0); - - let title = gtk::Label::new(Some(&network.title)); - title.add_css_class("network-card-title"); - title.set_xalign(0.0); - title.set_wrap(true); - - let spacer = gtk::Box::new(gtk::Orientation::Vertical, 0); - spacer.set_vexpand(true); - - let detail = gtk::Label::new(Some(&network.detail)); - detail.add_css_class("network-card-detail"); - detail.set_xalign(0.0); - detail.set_wrap(true); - detail.set_lines(4); - - card.append(&kind_label); - card.append(&title); - card.append(&spacer); - card.append(&detail); - card -} - -fn empty_networks_view() -> gtk::Box { - let box_ = gtk::Box::new(gtk::Orientation::Vertical, 6); - box_.add_css_class("empty-state"); - box_.set_size_request(520, 175); - box_.set_hexpand(true); - - let title = gtk::Label::new(Some("No Networks Yet")); - title.add_css_class("title-3"); - title.set_xalign(0.0); - let detail = gtk::Label::new(Some( - "Add a WireGuard network, or save a Tailnet account so Burrow can store a managed network when the daemon is reachable.", - )); - detail.add_css_class("dim-label"); - detail.set_wrap(true); - detail.set_xalign(0.0); - - box_.append(&title); - box_.append(&detail); - box_ 
-} - -fn empty_accounts_view() -> gtk::Box { - let box_ = gtk::Box::new(gtk::Orientation::Vertical, 6); - box_.add_css_class("empty-state"); - box_.set_hexpand(true); - - let title = gtk::Label::new(Some("No Accounts Yet")); - title.add_css_class("title-3"); - title.set_justify(gtk::Justification::Center); - let detail = gtk::Label::new(Some( - "Save a Tor account or sign in to Tailnet to keep network identities ready on this device.", - )); - detail.add_css_class("dim-label"); - detail.set_wrap(true); - detail.set_justify(gtk::Justification::Center); - - box_.append(&title); - box_.append(&detail); - box_ -} - -fn account_card(account: &AccountRecord) -> gtk::Box { - let card = gtk::Box::new(gtk::Orientation::Vertical, 8); - card.add_css_class("summary-card"); - card.set_hexpand(true); - - let header = gtk::Box::new(gtk::Orientation::Horizontal, 8); - let title = gtk::Label::new(Some(&account.title)); - title.add_css_class("title-3"); - title.set_xalign(0.0); - title.set_hexpand(true); - let kind = gtk::Label::new(Some(account.kind.title())); - kind.add_css_class("dim-label"); - header.append(&title); - header.append(&kind); - card.append(&header); - - append_account_value(&card, "Account", &account.account); - append_account_value(&card, "Identity", &account.identity); - if let Some(authority) = &account.authority { - append_account_value(&card, "Authority", authority); - } - if let Some(hostname) = &account.hostname { - append_account_value(&card, "Hostname", hostname); - } - if let Some(tailnet) = &account.tailnet { - append_account_value(&card, "Tailnet", tailnet); - } - if let Some(note) = &account.note { - let note_label = gtk::Label::new(Some(note)); - note_label.add_css_class("dim-label"); - note_label.set_wrap(true); - note_label.set_xalign(0.0); - card.append(¬e_label); - } - - card -} - -fn append_account_value(card: >k::Box, label: &str, value: &str) { - let row = gtk::Box::new(gtk::Orientation::Horizontal, 8); - let key = gtk::Label::new(Some(label)); 
- key.add_css_class("dim-label"); - key.set_xalign(0.0); - key.set_width_chars(9); - let value = gtk::Label::new(Some(value)); - value.set_xalign(0.0); - value.set_wrap(true); - value.set_hexpand(true); - row.append(&key); - row.append(&value); - card.append(&row); -} - -fn open_wireguard_window(root: >k::ScrolledWindow, sender: &AsyncComponentSender) { - let window = sheet_window(root, "WireGuard", 560, 620); - let content = sheet_content( - &window, - "Import WireGuard", - "Import a tunnel and optional account metadata.", - ); - - let title = gtk::Entry::new(); - title.set_placeholder_text(Some("Title")); - let account = gtk::Entry::new(); - account.set_placeholder_text(Some("Account")); - let identity = gtk::Entry::new(); - identity.set_placeholder_text(Some("Identity")); - let text = gtk::TextView::new(); - text.set_monospace(true); - text.set_wrap_mode(gtk::WrapMode::WordChar); - - let editor = gtk::ScrolledWindow::new(); - editor.set_min_content_height(220); - editor.set_child(Some(&text)); - - content.append(§ion_label("Identity")); - content.append(&title); - content.append(&account); - content.append(&identity); - content.append(§ion_label("WireGuard Configuration")); - content.append(&editor); - - let add = gtk::Button::with_label("Add Network"); - add.add_css_class("suggested-action"); - let input = sender.input_sender().clone(); - let window_for_click = window.clone(); - add.connect_clicked(move |_| { - input.emit(HomeScreenMsg::AddWireGuard { - title: title.text().to_string(), - account: account.text().to_string(), - identity: identity.text().to_string(), - config: text_view_text(&text), - }); - window_for_click.close(); - }); - content.append(&add); - - window.set_child(Some(&content)); - window.present(); -} - -fn open_tor_window(root: >k::ScrolledWindow, sender: &AsyncComponentSender) { - let window = sheet_window(root, "Tor", 520, 540); - let content = sheet_content( - &window, - "Configure Tor", - "Store Arti account and identity preferences.", - 
); - - let title = entry_with_text("Title", "Default Tor"); - let account = entry_with_text("Account", "default"); - let identity = entry_with_text("Identity", "linux"); - let addresses = entry_with_text("Virtual Addresses", "100.64.0.2/32"); - let dns = entry_with_text("DNS Resolvers", "1.1.1.1, 1.0.0.1"); - let mtu = entry_with_text("MTU", "1400"); - let listen = entry_with_text("Transparent Listener", "127.0.0.1:9040"); - - content.append(§ion_label("Identity")); - content.append(&title); - content.append(&account); - content.append(&identity); - content.append(§ion_label("Tor Preferences")); - content.append(&addresses); - content.append(&dns); - content.append(&mtu); - content.append(&listen); - - let save = gtk::Button::with_label("Save Account"); - save.add_css_class("suggested-action"); - let input = sender.input_sender().clone(); - let window_for_click = window.clone(); - save.connect_clicked(move |_| { - let note = [ - format!( - "Addresses: {}", - normalized_entry(&addresses, "100.64.0.2/32") - ), - format!("DNS: {}", normalized_entry(&dns, "1.1.1.1, 1.0.0.1")), - format!("MTU: {}", normalized_entry(&mtu, "1400")), - format!("Listen: {}", normalized_entry(&listen, "127.0.0.1:9040")), - ] - .join(" - "); - input.emit(HomeScreenMsg::SaveTor { - title: normalized_entry(&title, "Default Tor"), - account: normalized_entry(&account, "default"), - identity: normalized_entry(&identity, "linux"), - note, - }); - window_for_click.close(); - }); - content.append(&save); - - window.set_child(Some(&content)); - window.present(); -} - -fn open_tailnet_window(root: >k::ScrolledWindow, sender: &AsyncComponentSender) { - let window = sheet_window(root, "Tailnet", 560, 680); - let content = sheet_content( - &window, - "Connect Tailnet", - "Save Tailnet authority, identity defaults, and login material.", - ); - - let email = gtk::Entry::new(); - email.set_placeholder_text(Some("Email address")); - let authority = entry_with_text("Server URL", 
daemon_api::default_tailnet_authority()); - let tailnet = gtk::Entry::new(); - tailnet.set_placeholder_text(Some("Tailnet")); - let account = entry_with_text("Account", "default"); - let identity = entry_with_text("Identity", "linux"); - let hostname = entry_with_text("Hostname", &hostname_fallback()); - - content.append(&section_label("Connection")); - content.append(&email); - content.append(&authority); - content.append(&tailnet); - content.append(&section_label("Identity")); - content.append(&account); - content.append(&identity); - content.append(&hostname); - - let actions = gtk::Box::new(gtk::Orientation::Horizontal, 8); - let discover = gtk::Button::with_label("Refresh Server Lookup"); - let probe = gtk::Button::with_label("Check Server"); - let sign_in = gtk::Button::with_label("Start Sign-In"); - actions.append(&discover); - actions.append(&probe); - actions.append(&sign_in); - content.append(&section_label("Authentication")); - content.append(&actions); - - let input = sender.input_sender().clone(); - let email_for_click = email.clone(); - discover.connect_clicked(move |_| { - input.emit(HomeScreenMsg::DiscoverTailnet( - email_for_click.text().to_string(), - )); - }); - - let input = sender.input_sender().clone(); - let authority_for_probe = authority.clone(); - probe.connect_clicked(move |_| { - input.emit(HomeScreenMsg::ProbeTailnet( - authority_for_probe.text().to_string(), - )); - }); - - let input = sender.input_sender().clone(); - let authority_for_login = authority.clone(); - let account_for_login = account.clone(); - let identity_for_login = identity.clone(); - let hostname_for_login = hostname.clone(); - sign_in.connect_clicked(move |_| { - input.emit(HomeScreenMsg::StartTailnetLogin { - authority: authority_for_login.text().to_string(), - account: normalized_entry(&account_for_login, "default"), - identity: normalized_entry(&identity_for_login, "linux"), - hostname: daemon_api::normalized_optional(&hostname_for_login.text()), - }); - }); - - let save = 
gtk::Button::with_label("Save Account"); - save.add_css_class("suggested-action"); - let input = sender.input_sender().clone(); - let window_for_click = window.clone(); - save.connect_clicked(move |_| { - input.emit(HomeScreenMsg::AddTailnet { - authority: authority.text().to_string(), - account: normalized_entry(&account, "default"), - identity: normalized_entry(&identity, "linux"), - hostname: daemon_api::normalized_optional(&hostname.text()), - tailnet: daemon_api::normalized_optional(&tailnet.text()), - }); - window_for_click.close(); - }); - - let cancel = gtk::Button::with_label("Cancel Sign-In"); - let input = sender.input_sender().clone(); - cancel.connect_clicked(move |_| { - input.emit(HomeScreenMsg::CancelTailnetLogin); - }); - - content.append(&save); - content.append(&cancel); - - window.set_child(Some(&content)); - window.present(); -} - -fn sheet_window(root: &gtk::ScrolledWindow, title: &str, width: i32, height: i32) -> gtk::Window { - let window = gtk::Window::builder() - .title(title) - .default_width(width) - .default_height(height) - .modal(true) - .build(); - if let Some(root) = root.root() { - if let Ok(parent) = root.downcast::<gtk::Window>() { - window.set_transient_for(Some(&parent)); - } - } - window -} - -fn sheet_content(window: &gtk::Window, title: &str, detail: &str) -> gtk::Box { - let content = gtk::Box::new(gtk::Orientation::Vertical, 12); - content.set_margin_all(18); - - let summary = gtk::Box::new(gtk::Orientation::Horizontal, 12); - summary.add_css_class("summary-card"); - - let copy = gtk::Box::new(gtk::Orientation::Vertical, 4); - copy.set_hexpand(true); - - let title_label = gtk::Label::new(Some(title)); - title_label.add_css_class("title-3"); - title_label.set_xalign(0.0); - - let detail_label = gtk::Label::new(Some(detail)); - detail_label.add_css_class("dim-label"); - detail_label.set_wrap(true); - detail_label.set_xalign(0.0); - - copy.append(&title_label); - copy.append(&detail_label); - summary.append(&copy); - - let close = 
gtk::Button::builder() - .icon_name("window-close-symbolic") - .tooltip_text("Close") - .valign(Align::Start) - .build(); - close.add_css_class("flat"); - let window_for_click = window.clone(); - close.connect_clicked(move |_| window_for_click.close()); - summary.append(&close); - - content.append(&summary); - content -} - -fn section_label(label: &str) -> gtk::Label { - let section = gtk::Label::new(Some(label)); - section.add_css_class("heading"); - section.set_xalign(0.0); - section -} - -fn entry_with_text(placeholder: &str, value: &str) -> gtk::Entry { - let entry = gtk::Entry::new(); - entry.set_placeholder_text(Some(placeholder)); - entry.set_text(value); - entry -} - -fn normalized_entry(entry: &gtk::Entry, fallback: &str) -> String { - daemon_api::normalized(&entry.text(), fallback) -} - -fn hostname_fallback() -> String { - std::env::var("HOSTNAME").unwrap_or_else(|_| "linux".to_owned()) -} - -fn text_view_text(text_view: &gtk::TextView) -> String { - let buffer = text_view.buffer(); - buffer - .text(&buffer.start_iter(), &buffer.end_iter(), true) - .to_string() -} - -fn open_auth_url(url: &str) -> anyhow::Result<()> { - gtk::gio::AppInfo::launch_default_for_uri(url, None::<&gtk::gio::AppLaunchContext>) - .map_err(anyhow::Error::from) -} diff --git a/burrow-gtk/src/components/mod.rs b/burrow-gtk/src/components/mod.rs index 8e60fa7..b1cc938 100644 --- a/burrow-gtk/src/components/mod.rs +++ b/burrow-gtk/src/components/mod.rs @@ -1,6 +1,6 @@ use super::*; -use crate::daemon_api; use adw::prelude::*; +use burrow::{DaemonClient, DaemonCommand, DaemonResponseData}; use gtk::Align; use relm4::{ component::{ @@ -9,9 +9,12 @@ use relm4::{ }, prelude::*, }; +use std::sync::Arc; +use tokio::sync::Mutex; mod app; -mod home_screen; +mod settings; +mod settings_screen; +mod switch_screen; pub use app::*; -pub use home_screen::{HomeScreen, HomeScreenMsg}; diff --git a/burrow-gtk/src/components/settings/daemon_group.rs b/burrow-gtk/src/components/settings/daemon_group.rs 
deleted file mode 100644 index 3817ca6..0000000 --- a/burrow-gtk/src/components/settings/daemon_group.rs +++ /dev/null @@ -1,111 +0,0 @@ -use super::*; -use std::process::Command; - -#[derive(Debug)] -pub struct DaemonGroup { - system_setup: SystemSetup, - daemon_client: Arc>>, - already_running: bool, -} - -pub struct DaemonGroupInit { - pub daemon_client: Arc>>, - pub system_setup: SystemSetup, -} - -#[derive(Debug)] -pub enum DaemonGroupMsg { - LaunchLocal, - DaemonStateChange, -} - -#[relm4::component(pub, async)] -impl AsyncComponent for DaemonGroup { - type Init = DaemonGroupInit; - type Input = DaemonGroupMsg; - type Output = (); - type CommandOutput = (); - - view! { - #[name(group)] - adw::PreferencesGroup { - #[watch] - set_sensitive: - (model.system_setup == SystemSetup::AppImage || model.system_setup == SystemSetup::Other) && - !model.already_running, - set_title: "Local Daemon", - set_description: Some("Run Local Daemon"), - - gtk::Button { - set_label: "Launch", - connect_clicked => DaemonGroupMsg::LaunchLocal - } - } - } - - async fn init( - init: Self::Init, - root: Self::Root, - sender: AsyncComponentSender, - ) -> AsyncComponentParts { - // Should be impossible to panic here - let model = DaemonGroup { - system_setup: init.system_setup, - daemon_client: init.daemon_client.clone(), - already_running: init.daemon_client.lock().await.is_some(), - }; - - let widgets = view_output!(); - - AsyncComponentParts { model, widgets } - } - - async fn update( - &mut self, - msg: Self::Input, - _sender: AsyncComponentSender, - _root: &Self::Root, - ) { - match msg { - DaemonGroupMsg::LaunchLocal => { - let burrow_original_bin = std::env::vars() - .find(|(k, _)| k == "APPDIR") - .map(|(_, v)| v + "/usr/bin/burrow") - .unwrap_or("/usr/bin/burrow".to_owned()); - - let mut burrow_bin = - String::from_utf8(Command::new("mktemp").output().unwrap().stdout).unwrap(); - burrow_bin.pop(); - - let privileged_spawn_script = format!( - r#"TEMP=$(mktemp -p /root) -cp {} 
$TEMP -chmod +x $TEMP -setcap CAP_NET_BIND_SERVICE,CAP_NET_ADMIN+eip $TEMP -mv $TEMP /tmp/burrow-detached-daemon"#, - burrow_original_bin - ) - .replace('\n', "&&"); - - // TODO: Handle error condition - - Command::new("pkexec") - .arg("sh") - .arg("-c") - .arg(privileged_spawn_script) - .arg(&burrow_bin) - .output() - .unwrap(); - - Command::new("/tmp/burrow-detached-daemon") - .env("RUST_LOG", "debug") - .arg("daemon") - .spawn() - .unwrap(); - } - DaemonGroupMsg::DaemonStateChange => { - self.already_running = self.daemon_client.lock().await.is_some(); - } - } - } -} diff --git a/burrow-gtk/src/components/settings/diag_group.rs b/burrow-gtk/src/components/settings/diag_group.rs index a15e0ea..be542cd 100644 --- a/burrow-gtk/src/components/settings/diag_group.rs +++ b/burrow-gtk/src/components/settings/diag_group.rs @@ -1,10 +1,11 @@ use super::*; +use diag::{StatusTernary, SystemSetup}; #[derive(Debug)] pub struct DiagGroup { daemon_client: Arc>>, - system_setup: SystemSetup, + init_system: SystemSetup, service_installed: StatusTernary, socket_installed: StatusTernary, socket_enabled: StatusTernary, @@ -13,20 +14,19 @@ pub struct DiagGroup { pub struct DiagGroupInit { pub daemon_client: Arc>>, - pub system_setup: SystemSetup, } impl DiagGroup { async fn new(daemon_client: Arc>>) -> Result { - let system_setup = SystemSetup::new(); + let setup = SystemSetup::new(); let daemon_running = daemon_client.lock().await.is_some(); Ok(Self { - service_installed: system_setup.is_service_installed()?, - socket_installed: system_setup.is_socket_installed()?, - socket_enabled: system_setup.is_socket_enabled()?, + service_installed: setup.is_service_installed()?, + socket_installed: setup.is_socket_installed()?, + socket_enabled: setup.is_socket_enabled()?, daemon_running, - system_setup, + init_system: setup, daemon_client, }) } @@ -52,7 +52,7 @@ impl AsyncComponent for DiagGroup { adw::ActionRow { #[watch] - set_title: &format!("System Type: {}", model.system_setup) + 
set_title: &format!("Init System: {}", model.init_system) }, adw::ActionRow { #[watch] diff --git a/burrow-gtk/src/components/settings/mod.rs b/burrow-gtk/src/components/settings/mod.rs index aa87db2..53f46d4 100644 --- a/burrow-gtk/src/components/settings/mod.rs +++ b/burrow-gtk/src/components/settings/mod.rs @@ -1,8 +1,5 @@ use super::*; -use diag::{StatusTernary, SystemSetup}; -mod daemon_group; mod diag_group; -pub use daemon_group::{DaemonGroup, DaemonGroupInit, DaemonGroupMsg}; -pub use diag_group::{DiagGroup, DiagGroupInit, DiagGroupMsg}; +pub use diag_group::{DiagGroup, DiagGroupInit}; diff --git a/burrow-gtk/src/components/settings_screen.rs b/burrow-gtk/src/components/settings_screen.rs index 971f262..0a29e43 100644 --- a/burrow-gtk/src/components/settings_screen.rs +++ b/burrow-gtk/src/components/settings_screen.rs @@ -1,24 +1,17 @@ use super::*; -use diag::SystemSetup; pub struct SettingsScreen { - diag_group: AsyncController, - daemon_group: AsyncController, + _diag_group: AsyncController, } pub struct SettingsScreenInit { pub daemon_client: Arc>>, } -#[derive(Debug, PartialEq, Eq)] -pub enum SettingsScreenMsg { - DaemonStateChange, -} - #[relm4::component(pub)] impl SimpleComponent for SettingsScreen { type Init = SettingsScreenInit; - type Input = SettingsScreenMsg; + type Input = (); type Output = (); view! 
{ @@ -31,41 +24,21 @@ impl SimpleComponent for SettingsScreen { root: &Self::Root, sender: ComponentSender, ) -> ComponentParts { - let system_setup = SystemSetup::new(); - let diag_group = settings::DiagGroup::builder() .launch(settings::DiagGroupInit { - system_setup, daemon_client: Arc::clone(&init.daemon_client), }) - .forward(sender.input_sender(), |_| { - SettingsScreenMsg::DaemonStateChange - }); - - let daemon_group = settings::DaemonGroup::builder() - .launch(settings::DaemonGroupInit { - system_setup, - daemon_client: Arc::clone(&init.daemon_client), - }) - .forward(sender.input_sender(), |_| { - SettingsScreenMsg::DaemonStateChange - }); + .forward(sender.input_sender(), |_| ()); let widgets = view_output!(); widgets.preferences.add(diag_group.widget()); - widgets.preferences.add(daemon_group.widget()); - let model = SettingsScreen { diag_group, daemon_group }; + let model = SettingsScreen { + _diag_group: diag_group, + }; ComponentParts { model, widgets } } - fn update(&mut self, _: Self::Input, _sender: ComponentSender) { - // Currently, `SettingsScreenMsg` only has one variant, so the if is ambiguous. 
- // - // if let SettingsScreenMsg::DaemonStateChange = msg { - self.diag_group.emit(DiagGroupMsg::Refresh); - self.daemon_group.emit(DaemonGroupMsg::DaemonStateChange); - // } - } + fn update(&mut self, _: Self::Input, _sender: ComponentSender) {} } diff --git a/burrow-gtk/src/daemon_api.rs b/burrow-gtk/src/daemon_api.rs deleted file mode 100644 index 4ff8bf5..0000000 --- a/burrow-gtk/src/daemon_api.rs +++ /dev/null @@ -1,420 +0,0 @@ -use anyhow::{anyhow, Context, Result}; -use burrow::{ - control::{TailnetConfig, TailnetProvider}, - grpc_defs::{ - Empty, Network, NetworkType, State, TailnetDiscoverRequest, TailnetLoginCancelRequest, - TailnetLoginStartRequest, TailnetLoginStatusRequest, TailnetProbeRequest, - }, - BurrowClient, -}; -use std::{path::PathBuf, sync::OnceLock}; -use tokio::time::{timeout, Duration}; - -const RPC_TIMEOUT: Duration = Duration::from_secs(3); -const MANAGED_TAILSCALE_AUTHORITY: &str = "https://controlplane.tailscale.com"; -static EMBEDDED_DAEMON_STARTED: OnceLock<()> = OnceLock::new(); - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum TunnelState { - Running, - Stopped, -} - -#[derive(Debug, Clone)] -pub struct NetworkSummary { - pub id: i32, - pub title: String, - pub detail: String, -} - -#[derive(Debug, Clone)] -pub struct TailnetDiscovery { - pub authority: String, - pub managed: bool, - pub oidc_issuer: Option, -} - -#[derive(Debug, Clone)] -pub struct TailnetProbe { - pub summary: String, - pub detail: Option, - pub status_code: i32, -} - -#[derive(Debug, Clone)] -pub struct TailnetLoginStatus { - pub session_id: String, - pub backend_state: String, - pub auth_url: Option, - pub running: bool, - pub needs_login: bool, - pub tailnet_name: Option, - pub self_dns_name: Option, - pub tailnet_ips: Vec, - pub health: Vec, -} - -pub fn default_tailnet_authority() -> &'static str { - MANAGED_TAILSCALE_AUTHORITY -} - -pub fn configure_client_paths() -> Result<()> { - if std::env::var_os("BURROW_SOCKET_PATH").is_none() { - 
std::env::set_var("BURROW_SOCKET_PATH", default_socket_path()?); - } - Ok(()) -} - -pub async fn ensure_daemon() -> Result<()> { - configure_client_paths()?; - if daemon_available().await { - return Ok(()); - } - - let socket_path = socket_path()?; - let db_path = database_path()?; - ensure_parent(&socket_path)?; - ensure_parent(&db_path)?; - - if EMBEDDED_DAEMON_STARTED.get().is_none() { - tokio::task::spawn_blocking(move || { - burrow::spawn_in_process_with_paths(Some(socket_path), Some(db_path)); - }) - .await - .context("failed to join embedded daemon startup")?; - let _ = EMBEDDED_DAEMON_STARTED.set(()); - } - - tunnel_state() - .await - .map(|_| ()) - .context("Burrow daemon started but did not accept tunnel status RPCs") -} - -pub fn infer_tailnet_provider(authority: &str) -> TailnetProvider { - let normalized = authority.trim().trim_end_matches('/').to_ascii_lowercase(); - if normalized == "controlplane.tailscale.com" - || normalized == "http://controlplane.tailscale.com" - || normalized == MANAGED_TAILSCALE_AUTHORITY - { - TailnetProvider::Tailscale - } else { - TailnetProvider::Headscale - } -} - -pub async fn daemon_available() -> bool { - tunnel_state().await.is_ok() -} - -fn socket_path() -> Result { - if let Some(path) = std::env::var_os("BURROW_SOCKET_PATH") { - return Ok(PathBuf::from(path)); - } - default_socket_path() -} - -fn default_socket_path() -> Result { - if let Some(runtime_dir) = std::env::var_os("XDG_RUNTIME_DIR") { - return Ok(PathBuf::from(runtime_dir).join("burrow.sock")); - } - let uid = std::env::var("UID").unwrap_or_else(|_| "1000".to_owned()); - Ok(PathBuf::from(format!("/tmp/burrow-{uid}.sock"))) -} - -fn database_path() -> Result { - if let Some(path) = std::env::var_os("BURROW_DB_PATH") { - return Ok(PathBuf::from(path)); - } - if let Some(data_home) = std::env::var_os("XDG_DATA_HOME") { - return Ok(PathBuf::from(data_home).join("burrow").join("burrow.db")); - } - if let Some(home) = std::env::var_os("HOME") { - return 
Ok(PathBuf::from(home) - .join(".local") - .join("share") - .join("burrow") - .join("burrow.db")); - } - Ok(std::env::temp_dir().join("burrow.db")) -} - -fn ensure_parent(path: &PathBuf) -> Result<()> { - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - .with_context(|| format!("failed to create {}", parent.display()))?; - } - Ok(()) -} - -pub async fn tunnel_state() -> Result { - let mut client = BurrowClient::from_uds().await?; - let mut stream = timeout(RPC_TIMEOUT, client.tunnel_client.tunnel_status(Empty {})) - .await - .context("timed out connecting to Burrow daemon")?? - .into_inner(); - let status = timeout(RPC_TIMEOUT, stream.message()) - .await - .context("timed out reading Burrow tunnel status")?? - .context("Burrow daemon ended the status stream without a state")?; - Ok(match status.state() { - State::Running => TunnelState::Running, - State::Stopped => TunnelState::Stopped, - }) -} - -pub async fn start_tunnel() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - timeout(RPC_TIMEOUT, client.tunnel_client.tunnel_start(Empty {})) - .await - .context("timed out starting Burrow tunnel")??; - Ok(()) -} - -pub async fn stop_tunnel() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - timeout(RPC_TIMEOUT, client.tunnel_client.tunnel_stop(Empty {})) - .await - .context("timed out stopping Burrow tunnel")??; - Ok(()) -} - -pub async fn list_networks() -> Result> { - let mut client = BurrowClient::from_uds().await?; - let mut stream = timeout(RPC_TIMEOUT, client.networks_client.network_list(Empty {})) - .await - .context("timed out connecting to Burrow network list")?? - .into_inner(); - let response = timeout(RPC_TIMEOUT, stream.message()) - .await - .context("timed out reading Burrow network list")?? 
- .context("Burrow daemon ended the network stream without a snapshot")?; - Ok(response.network.iter().map(summarize_network).collect()) -} - -pub async fn add_wireguard(config: String) -> Result { - add_network(NetworkType::WireGuard, config.into_bytes()).await -} - -pub async fn add_tailnet( - authority: String, - account: String, - identity: String, - hostname: Option, - tailnet: Option, -) -> Result { - let provider = infer_tailnet_provider(&authority); - let config = TailnetConfig { - provider, - authority: Some(authority), - account: Some(account), - identity: Some(identity), - hostname, - tailnet, - }; - let payload = serde_json::to_vec_pretty(&config)?; - add_network(NetworkType::Tailnet, payload).await -} - -pub async fn discover_tailnet(email: String) -> Result { - let mut client = BurrowClient::from_uds().await?; - let response = timeout( - RPC_TIMEOUT, - client - .tailnet_client - .discover(TailnetDiscoverRequest { email }), - ) - .await - .context("timed out discovering Tailnet authority")?? - .into_inner(); - - Ok(TailnetDiscovery { - authority: response.authority, - managed: response.managed, - oidc_issuer: optional(response.oidc_issuer), - }) -} - -pub async fn probe_tailnet(authority: String) -> Result { - let mut client = BurrowClient::from_uds().await?; - let response = timeout( - RPC_TIMEOUT, - client - .tailnet_client - .probe(TailnetProbeRequest { authority }), - ) - .await - .context("timed out probing Tailnet authority")?? 
- .into_inner(); - - Ok(TailnetProbe { - summary: response.summary, - detail: optional(response.detail), - status_code: response.status_code, - }) -} - -pub async fn start_tailnet_login( - authority: String, - account_name: String, - identity_name: String, - hostname: Option, -) -> Result { - let mut client = BurrowClient::from_uds().await?; - let response = timeout( - RPC_TIMEOUT, - client.tailnet_client.login_start(TailnetLoginStartRequest { - account_name, - identity_name, - hostname: hostname.unwrap_or_default(), - authority, - }), - ) - .await - .context("timed out starting Tailnet sign-in")?? - .into_inner(); - Ok(decode_tailnet_status(response)) -} - -pub async fn tailnet_login_status(session_id: String) -> Result { - let mut client = BurrowClient::from_uds().await?; - let response = timeout( - RPC_TIMEOUT, - client - .tailnet_client - .login_status(TailnetLoginStatusRequest { session_id }), - ) - .await - .context("timed out reading Tailnet sign-in status")?? - .into_inner(); - Ok(decode_tailnet_status(response)) -} - -pub async fn cancel_tailnet_login(session_id: String) -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - timeout( - RPC_TIMEOUT, - client - .tailnet_client - .login_cancel(TailnetLoginCancelRequest { session_id }), - ) - .await - .context("timed out cancelling Tailnet sign-in")??; - Ok(()) -} - -async fn add_network(network_type: NetworkType, payload: Vec) -> Result { - let id = next_network_id().await?; - let mut client = BurrowClient::from_uds().await?; - timeout( - RPC_TIMEOUT, - client.networks_client.network_add(Network { - id, - r#type: network_type.into(), - payload, - }), - ) - .await - .context("timed out saving network to Burrow daemon")??; - Ok(id) -} - -async fn next_network_id() -> Result { - let networks = list_networks().await?; - Ok(networks.iter().map(|network| network.id).max().unwrap_or(0) + 1) -} - -fn summarize_network(network: &Network) -> NetworkSummary { - match network.r#type() { - 
NetworkType::WireGuard => summarize_wireguard(network), - NetworkType::Tailnet => summarize_tailnet(network), - } -} - -fn summarize_wireguard(network: &Network) -> NetworkSummary { - let payload = String::from_utf8_lossy(&network.payload); - let detail = payload - .lines() - .map(str::trim) - .find(|line| !line.is_empty() && !line.starts_with('[')) - .unwrap_or("Stored WireGuard configuration") - .to_owned(); - NetworkSummary { - id: network.id, - title: format!("WireGuard {}", network.id), - detail, - } -} - -fn summarize_tailnet(network: &Network) -> NetworkSummary { - match TailnetConfig::from_slice(&network.payload) { - Ok(config) => { - let title = config - .tailnet - .clone() - .or(config.hostname.clone()) - .unwrap_or_else(|| "Tailnet".to_owned()); - let authority = config - .authority - .unwrap_or_else(|| "default authority".to_owned()); - let account = config.account.unwrap_or_else(|| "default".to_owned()); - NetworkSummary { - id: network.id, - title, - detail: format!("{authority} - account {account}"), - } - } - Err(error) => NetworkSummary { - id: network.id, - title: "Tailnet".to_owned(), - detail: format!("Unable to read Tailnet payload: {error}"), - }, - } -} - -fn decode_tailnet_status( - response: burrow::grpc_defs::TailnetLoginStatusResponse, -) -> TailnetLoginStatus { - TailnetLoginStatus { - session_id: response.session_id, - backend_state: response.backend_state, - auth_url: optional(response.auth_url), - running: response.running, - needs_login: response.needs_login, - tailnet_name: optional(response.tailnet_name), - self_dns_name: optional(response.self_dns_name), - tailnet_ips: response.tailnet_ips, - health: response.health, - } -} - -fn optional(value: String) -> Option { - let trimmed = value.trim(); - if trimmed.is_empty() { - None - } else { - Some(trimmed.to_owned()) - } -} - -pub fn normalized(value: &str, fallback: &str) -> String { - let trimmed = value.trim(); - if trimmed.is_empty() { - fallback.to_owned() - } else { - 
trimmed.to_owned() - } -} - -pub fn normalized_optional(value: &str) -> Option { - let trimmed = value.trim(); - if trimmed.is_empty() { - None - } else { - Some(trimmed.to_owned()) - } -} - -pub fn require_value(value: &str, label: &str) -> Result { - normalized_optional(value).ok_or_else(|| anyhow!("{label} is required")) -} diff --git a/burrow-gtk/src/diag.rs b/burrow-gtk/src/diag.rs index ab4757e..348293e 100644 --- a/burrow-gtk/src/diag.rs +++ b/burrow-gtk/src/diag.rs @@ -15,18 +15,15 @@ pub enum StatusTernary { // Realistically, we may not explicitly "support" non-systemd platforms which would simply this // code greatly. // Along with replacing [`StatusTernary`] with good old [`bool`]. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy)] pub enum SystemSetup { Systemd, - AppImage, Other, } impl SystemSetup { pub fn new() -> Self { - if is_appimage() { - SystemSetup::AppImage - } else if Command::new("systemctl").arg("--version").output().is_ok() { + if Command::new("systemctl").arg("--version").output().is_ok() { SystemSetup::Systemd } else { SystemSetup::Other @@ -36,7 +33,6 @@ impl SystemSetup { pub fn is_service_installed(&self) -> Result { match self { SystemSetup::Systemd => Ok(fs::metadata(SYSTEMD_SERVICE_LOC).is_ok().into()), - SystemSetup::AppImage => Ok(StatusTernary::NA), SystemSetup::Other => Ok(StatusTernary::NA), } } @@ -44,7 +40,6 @@ impl SystemSetup { pub fn is_socket_installed(&self) -> Result { match self { SystemSetup::Systemd => Ok(fs::metadata(SYSTEMD_SOCKET_LOC).is_ok().into()), - SystemSetup::AppImage => Ok(StatusTernary::NA), SystemSetup::Other => Ok(StatusTernary::NA), } } @@ -60,7 +55,6 @@ impl SystemSetup { let output = String::from_utf8(output)?; Ok((output == "enabled\n").into()) } - SystemSetup::AppImage => Ok(StatusTernary::NA), SystemSetup::Other => Ok(StatusTernary::NA), } } @@ -80,12 +74,7 @@ impl Display for SystemSetup { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 
f.write_str(match self { SystemSetup::Systemd => "Systemd", - SystemSetup::AppImage => "AppImage", SystemSetup::Other => "Other", }) } } - -pub fn is_appimage() -> bool { - std::env::vars().any(|(k, _)| k == "APPDIR") -} diff --git a/burrow-gtk/src/main.rs b/burrow-gtk/src/main.rs index b47b63e..6f91e2a 100644 --- a/burrow-gtk/src/main.rs +++ b/burrow-gtk/src/main.rs @@ -1,15 +1,11 @@ use anyhow::Result; pub mod components; -mod account_store; -mod daemon_api; +mod diag; // Generated using meson mod config; fn main() { - if let Err(error) = daemon_api::configure_client_paths() { - eprintln!("failed to configure Burrow daemon paths: {error}"); - } components::App::run(); } diff --git a/burrow/Cargo.toml b/burrow/Cargo.toml index 22f3d25..4e7688b 100644 --- a/burrow/Cargo.toml +++ b/burrow/Cargo.toml @@ -10,37 +10,22 @@ crate-type = ["lib", "staticlib"] [dependencies] anyhow = "1.0" -tokio = { version = "1.37", features = [ - "rt", - "macros", - "sync", - "io-util", - "net", - "process", - "rt-multi-thread", - "signal", - "time", - "tracing", - "fs", -] } +tokio = { version = "1.21", features = ["rt", "macros", "sync", "io-util", "rt-multi-thread", "time", "tracing"] } tun = { version = "0.1", path = "../tun", features = ["serde", "tokio"] } clap = { version = "4.4", features = ["derive"] } tracing = "0.1" tracing-log = "0.1" -tracing-subscriber = { version = "0.3", features = ["std", "env-filter"] } +tracing-oslog = { git = "https://github.com/Stormshield-robinc/tracing-oslog" } +tracing-subscriber = { version = "0.3" , features = ["std", "env-filter"] } log = "0.4" serde = { version = "1", features = ["derive"] } serde_json = "1.0" blake2 = "0.10" chacha20poly1305 = "0.10" rand = "0.8" -bytes = "1" rand_core = "0.6" aead = "0.5" -x25519-dalek = { version = "2.0", features = [ - "reusable_secrets", - "static_secrets", -] } +x25519-dalek = { version = "2.0", features = ["reusable_secrets", "static_secrets"] } ring = "0.17" parking_lot = "0.12" hmac = "0.12" @@ -48,54 
+33,23 @@ base64 = "0.21" fehler = "1.0" ip_network_table = "0.2" ip_network = "0.4" -ipnetwork = { version = "0.21", features = ["serde"] } async-channel = "2.1" schemars = "0.8" futures = "0.3.28" once_cell = "1.19" -arti-client = "0.40.0" -hickory-proto = "0.25.2" -netstack-smoltcp = "0.2.1" -tokio-util = { version = "0.7.18", features = ["compat"] } -tor-rtcompat = "0.40.0" -console-subscriber = { version = "0.2.0", optional = true } +console-subscriber = { version = "0.2.0" , optional = true } console = "0.15.8" -axum = "0.7.4" -argon2 = "0.5" -reqwest = { version = "0.12", default-features = false, features = [ - "json", - "rustls-tls", -] } -rusqlite = { version = "0.38.0", features = ["blob"] } -dotenv = "0.15.0" -tonic = "0.12.0" -prost = "0.13.1" -prost-types = "0.13.1" -tokio-stream = "0.1" -async-stream = "0.2" -tower = { version = "0.4.13", features = ["util"] } -hyper-util = "0.1.6" -toml = "0.8.15" -rust-ini = "0.21.0" -subtle = "2.6" [target.'cfg(target_os = "linux")'.dependencies] caps = "0.5" -libc = "0.2" libsystemd = "0.7" -nix = { version = "0.27", features = ["fs", "socket", "uio"] } tracing-journald = "0.3" [target.'cfg(target_vendor = "apple")'.dependencies] nix = { version = "0.27" } -rusqlite = { version = "0.38.0", features = ["bundled", "blob"] } - -[target.'cfg(target_os = "macos")'.dependencies] -tracing-oslog = { git = "https://github.com/Stormshield-robinc/tracing-oslog" } [dev-dependencies] insta = { version = "1.32", features = ["yaml"] } -tempfile = "3.13" [package.metadata.generate-rpm] assets = [ @@ -108,8 +62,3 @@ pre_uninstall_script = "../package/rpm/pre_uninstall" [features] tokio-console = ["dep:console-subscriber"] -bundled = ["rusqlite/bundled"] - - -[build-dependencies] -tonic-build = "0.12.0" diff --git a/burrow/build.rs b/burrow/build.rs deleted file mode 100644 index 8eea5dc..0000000 --- a/burrow/build.rs +++ /dev/null @@ -1,4 +0,0 @@ -fn main() -> Result<(), Box> { - 
tonic_build::compile_protos("../proto/burrow.proto")?; - Ok(()) -} diff --git a/burrow/src/auth/mod.rs b/burrow/src/auth/mod.rs deleted file mode 100644 index 74f47ad..0000000 --- a/burrow/src/auth/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod server; diff --git a/burrow/src/auth/server/db.rs b/burrow/src/auth/server/db.rs deleted file mode 100644 index c31c473..0000000 --- a/burrow/src/auth/server/db.rs +++ /dev/null @@ -1,627 +0,0 @@ -use anyhow::{anyhow, Context, Result}; -use argon2::{ - password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString}, - Argon2, -}; -use base64::{engine::general_purpose, Engine as _}; -use rand::RngCore; -use rusqlite::{params, Connection, OptionalExtension}; - -use crate::control::{ - DnsConfig, Hostinfo, LocalAuthResponse, MapRequest, MapResponse, Node, NodeCapMap, - PacketFilter, PeerCapMap, RegisterRequest, UserProfile, -}; - -const CREATE_SCHEMA: &str = r#" -CREATE TABLE IF NOT EXISTS auth_user ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - email TEXT NOT NULL UNIQUE, - display_name TEXT NOT NULL, - profile_pic_url TEXT, - groups_json TEXT NOT NULL DEFAULT '[]', - created_at TEXT NOT NULL DEFAULT (datetime('now')) -); - -CREATE TABLE IF NOT EXISTS auth_local_credential ( - user_id INTEGER PRIMARY KEY REFERENCES auth_user(id) ON DELETE CASCADE, - username TEXT NOT NULL UNIQUE, - password_hash TEXT NOT NULL, - rotated_at TEXT NOT NULL DEFAULT (datetime('now')) -); - -CREATE TABLE IF NOT EXISTS auth_session ( - id TEXT PRIMARY KEY, - user_id INTEGER NOT NULL REFERENCES auth_user(id) ON DELETE CASCADE, - created_at TEXT NOT NULL DEFAULT (datetime('now')), - expires_at TEXT NOT NULL DEFAULT (datetime('now', '+7 days')) -); - -CREATE TABLE IF NOT EXISTS control_node ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - stable_id TEXT NOT NULL UNIQUE, - user_id INTEGER NOT NULL REFERENCES auth_user(id) ON DELETE CASCADE, - name TEXT NOT NULL, - node_key TEXT NOT NULL UNIQUE, - machine_key TEXT, - disco_key TEXT, - addresses_json TEXT 
NOT NULL, - allowed_ips_json TEXT NOT NULL, - endpoints_json TEXT NOT NULL, - home_derp INTEGER, - hostinfo_json TEXT, - tags_json TEXT NOT NULL DEFAULT '[]', - primary_routes_json TEXT NOT NULL DEFAULT '[]', - cap_version INTEGER NOT NULL DEFAULT 1, - cap_map_json TEXT NOT NULL DEFAULT '{}', - peer_cap_map_json TEXT NOT NULL DEFAULT '{}', - machine_authorized INTEGER NOT NULL DEFAULT 1, - node_key_expired INTEGER NOT NULL DEFAULT 0, - created_at TEXT NOT NULL DEFAULT (datetime('now')), - updated_at TEXT NOT NULL DEFAULT (datetime('now')), - last_seen TEXT, - online INTEGER -); -"#; - -#[derive(Clone, Debug)] -pub struct StoredUser { - pub profile: UserProfile, -} - -pub fn init_db(path: &str) -> Result<()> { - let conn = Connection::open(path)?; - conn.execute_batch(CREATE_SCHEMA)?; - Ok(()) -} - -pub fn ensure_local_identity( - path: &str, - username: &str, - email: &str, - display_name: &str, - password: &str, -) -> Result { - let conn = Connection::open(path)?; - conn.execute( - "INSERT INTO auth_user (email, display_name) VALUES (?, ?) - ON CONFLICT(email) DO UPDATE SET display_name = excluded.display_name", - params![email, display_name], - )?; - let user_id: i64 = - conn.query_row("SELECT id FROM auth_user WHERE email = ?", [email], |row| { - row.get(0) - })?; - - let existing_hash: Option = conn - .query_row( - "SELECT password_hash FROM auth_local_credential WHERE user_id = ?", - [user_id], - |row| row.get(0), - ) - .optional()?; - - let password_hash = match existing_hash { - Some(hash) if verify_password(password, &hash) => hash, - _ => hash_password(password)?, - }; - - conn.execute( - "INSERT INTO auth_local_credential (user_id, username, password_hash) - VALUES (?, ?, ?) 
- ON CONFLICT(user_id) DO UPDATE SET username = excluded.username, password_hash = excluded.password_hash, rotated_at = datetime('now')", - params![user_id, username, password_hash], - )?; - - load_user_profile(&conn, user_id) -} - -pub fn authenticate_local( - path: &str, - identifier: &str, - password: &str, -) -> Result> { - let conn = Connection::open(path)?; - let record = conn - .query_row( - "SELECT u.id, u.email, u.display_name, u.profile_pic_url, u.groups_json, c.password_hash - FROM auth_user u - JOIN auth_local_credential c ON c.user_id = u.id - WHERE c.username = ? OR u.email = ?", - params![identifier, identifier], - |row| { - Ok(( - row.get::<_, i64>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - row.get::<_, Option>(3)?, - row.get::<_, String>(4)?, - row.get::<_, String>(5)?, - )) - }, - ) - .optional()?; - - let Some((user_id, email, display_name, profile_pic_url, groups_json, password_hash)) = record - else { - return Ok(None); - }; - - if !verify_password(password, &password_hash) { - return Ok(None); - } - - let token = random_token(); - conn.execute( - "INSERT INTO auth_session (id, user_id) VALUES (?, ?)", - params![token, user_id], - )?; - - Ok(Some(LocalAuthResponse { - access_token: token, - user: UserProfile { - id: user_id, - login_name: email, - display_name, - profile_pic_url, - groups: parse_json(&groups_json)?, - }, - })) -} - -pub fn user_for_session(path: &str, token: &str) -> Result> { - let conn = Connection::open(path)?; - let user_id = conn - .query_row( - "SELECT user_id FROM auth_session WHERE id = ? 
AND expires_at > datetime('now')", - [token], - |row| row.get::<_, i64>(0), - ) - .optional()?; - let Some(user_id) = user_id else { - return Ok(None); - }; - - Ok(Some(load_user(&conn, user_id)?)) -} - -pub fn upsert_node(path: &str, user: &StoredUser, request: &RegisterRequest) -> Result { - let conn = Connection::open(path)?; - let existing = find_existing_node(&conn, user.profile.id, request)?; - let name = Node::preferred_name(request); - let allowed_ips = Node::normalized_allowed_ips(request); - - match existing { - Some((node_id, stable_id, created_at)) => { - conn.execute( - "UPDATE control_node - SET name = ?, node_key = ?, machine_key = ?, disco_key = ?, addresses_json = ?, allowed_ips_json = ?, - endpoints_json = ?, home_derp = ?, hostinfo_json = ?, tags_json = ?, primary_routes_json = ?, - cap_version = ?, cap_map_json = ?, peer_cap_map_json = ?, updated_at = datetime('now'), - last_seen = datetime('now'), online = 1 - WHERE id = ?", - params![ - name, - request.node_key, - request.machine_key, - request.disco_key, - to_json(&request.addresses)?, - to_json(&allowed_ips)?, - to_json(&request.endpoints)?, - request.home_derp, - optional_json(&request.hostinfo)?, - to_json(&request.tags)?, - to_json(&request.primary_routes)?, - request.version.max(1), - to_json(&request.cap_map)?, - to_json(&request.peer_cap_map)?, - node_id, - ], - )?; - load_node(&conn, node_id, stable_id, Some(created_at)) - } - None => { - conn.execute( - "INSERT INTO control_node ( - stable_id, user_id, name, node_key, machine_key, disco_key, addresses_json, allowed_ips_json, - endpoints_json, home_derp, hostinfo_json, tags_json, primary_routes_json, cap_version, - cap_map_json, peer_cap_map_json, last_seen, online - ) VALUES ('', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'), 1)", - params![ - user.profile.id, - name, - request.node_key, - request.machine_key, - request.disco_key, - to_json(&request.addresses)?, - to_json(&allowed_ips)?, - to_json(&request.endpoints)?, 
- request.home_derp, - optional_json(&request.hostinfo)?, - to_json(&request.tags)?, - to_json(&request.primary_routes)?, - request.version.max(1), - to_json(&request.cap_map)?, - to_json(&request.peer_cap_map)?, - ], - )?; - let node_id = conn.last_insert_rowid(); - let stable_id = format!("bn-{node_id}"); - conn.execute( - "UPDATE control_node SET stable_id = ? WHERE id = ?", - params![stable_id, node_id], - )?; - load_node(&conn, node_id, stable_id, None) - } - } -} - -pub fn map_for_node( - path: &str, - user: &StoredUser, - request: &MapRequest, - domain: &str, -) -> Result { - let conn = Connection::open(path)?; - apply_map_request(&conn, user.profile.id, request)?; - let self_row = conn - .query_row( - "SELECT id, stable_id, created_at FROM control_node WHERE user_id = ? AND node_key = ?", - params![user.profile.id, request.node_key], - |row| { - Ok(( - row.get::<_, i64>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - )) - }, - ) - .optional()? - .ok_or_else(|| anyhow!("node not registered"))?; - - let node = load_node(&conn, self_row.0, self_row.1, Some(self_row.2))?; - let peers = load_peers(&conn, node.id)?; - Ok(MapResponse { - map_session_handle: Some(format!("map-{}", node.stable_id)), - seq: Some(request.map_session_seq.unwrap_or(0) + 1), - node, - peers, - domain: domain.to_owned(), - dns: Some(DnsConfig { - resolvers: vec!["1.1.1.1".to_owned(), "1.0.0.1".to_owned()], - search_domains: vec![domain.to_owned()], - magic_dns: true, - }), - packet_filters: vec![PacketFilter::default()], - }) -} - -pub static PATH: &str = "./server.sqlite3"; - -fn apply_map_request(conn: &Connection, user_id: i64, request: &MapRequest) -> Result<()> { - let current = conn - .query_row( - "SELECT id FROM control_node WHERE user_id = ? 
AND node_key = ?", - params![user_id, request.node_key], - |row| row.get::<_, i64>(0), - ) - .optional()?; - - let Some(node_id) = current else { - return Ok(()); - }; - - let hostinfo_json = optional_json(&request.hostinfo)?; - let endpoints_json = to_json(&request.endpoints)?; - conn.execute( - "UPDATE control_node - SET disco_key = COALESCE(?, disco_key), - hostinfo_json = CASE WHEN ? IS NULL THEN hostinfo_json ELSE ? END, - endpoints_json = CASE WHEN ? = '[]' THEN endpoints_json ELSE ? END, - updated_at = datetime('now'), - last_seen = datetime('now'), - online = 1 - WHERE id = ?", - params![ - request.disco_key, - hostinfo_json, - hostinfo_json, - endpoints_json, - endpoints_json, - node_id, - ], - )?; - Ok(()) -} - -fn find_existing_node( - conn: &Connection, - user_id: i64, - request: &RegisterRequest, -) -> Result> { - let mut candidates = vec![request.node_key.as_str()]; - if let Some(old) = request.old_node_key.as_deref() { - if old != request.node_key { - candidates.push(old); - } - } - - for candidate in candidates { - let hit = conn - .query_row( - "SELECT id, stable_id, created_at FROM control_node WHERE user_id = ? AND node_key = ?", - params![user_id, candidate], - |row| { - Ok(( - row.get::<_, i64>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - )) - }, - ) - .optional()?; - if hit.is_some() { - return Ok(hit); - } - } - Ok(None) -} - -fn load_peers(conn: &Connection, self_id: i64) -> Result> { - let mut stmt = conn.prepare( - "SELECT id, stable_id, created_at FROM control_node WHERE id != ? AND machine_authorized = 1 ORDER BY id", - )?; - let peers = stmt - .query_map([self_id], |row| { - Ok(( - row.get::<_, i64>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - )) - })? 
- .collect::>>()?; - peers - .into_iter() - .map(|(id, stable_id, created_at)| load_node(conn, id, stable_id, Some(created_at))) - .collect() -} - -fn load_node( - conn: &Connection, - id: i64, - stable_id: String, - created_at_hint: Option, -) -> Result { - let row = conn.query_row( - "SELECT user_id, name, node_key, machine_key, disco_key, addresses_json, allowed_ips_json, - endpoints_json, home_derp, hostinfo_json, tags_json, primary_routes_json, cap_version, - cap_map_json, peer_cap_map_json, machine_authorized, node_key_expired, - created_at, updated_at, last_seen, online - FROM control_node WHERE id = ?", - [id], - |row| { - Ok(( - row.get::<_, i64>(0)?, - row.get::<_, String>(1)?, - row.get::<_, String>(2)?, - row.get::<_, Option>(3)?, - row.get::<_, Option>(4)?, - row.get::<_, String>(5)?, - row.get::<_, String>(6)?, - row.get::<_, String>(7)?, - row.get::<_, Option>(8)?, - row.get::<_, Option>(9)?, - row.get::<_, String>(10)?, - row.get::<_, String>(11)?, - row.get::<_, i32>(12)?, - row.get::<_, String>(13)?, - row.get::<_, String>(14)?, - row.get::<_, i64>(15)?, - row.get::<_, i64>(16)?, - row.get::<_, String>(17)?, - row.get::<_, String>(18)?, - row.get::<_, Option>(19)?, - row.get::<_, Option>(20)?, - )) - }, - )?; - Ok(Node { - id, - stable_id, - user_id: row.0, - name: row.1, - node_key: row.2, - machine_key: row.3, - disco_key: row.4, - addresses: parse_json(&row.5)?, - allowed_ips: parse_json(&row.6)?, - endpoints: parse_json(&row.7)?, - home_derp: row.8, - hostinfo: row.9.map(|raw| parse_json::(&raw)).transpose()?, - tags: parse_json(&row.10)?, - primary_routes: parse_json(&row.11)?, - cap_version: row.12, - cap_map: parse_json::(&row.13)?, - peer_cap_map: parse_json::(&row.14)?, - machine_authorized: row.15 != 0, - node_key_expired: row.16 != 0, - created_at: Some(created_at_hint.unwrap_or(row.17)), - updated_at: Some(row.18), - last_seen: row.19, - online: row.20.map(|value| value != 0), - }) -} - -fn load_user(conn: &Connection, user_id: i64) -> 
Result { - let profile = load_user_profile(conn, user_id)?; - Ok(StoredUser { profile }) -} - -fn load_user_profile(conn: &Connection, user_id: i64) -> Result { - let row = conn.query_row( - "SELECT email, display_name, profile_pic_url, groups_json FROM auth_user WHERE id = ?", - [user_id], - |row| { - Ok(( - row.get::<_, String>(0)?, - row.get::<_, String>(1)?, - row.get::<_, Option>(2)?, - row.get::<_, String>(3)?, - )) - }, - )?; - Ok(UserProfile { - id: user_id, - login_name: row.0, - display_name: row.1, - profile_pic_url: row.2, - groups: parse_json(&row.3)?, - }) -} - -fn hash_password(password: &str) -> Result { - let salt = SaltString::generate(&mut argon2::password_hash::rand_core::OsRng); - let hash = Argon2::default() - .hash_password(password.as_bytes(), &salt) - .map_err(|err| anyhow!("failed to hash password: {err}"))?; - Ok(hash.to_string()) -} - -fn verify_password(password: &str, password_hash: &str) -> bool { - PasswordHash::new(password_hash) - .ok() - .and_then(|hash| { - Argon2::default() - .verify_password(password.as_bytes(), &hash) - .ok() - }) - .is_some() -} - -fn random_token() -> String { - let mut bytes = [0u8; 32]; - rand::thread_rng().fill_bytes(&mut bytes); - general_purpose::URL_SAFE_NO_PAD.encode(bytes) -} - -fn to_json(value: &T) -> Result { - serde_json::to_string(value).context("failed to serialize json") -} - -fn optional_json(value: &Option) -> Result> { - value.as_ref().map(to_json).transpose() -} - -fn parse_json(value: &str) -> Result { - serde_json::from_str(value) - .with_context(|| format!("failed to decode json payload from '{value}'")) -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::control::{Hostinfo, RegisterRequest}; - use tempfile::TempDir; - - fn temp_db() -> Result<(TempDir, String)> { - let dir = tempfile::tempdir()?; - let db_path = dir.path().join("server.sqlite3"); - Ok((dir, db_path.to_string_lossy().to_string())) - } - - #[test] - fn local_auth_and_map_round_trip() -> Result<()> { - let (_dir, 
db_path) = temp_db()?; - init_db(&db_path)?; - ensure_local_identity( - &db_path, - "contact", - "contact@burrow.net", - "Burrow Contact", - "password-1", - )?; - - let auth = authenticate_local(&db_path, "contact", "password-1")? - .expect("expected login to succeed"); - let user = - user_for_session(&db_path, &auth.access_token)?.expect("expected session to resolve"); - - let node = upsert_node( - &db_path, - &user, - &RegisterRequest { - node_key: "nodekey:aaaa".to_owned(), - machine_key: Some("machinekey:aaaa".to_owned()), - disco_key: Some("discokey:aaaa".to_owned()), - addresses: vec!["100.64.0.1/32".to_owned()], - endpoints: vec!["203.0.113.10:41641".to_owned()], - hostinfo: Some(Hostinfo { - hostname: Some("burrow-dev".to_owned()), - os: Some("linux".to_owned()), - os_version: Some("6.13".to_owned()), - services: vec!["ssh".to_owned()], - request_tags: vec!["tag:dev".to_owned()], - }), - ..RegisterRequest::default() - }, - )?; - assert_eq!(node.name, "burrow-dev"); - assert_eq!(node.allowed_ips, vec!["100.64.0.1/32"]); - - let map = map_for_node( - &db_path, - &user, - &MapRequest { - node_key: "nodekey:aaaa".to_owned(), - stream: true, - endpoints: vec!["203.0.113.10:41641".to_owned()], - ..MapRequest::default() - }, - "burrow.net", - )?; - assert_eq!(map.node.node_key, "nodekey:aaaa"); - assert_eq!(map.domain, "burrow.net"); - assert!(map.dns.expect("dns config").magic_dns); - Ok(()) - } - - #[test] - fn register_can_rotate_node_keys() -> Result<()> { - let (_dir, db_path) = temp_db()?; - init_db(&db_path)?; - ensure_local_identity( - &db_path, - "contact", - "contact@burrow.net", - "Burrow Contact", - "password-1", - )?; - let auth = authenticate_local(&db_path, "contact@burrow.net", "password-1")? 
- .expect("expected login to succeed"); - let user = - user_for_session(&db_path, &auth.access_token)?.expect("expected session to resolve"); - - upsert_node( - &db_path, - &user, - &RegisterRequest { - node_key: "nodekey:old".to_owned(), - addresses: vec!["100.64.0.2/32".to_owned()], - ..RegisterRequest::default() - }, - )?; - - let rotated = upsert_node( - &db_path, - &user, - &RegisterRequest { - node_key: "nodekey:new".to_owned(), - old_node_key: Some("nodekey:old".to_owned()), - addresses: vec!["100.64.0.3/32".to_owned()], - ..RegisterRequest::default() - }, - )?; - assert_eq!(rotated.node_key, "nodekey:new"); - assert_eq!(rotated.addresses, vec!["100.64.0.3/32"]); - Ok(()) - } -} diff --git a/burrow/src/auth/server/mod.rs b/burrow/src/auth/server/mod.rs deleted file mode 100644 index fdffce3..0000000 --- a/burrow/src/auth/server/mod.rs +++ /dev/null @@ -1,430 +0,0 @@ -pub mod db; -pub mod tailscale; - -use std::{env, path::Path}; - -use anyhow::{Context, Result}; -use axum::{ - extract::{Json, Path as AxumPath, Query, State}, - http::{header::AUTHORIZATION, HeaderMap, StatusCode}, - response::IntoResponse, - routing::{get, post}, - Router, -}; -use serde::Deserialize; -use tokio::signal; - -use crate::control::{ - discovery, LocalAuthRequest, LocalAuthResponse, MapRequest, MapResponse, RegisterRequest, - RegisterResponse, TailnetDiscovery, BURROW_TAILNET_DOMAIN, -}; - -#[derive(Clone, Debug)] -pub struct BootstrapIdentity { - pub username: String, - pub email: String, - pub display_name: String, - pub password_file: String, -} - -impl Default for BootstrapIdentity { - fn default() -> Self { - Self { - username: "contact".to_owned(), - email: "contact@burrow.net".to_owned(), - display_name: "Burrow Contact".to_owned(), - password_file: "intake/forgejo_pass_contact_at_burrow_net.txt".to_owned(), - } - } -} - -#[derive(Clone, Debug)] -pub struct AuthServerConfig { - pub listen: String, - pub db_path: String, - pub tailnet_domain: String, - pub bootstrap: 
BootstrapIdentity, -} - -impl Default for AuthServerConfig { - fn default() -> Self { - Self { - listen: "0.0.0.0:8080".to_owned(), - db_path: db::PATH.to_owned(), - tailnet_domain: BURROW_TAILNET_DOMAIN.to_owned(), - bootstrap: BootstrapIdentity::default(), - } - } -} - -impl AuthServerConfig { - pub fn from_env() -> Self { - let mut config = Self::default(); - if let Ok(value) = env::var("BURROW_AUTH_LISTEN") { - config.listen = value; - } - if let Ok(value) = env::var("BURROW_AUTH_DB_PATH") { - config.db_path = value; - } - if let Ok(value) = env::var("BURROW_AUTH_TAILNET_DOMAIN") { - config.tailnet_domain = value; - } - if let Ok(value) = env::var("BURROW_BOOTSTRAP_USERNAME") { - config.bootstrap.username = value; - } - if let Ok(value) = env::var("BURROW_BOOTSTRAP_EMAIL") { - config.bootstrap.email = value; - } - if let Ok(value) = env::var("BURROW_BOOTSTRAP_DISPLAY_NAME") { - config.bootstrap.display_name = value; - } - if let Ok(value) = env::var("BURROW_BOOTSTRAP_PASSWORD_FILE") { - config.bootstrap.password_file = value; - } - config - } - - fn bootstrap_password(&self) -> Result> { - let path = Path::new(&self.bootstrap.password_file); - if !path.exists() { - return Ok(None); - } - let password = std::fs::read_to_string(path).with_context(|| { - format!("failed to read bootstrap password from {}", path.display()) - })?; - let password = password.trim().to_owned(); - if password.is_empty() { - return Ok(None); - } - Ok(Some(password)) - } -} - -#[derive(Clone)] -struct AppState { - config: AuthServerConfig, - tailscale: tailscale::TailscaleBridgeManager, -} - -#[derive(Debug, Deserialize)] -struct TailnetDiscoveryQuery { - email: String, -} - -type AppResult = Result; - -pub async fn serve() -> Result<()> { - serve_with_config(AuthServerConfig::from_env()).await -} - -pub async fn serve_with_config(config: AuthServerConfig) -> Result<()> { - db::init_db(&config.db_path)?; - if let Some(password) = config.bootstrap_password()? 
{ - db::ensure_local_identity( - &config.db_path, - &config.bootstrap.username, - &config.bootstrap.email, - &config.bootstrap.display_name, - &password, - )?; - } - - let app = build_router(config.clone()); - let listener = tokio::net::TcpListener::bind(&config.listen).await?; - log::info!("Starting auth server on {}", config.listen); - axum::serve(listener, app) - .with_graceful_shutdown(shutdown_signal()) - .await?; - Ok(()) -} - -pub fn build_router(config: AuthServerConfig) -> Router { - Router::new() - .route("/healthz", get(healthz)) - .route("/device/new", post(device_new)) - .route("/v1/auth/login", post(login_local)) - .route("/v1/control/register", post(control_register)) - .route("/v1/control/map", post(control_map)) - .route("/v1/tailnet/discover", get(tailnet_discover)) - .route("/v1/tailscale/login/start", post(tailscale_login_start)) - .route("/v1/tailscale/login/:session_id", get(tailscale_login_status)) - .with_state(AppState { - config, - tailscale: tailscale::TailscaleBridgeManager::default(), - }) -} - -async fn login_local( - State(state): State, - Json(request): Json, -) -> AppResult> { - let db_path = state.config.db_path.clone(); - blocking(move || db::authenticate_local(&db_path, &request.identifier, &request.password)) - .await? - .map(Json) - .ok_or_else(|| (StatusCode::UNAUTHORIZED, "invalid credentials".to_owned())) -} - -async fn control_register( - headers: HeaderMap, - State(state): State, - Json(request): Json, -) -> AppResult> { - let token = bearer_token(&headers)?; - let db_path = state.config.db_path.clone(); - let user = blocking({ - let db_path = db_path.clone(); - let token = token.clone(); - move || db::user_for_session(&db_path, &token) - }) - .await? 
- .ok_or_else(|| (StatusCode::UNAUTHORIZED, "unknown session".to_owned()))?; - - let response_user = user.profile.clone(); - let node = blocking(move || db::upsert_node(&db_path, &user, &request)).await?; - Ok(Json(RegisterResponse { - user: response_user, - machine_authorized: node.machine_authorized, - node_key_expired: node.node_key_expired, - auth_url: None, - error: None, - node, - })) -} - -async fn control_map( - headers: HeaderMap, - State(state): State, - Json(request): Json, -) -> AppResult> { - let token = bearer_token(&headers)?; - let db_path = state.config.db_path.clone(); - let domain = state.config.tailnet_domain.clone(); - let user = blocking({ - let db_path = db_path.clone(); - let token = token.clone(); - move || db::user_for_session(&db_path, &token) - }) - .await? - .ok_or_else(|| (StatusCode::UNAUTHORIZED, "unknown session".to_owned()))?; - - let response = blocking(move || db::map_for_node(&db_path, &user, &request, &domain)).await?; - Ok(Json(response)) -} - -async fn tailnet_discover( - Query(query): Query, -) -> AppResult> { - if query.email.trim().is_empty() { - return Err((StatusCode::BAD_REQUEST, "email is required".to_owned())); - } - - let discovery = discovery::discover_tailnet(&query.email) - .await - .map_err(|err| (StatusCode::BAD_GATEWAY, err.to_string()))?; - Ok(Json(discovery)) -} - -async fn tailscale_login_start( - State(state): State, - Json(request): Json, -) -> AppResult> { - let response = state - .tailscale - .start_login(request) - .await - .map_err(internal_error)?; - Ok(Json(response)) -} - -async fn tailscale_login_status( - AxumPath(session_id): AxumPath, - State(state): State, -) -> AppResult> { - state - .tailscale - .status(&session_id) - .await - .map_err(internal_error)? 
- .map(Json) - .ok_or_else(|| (StatusCode::NOT_FOUND, "unknown tailscale login session".to_owned())) -} - -async fn healthz() -> impl IntoResponse { - StatusCode::OK -} - -async fn device_new() -> impl IntoResponse { - StatusCode::OK -} - -async fn blocking(work: F) -> AppResult -where - F: FnOnce() -> Result + Send + 'static, - T: Send + 'static, -{ - tokio::task::spawn_blocking(work) - .await - .map_err(|err| (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()))? - .map_err(internal_error) -} - -fn internal_error(err: anyhow::Error) -> (StatusCode, String) { - (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()) -} - -fn bearer_token(headers: &HeaderMap) -> AppResult { - let value = headers.get(AUTHORIZATION).ok_or_else(|| { - ( - StatusCode::UNAUTHORIZED, - "missing authorization header".to_owned(), - ) - })?; - let value = value.to_str().map_err(|_| { - ( - StatusCode::BAD_REQUEST, - "invalid authorization header".to_owned(), - ) - })?; - value - .strip_prefix("Bearer ") - .map(ToOwned::to_owned) - .ok_or_else(|| (StatusCode::UNAUTHORIZED, "expected bearer token".to_owned())) -} - -async fn shutdown_signal() { - let ctrl_c = async { - signal::ctrl_c() - .await - .expect("failed to install Ctrl+C handler"); - }; - - #[cfg(unix)] - let terminate = async { - signal::unix::signal(signal::unix::SignalKind::terminate()) - .expect("failed to install signal handler") - .recv() - .await; - }; - - #[cfg(not(unix))] - let terminate = std::future::pending::<()>(); - - tokio::select! 
{ - _ = ctrl_c => {}, - _ = terminate => {}, - } -} - -#[cfg(test)] -mod tests { - use super::*; - use axum::{ - body::{to_bytes, Body}, - http::{Request, StatusCode}, - }; - use tempfile::tempdir; - use tower::ServiceExt; - - #[tokio::test] - async fn login_register_and_map_round_trip() -> Result<()> { - let dir = tempdir()?; - let password_file = dir.path().join("bootstrap-password.txt"); - std::fs::write(&password_file, "bootstrap-pass\n")?; - let db_path = dir.path().join("server.sqlite3"); - let config = AuthServerConfig { - listen: "127.0.0.1:0".to_owned(), - db_path: db_path.to_string_lossy().to_string(), - tailnet_domain: "burrow.net".to_owned(), - bootstrap: BootstrapIdentity { - password_file: password_file.to_string_lossy().to_string(), - ..BootstrapIdentity::default() - }, - }; - - db::init_db(&config.db_path)?; - let password = config.bootstrap_password()?.expect("bootstrap password"); - db::ensure_local_identity( - &config.db_path, - &config.bootstrap.username, - &config.bootstrap.email, - &config.bootstrap.display_name, - &password, - )?; - - let app = build_router(config); - - let response = app - .clone() - .oneshot( - Request::post("/v1/auth/login") - .header("content-type", "application/json") - .body(Body::from(serde_json::to_vec(&LocalAuthRequest { - identifier: "contact".to_owned(), - password: "bootstrap-pass".to_owned(), - })?))?, - ) - .await?; - assert_eq!(response.status(), StatusCode::OK); - let login: LocalAuthResponse = - serde_json::from_slice(&to_bytes(response.into_body(), usize::MAX).await?)?; - - let response = app - .clone() - .oneshot( - Request::post("/v1/control/register") - .header("content-type", "application/json") - .header("authorization", format!("Bearer {}", login.access_token)) - .body(Body::from(serde_json::to_vec(&RegisterRequest { - node_key: "nodekey:1234".to_owned(), - machine_key: Some("machinekey:1234".to_owned()), - addresses: vec!["100.64.0.10/32".to_owned()], - endpoints: 
vec!["198.51.100.10:41641".to_owned()], - hostinfo: Some(crate::control::Hostinfo { - hostname: Some("devbox".to_owned()), - os: Some("linux".to_owned()), - os_version: Some("6.13".to_owned()), - services: vec!["ssh".to_owned()], - request_tags: vec!["tag:dev".to_owned()], - }), - ..RegisterRequest::default() - })?))?, - ) - .await?; - assert_eq!(response.status(), StatusCode::OK); - - let response = app - .oneshot( - Request::post("/v1/control/map") - .header("content-type", "application/json") - .header("authorization", format!("Bearer {}", login.access_token)) - .body(Body::from(serde_json::to_vec(&MapRequest { - node_key: "nodekey:1234".to_owned(), - stream: true, - endpoints: vec!["198.51.100.10:41641".to_owned()], - ..MapRequest::default() - })?))?, - ) - .await?; - assert_eq!(response.status(), StatusCode::OK); - let map: MapResponse = - serde_json::from_slice(&to_bytes(response.into_body(), usize::MAX).await?)?; - assert_eq!(map.domain, "burrow.net"); - assert_eq!(map.node.name, "devbox"); - assert!(map.dns.expect("dns").magic_dns); - Ok(()) - } - - #[tokio::test] - async fn tailnet_discover_requires_email() -> Result<()> { - let app = build_router(AuthServerConfig::default()); - let response = app - .oneshot( - Request::get("/v1/tailnet/discover?email=") - .body(Body::empty())?, - ) - .await?; - assert_eq!(response.status(), StatusCode::BAD_REQUEST); - Ok(()) - } -} diff --git a/burrow/src/auth/server/tailscale.rs b/burrow/src/auth/server/tailscale.rs deleted file mode 100644 index d08c807..0000000 --- a/burrow/src/auth/server/tailscale.rs +++ /dev/null @@ -1,519 +0,0 @@ -use std::{ - collections::HashMap, - env, - path::{Path, PathBuf}, - process::Stdio, - sync::Arc, - time::Duration, -}; - -use anyhow::{anyhow, Context, Result}; -use rand::RngCore; -use reqwest::Client; -use serde::{Deserialize, Serialize}; -use tokio::{ - io::{AsyncBufReadExt, BufReader}, - process::{Child, Command}, - sync::Mutex, - task::JoinHandle, -}; - -#[derive(Clone, Debug, 
Default, Deserialize)] -pub struct TailscaleLoginStartRequest { - pub account_name: String, - pub identity_name: String, - #[serde(default)] - pub hostname: Option, - #[serde(default)] - pub control_url: Option, - #[serde(default)] - pub packet_socket: Option, -} - -#[derive(Clone, Debug, Serialize, Deserialize, Default)] -pub struct TailscaleLoginStatus { - pub backend_state: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub auth_url: Option, - #[serde(default)] - pub running: bool, - #[serde(default)] - pub needs_login: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub tailnet_name: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub magic_dns_suffix: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub self_dns_name: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub tailscale_ips: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub health: Vec, -} - -#[derive(Clone, Debug, Serialize)] -pub struct TailscaleLoginStartResponse { - pub session_id: String, - pub status: TailscaleLoginStatus, -} - -pub struct TailscaleLoginSession { - pub session_id: String, - pub helper: Arc, - pub status: TailscaleLoginStatus, -} - -#[derive(Clone, Default)] -pub struct TailscaleBridgeManager { - client: Client, - sessions: Arc>>>, -} - -pub struct TailscaleHelperProcess { - session_id: String, - listen_url: String, - packet_socket: Option, - control_url: Option, - state_dir: PathBuf, - child: Arc>, - _stderr_task: JoinHandle<()>, -} - -type ManagedSession = TailscaleHelperProcess; - -#[derive(Debug, Deserialize)] -struct HelperHello { - listen_addr: String, - #[serde(default)] - packet_socket: Option, -} - -impl TailscaleBridgeManager { - pub async fn start_login( - &self, - request: TailscaleLoginStartRequest, - ) -> Result { - let session = self.ensure_session(request).await?; - Ok(TailscaleLoginStartResponse { - session_id: session.session_id, - 
status: session.status, - }) - } - - pub async fn ensure_session( - &self, - request: TailscaleLoginStartRequest, - ) -> Result { - let key = session_key_for_request(&request); - let requested_packet_socket = request - .packet_socket - .as_deref() - .map(str::trim) - .filter(|value| !value.is_empty()); - let requested_control_url = request - .control_url - .as_deref() - .map(str::trim) - .filter(|value| !value.is_empty()); - - if let Some(existing) = self.sessions.lock().await.get(&key).cloned() { - let needs_restart_for_socket = match (requested_packet_socket, existing.packet_socket()) - { - (Some(requested), Some(current)) => current != Path::new(requested), - (Some(_), None) => true, - _ => false, - }; - let needs_restart_for_control_url = - requested_control_url != existing.control_url().map(|value| value.trim()); - - if !needs_restart_for_socket && !needs_restart_for_control_url { - match self.fetch_status(existing.as_ref()).await { - Ok(status) => { - return Ok(TailscaleLoginSession { - session_id: existing.session_id.clone(), - helper: existing, - status, - }); - } - Err(err) => { - log::warn!( - "tailscale login session {} is stale, restarting: {err}", - existing.session_id - ); - } - } - } else { - log::info!( - "tailscale login session {} no longer matches requested transport, restarting", - existing.session_id - ); - } - - self.sessions.lock().await.remove(&key); - let _ = self.shutdown_session(existing.as_ref()).await; - } - - let session = Arc::new(spawn_tailscale_helper(&request).await?); - let status = self.wait_for_status(session.as_ref()).await?; - let response = TailscaleLoginSession { - session_id: session.session_id.clone(), - helper: session.clone(), - status, - }; - - self.sessions.lock().await.insert(key, session); - Ok(response) - } - - pub async fn status(&self, session_id: &str) -> Result> { - let session = { - let sessions = self.sessions.lock().await; - sessions - .values() - .find(|session| session.session_id == session_id) - .cloned() 
- }; - - match session { - Some(session) => match self.fetch_status(session.as_ref()).await { - Ok(status) => Ok(Some(status)), - Err(err) => { - self.remove_session_by_id(session_id).await; - Err(err) - } - }, - None => Ok(None), - } - } - - pub async fn cancel(&self, session_id: &str) -> Result { - let session = self.remove_session_by_id(session_id).await; - match session { - Some(session) => { - self.shutdown_session(session.as_ref()).await?; - Ok(true) - } - None => Ok(false), - } - } - - async fn wait_for_status(&self, session: &ManagedSession) -> Result { - let mut last_error = None; - let mut last_status = None; - for _ in 0..40 { - match session.status_with_client(&self.client).await { - Ok(status) if status.running || status.auth_url.is_some() => return Ok(status), - Ok(status) => last_status = Some(status), - Err(err) => last_error = Some(err), - } - tokio::time::sleep(Duration::from_millis(250)).await; - } - if let Some(status) = last_status { - return Ok(status); - } - Err(last_error.unwrap_or_else(|| anyhow!("tailscale helper did not become ready"))) - } - - async fn fetch_status(&self, session: &ManagedSession) -> Result { - session.status_with_client(&self.client).await - } - - async fn remove_session_by_id(&self, session_id: &str) -> Option> { - let mut sessions = self.sessions.lock().await; - let key = sessions - .iter() - .find_map(|(key, session)| (session.session_id == session_id).then(|| key.clone()))?; - sessions.remove(&key) - } - - async fn shutdown_session(&self, session: &ManagedSession) -> Result<()> { - session.shutdown_with_client(&self.client).await - } -} - -impl TailscaleHelperProcess { - pub fn session_id(&self) -> &str { - &self.session_id - } - - pub fn packet_socket(&self) -> Option<&Path> { - self.packet_socket.as_deref() - } - - pub fn control_url(&self) -> Option<&str> { - self.control_url.as_deref() - } - - pub fn state_dir(&self) -> &Path { - &self.state_dir - } - - pub async fn status(&self) -> Result { - 
self.status_with_client(&Client::new()).await - } - - pub async fn shutdown(&self) -> Result<()> { - self.shutdown_with_client(&Client::new()).await - } - - async fn status_with_client(&self, client: &Client) -> Result { - let mut child = self.child.lock().await; - if let Some(status) = child.try_wait()? { - return Err(anyhow!( - "tailscale helper exited with status {status} for {}", - self.state_dir.display() - )); - } - drop(child); - - let response = client - .get(format!("{}/status", self.listen_url)) - .send() - .await - .context("failed to query tailscale helper status")? - .error_for_status() - .context("tailscale helper status request failed")?; - - let status = response - .json::() - .await - .context("invalid tailscale helper status response")?; - - log::info!( - "tailscale helper status session={} backend_state={} running={} needs_login={} auth_url={:?}", - self.session_id, - status.backend_state, - status.running, - status.needs_login, - status.auth_url - ); - Ok(status) - } - - async fn shutdown_with_client(&self, client: &Client) -> Result<()> { - let _ = client.post(format!("{}/shutdown", self.listen_url)).send().await; - - for _ in 0..10 { - let mut child = self.child.lock().await; - if child.try_wait()?.is_some() { - return Ok(()); - } - drop(child); - tokio::time::sleep(Duration::from_millis(100)).await; - } - - let mut child = self.child.lock().await; - child - .start_kill() - .context("failed to kill tailscale helper")?; - let _ = child.wait().await; - Ok(()) - } -} - -pub async fn spawn_tailscale_helper( - request: &TailscaleLoginStartRequest, -) -> Result { - let state_dir = state_root().join(session_dir_name(request)); - tokio::fs::create_dir_all(&state_dir) - .await - .with_context(|| format!("failed to create {}", state_dir.display()))?; - - let mut child = helper_command(request, &state_dir)? 
- .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn() - .context("failed to spawn tailscale login helper")?; - - let stdout = child - .stdout - .take() - .context("tailscale helper stdout unavailable")?; - let stderr = child - .stderr - .take() - .context("tailscale helper stderr unavailable")?; - - let hello_line = tokio::time::timeout(Duration::from_secs(20), async move { - let mut lines = BufReader::new(stdout).lines(); - lines.next_line().await - }) - .await - .context("timed out waiting for tailscale helper startup")?? - .context("tailscale helper exited before reporting listen address")?; - - let hello: HelperHello = - serde_json::from_str(&hello_line).context("invalid tailscale helper startup line")?; - - let stderr_task = tokio::spawn(async move { - let mut lines = BufReader::new(stderr).lines(); - while let Ok(Some(line)) = lines.next_line().await { - log::info!("tailscale-login-bridge: {line}"); - } - }); - - Ok(TailscaleHelperProcess { - session_id: random_session_id(), - listen_url: format!("http://{}", hello.listen_addr), - packet_socket: hello.packet_socket.map(PathBuf::from), - control_url: request.control_url.clone(), - state_dir, - child: Arc::new(Mutex::new(child)), - _stderr_task: stderr_task, - }) -} - -fn helper_command(request: &TailscaleLoginStartRequest, state_dir: &Path) -> Result { - let mut command = if let Ok(path) = env::var("BURROW_TAILSCALE_HELPER") { - Command::new(path) - } else { - let helper_dir = Path::new(env!("CARGO_MANIFEST_DIR")) - .join("..") - .join("Tools/tailscale-login-bridge"); - let mut command = Command::new("go"); - command.current_dir(helper_dir).arg("run").arg("."); - command.env("GOWORK", "off"); - command - }; - - command - .arg("--listen") - .arg("127.0.0.1:0") - .arg("--state-dir") - .arg(state_dir) - .arg("--hostname") - .arg(default_hostname(request)); - - if let Some(control_url) = request.control_url.as_deref() { - let trimmed = control_url.trim(); - if !trimmed.is_empty() { - 
command.arg("--control-url").arg(trimmed); - } - } - - if let Some(packet_socket) = request.packet_socket.as_deref() { - let trimmed = packet_socket.trim(); - if !trimmed.is_empty() { - command.arg("--packet-socket").arg(trimmed); - } - } - - Ok(command) -} - -pub(crate) fn packet_socket_path(request: &TailscaleLoginStartRequest) -> PathBuf { - state_root().join(session_dir_name(request)).join("packet.sock") -} - -pub(crate) fn state_root() -> PathBuf { - if let Ok(path) = env::var("BURROW_TAILSCALE_STATE_ROOT") { - return PathBuf::from(path); - } - - let home = env::var_os("HOME") - .map(PathBuf::from) - .unwrap_or_else(|| PathBuf::from(".")); - if cfg!(target_vendor = "apple") { - return home - .join("Library") - .join("Application Support") - .join("Burrow") - .join("tailscale"); - } - home.join(".local") - .join("share") - .join("burrow") - .join("tailscale") -} - -pub(crate) fn session_dir_name(request: &TailscaleLoginStartRequest) -> String { - format!( - "{}-{}-{}", - slug(&request.account_name), - slug(&request.identity_name), - slug(control_scope(request)) - ) -} - -fn session_key_for_request(request: &TailscaleLoginStartRequest) -> String { - format!( - "{}:{}:{}", - request.account_name, - request.identity_name, - control_scope(request) - ) -} - -fn control_scope(request: &TailscaleLoginStartRequest) -> &str { - request - .control_url - .as_deref() - .map(str::trim) - .filter(|value| !value.is_empty()) - .unwrap_or("tailscale-managed") -} - -pub(crate) fn default_hostname(request: &TailscaleLoginStartRequest) -> String { - request - .hostname - .as_deref() - .filter(|value| !value.trim().is_empty()) - .map(ToOwned::to_owned) - .unwrap_or_else(|| format!("burrow-{}", slug(&request.identity_name))) -} - -fn random_session_id() -> String { - let mut bytes = [0_u8; 12]; - rand::thread_rng().fill_bytes(&mut bytes); - bytes.iter().map(|byte| format!("{byte:02x}")).collect() -} - -fn slug(input: &str) -> String { - let mut output = 
String::with_capacity(input.len()); - for ch in input.chars() { - if ch.is_ascii_alphanumeric() { - output.push(ch.to_ascii_lowercase()); - } else if ch == '-' || ch == '_' { - output.push('-'); - } - } - if output.is_empty() { - "default".to_owned() - } else { - output - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn slug_sanitizes_input() { - assert_eq!(slug("Apple Phone"), "applephone"); - assert_eq!(slug("default_identity"), "default-identity"); - assert_eq!(slug(""), "default"); - } - - #[test] - fn state_dir_is_scoped_by_account_identity_and_control_plane() { - let request = TailscaleLoginStartRequest { - account_name: "default".to_owned(), - identity_name: "apple".to_owned(), - hostname: None, - control_url: None, - packet_socket: None, - }; - assert_eq!(session_dir_name(&request), "default-apple-tailscale-managed"); - assert_eq!(default_hostname(&request), "burrow-apple"); - - let custom_request = TailscaleLoginStartRequest { - control_url: Some("https://ts.burrow.net".to_owned()), - ..request - }; - assert_eq!( - session_dir_name(&custom_request), - "default-apple-httpstsburrownet" - ); - } -} diff --git a/burrow/src/control/config.rs b/burrow/src/control/config.rs deleted file mode 100644 index 3862bcd..0000000 --- a/burrow/src/control/config.rs +++ /dev/null @@ -1,87 +0,0 @@ -use anyhow::{Context, Result}; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -pub enum TailnetProvider { - Tailscale, - Headscale, - Burrow, -} - -impl Default for TailnetProvider { - fn default() -> Self { - Self::Tailscale - } -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct TailnetConfig { - #[serde(default)] - pub provider: TailnetProvider, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub authority: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub account: Option, - #[serde(default, 
skip_serializing_if = "Option::is_none")] - pub identity: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub tailnet: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hostname: Option, -} - -impl TailnetConfig { - pub fn from_slice(bytes: &[u8]) -> Result { - let payload = std::str::from_utf8(bytes).context("tailnet payload must be valid UTF-8")?; - Self::from_str(payload) - } - - pub fn from_str(payload: &str) -> Result { - let trimmed = payload.trim(); - if trimmed.starts_with('{') { - return serde_json::from_str(trimmed).context("invalid tailnet JSON payload"); - } - toml::from_str(trimmed).context("invalid tailnet TOML payload") - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parses_json_payload() { - let config = TailnetConfig::from_str( - r#"{ - "provider":"tailscale", - "account":"default", - "identity":"apple", - "tailnet":"example.ts.net", - "hostname":"burrow-phone" - }"#, - ) - .unwrap(); - assert_eq!(config.provider, TailnetProvider::Tailscale); - assert_eq!(config.account.as_deref(), Some("default")); - assert_eq!(config.identity.as_deref(), Some("apple")); - } - - #[test] - fn parses_toml_payload() { - let config = TailnetConfig::from_str( - r#" -provider = "headscale" -authority = "https://headscale.example.com" -account = "default" -identity = "apple" -"#, - ) - .unwrap(); - assert_eq!(config.provider, TailnetProvider::Headscale); - assert_eq!( - config.authority.as_deref(), - Some("https://headscale.example.com") - ); - } -} diff --git a/burrow/src/control/discovery.rs b/burrow/src/control/discovery.rs deleted file mode 100644 index d044a62..0000000 --- a/burrow/src/control/discovery.rs +++ /dev/null @@ -1,359 +0,0 @@ -use anyhow::{anyhow, Context, Result}; -use reqwest::{Client, StatusCode, Url}; -use serde::{Deserialize, Serialize}; -use tracing::{debug, info}; - -use super::TailnetProvider; - -pub const TAILNET_DISCOVERY_REL: &str = 
"https://burrow.net/rel/tailnet-control-server"; -const TAILNET_DISCOVERY_PATH: &str = "/.well-known/burrow-tailnet"; -const WEBFINGER_PATH: &str = "/.well-known/webfinger"; -const MANAGED_TAILSCALE_AUTHORITY: &str = "controlplane.tailscale.com"; - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub struct TailnetDiscovery { - pub domain: String, - pub provider: TailnetProvider, - pub authority: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub oidc_issuer: Option, -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] -pub struct TailnetAuthorityProbe { - pub authority: String, - pub status_code: i32, - pub summary: String, - pub detail: String, - pub reachable: bool, -} - -#[derive(Clone, Debug, Default, Deserialize)] -struct WebFingerDocument { - #[serde(default)] - links: Vec, -} - -#[derive(Clone, Debug, Default, Deserialize)] -struct WebFingerLink { - #[serde(default)] - rel: String, - #[serde(default)] - href: Option, -} - -pub async fn discover_tailnet(email: &str) -> Result { - let domain = email_domain(email)?; - info!(%email, %domain, "tailnet discovery requested"); - let base_url = Url::parse(&format!("https://{domain}")) - .with_context(|| format!("invalid discovery domain {domain}"))?; - let client = Client::builder() - .user_agent("burrow-tailnet-discovery") - .timeout(std::time::Duration::from_secs(10)) - .build() - .context("failed to build tailnet discovery client")?; - discover_tailnet_at(&client, email, &base_url).await -} - -pub fn normalize_authority(authority: &str) -> String { - let trimmed = authority.trim(); - if trimmed.contains("://") { - trimmed.to_owned() - } else { - format!("https://{trimmed}") - } -} - -pub fn is_managed_tailscale_authority(authority: &str) -> bool { - let normalized = normalize_authority(authority) - .trim_end_matches('/') - .to_ascii_lowercase(); - normalized == format!("https://{MANAGED_TAILSCALE_AUTHORITY}") - || normalized == 
format!("http://{MANAGED_TAILSCALE_AUTHORITY}") -} - -pub async fn probe_tailnet_authority(authority: &str) -> Result { - let authority = normalize_authority(authority); - if is_managed_tailscale_authority(&authority) { - return Ok(TailnetAuthorityProbe { - authority, - status_code: 200, - summary: "Tailscale-managed control plane".to_owned(), - detail: "Using Tailscale's default login server.".to_owned(), - reachable: true, - }); - } - - let base_url = - Url::parse(&authority).with_context(|| format!("invalid tailnet authority {authority}"))?; - let client = Client::builder() - .user_agent("burrow-tailnet-probe") - .timeout(std::time::Duration::from_secs(10)) - .build() - .context("failed to build tailnet authority probe client")?; - - if let Some(status) = - probe_url(&client, base_url.join("/health")?, &authority, "Tailnet server reachable").await? - { - return Ok(status); - } - - if let Some(status) = probe_url( - &client, - base_url.clone(), - &authority, - "Tailnet server reachable", - ) - .await? - { - return Ok(status); - } - - Err(anyhow!("could not connect to the server")) -} - -pub async fn discover_tailnet_at( - client: &Client, - email: &str, - base_url: &Url, -) -> Result { - let domain = email_domain(email)?; - debug!(%email, %domain, base_url = %base_url, "starting tailnet domain discovery"); - - if let Some(discovery) = discover_well_known(client, base_url).await? { - info!( - %email, - %domain, - authority = %discovery.authority, - provider = ?discovery.provider, - "resolved tailnet discovery from well-known document" - ); - return Ok(TailnetDiscovery { domain, ..discovery }); - } - - if let Some(authority) = discover_webfinger(client, email, base_url).await? 
{ - info!(%email, %domain, %authority, "resolved tailnet discovery from webfinger"); - return Ok(TailnetDiscovery { - domain, - provider: inferred_provider(Some(&authority), None), - authority, - oidc_issuer: None, - }); - } - - Err(anyhow!("no tailnet discovery metadata found for {domain}")) -} - -pub fn email_domain(email: &str) -> Result { - let trimmed = email.trim(); - let (_, domain) = trimmed - .rsplit_once('@') - .ok_or_else(|| anyhow!("email address must include a domain"))?; - let domain = domain.trim().trim_matches('.').to_ascii_lowercase(); - if domain.is_empty() { - return Err(anyhow!("email address must include a domain")); - } - Ok(domain) -} - -pub fn inferred_provider( - authority: Option<&str>, - explicit: Option<&TailnetProvider>, -) -> TailnetProvider { - if matches!(explicit, Some(TailnetProvider::Burrow)) { - return TailnetProvider::Burrow; - } - if authority.is_some_and(is_managed_tailscale_authority) { - return TailnetProvider::Tailscale; - } - TailnetProvider::Headscale -} - -async fn discover_well_known(client: &Client, base_url: &Url) -> Result> { - let url = base_url - .join(TAILNET_DISCOVERY_PATH) - .context("failed to build tailnet discovery URL")?; - debug!(%url, "requesting tailnet well-known document"); - let response = client - .get(url) - .header("accept", "application/json") - .send() - .await - .context("tailnet well-known request failed")?; - - match response.status() { - StatusCode::OK => response - .json::() - .await - .context("invalid tailnet discovery document") - .map(Some), - StatusCode::NOT_FOUND => Ok(None), - status => Err(anyhow!("tailnet well-known lookup failed with HTTP {status}")), - } -} - -async fn discover_webfinger(client: &Client, email: &str, base_url: &Url) -> Result> { - let mut url = base_url - .join(WEBFINGER_PATH) - .context("failed to build webfinger URL")?; - url.query_pairs_mut() - .append_pair("resource", &format!("acct:{email}")) - .append_pair("rel", TAILNET_DISCOVERY_REL); - debug!(%email, url = 
%url, "requesting tailnet webfinger document"); - - let response = client - .get(url) - .header("accept", "application/jrd+json, application/json") - .send() - .await - .context("tailnet webfinger request failed")?; - - match response.status() { - StatusCode::OK => { - let document = response - .json::() - .await - .context("invalid webfinger document")?; - Ok(document - .links - .into_iter() - .find(|link| link.rel == TAILNET_DISCOVERY_REL) - .and_then(|link| link.href) - .filter(|href| !href.trim().is_empty())) - } - StatusCode::NOT_FOUND => Ok(None), - status => Err(anyhow!("tailnet webfinger lookup failed with HTTP {status}")), - } -} - -async fn probe_url( - client: &Client, - url: Url, - authority: &str, - summary: &str, -) -> Result> { - let response = match client - .get(url) - .header("accept", "application/json") - .send() - .await - { - Ok(response) => response, - Err(_) => return Ok(None), - }; - - let status = response.status(); - if !status.is_success() { - return Ok(None); - } - - let detail = response.text().await.unwrap_or_default().trim().to_owned(); - Ok(Some(TailnetAuthorityProbe { - authority: authority.to_owned(), - status_code: i32::from(status.as_u16()), - summary: summary.to_owned(), - detail, - reachable: true, - })) -} - -#[cfg(test)] -mod tests { - use axum::{routing::get, Router}; - use serde_json::json; - use tokio::net::TcpListener; - - use super::*; - - #[test] - fn extracts_domain_from_email() { - assert_eq!(email_domain("Contact@Burrow.net").unwrap(), "burrow.net"); - assert!(email_domain("contact").is_err()); - } - - #[test] - fn detects_managed_tailscale_authority() { - assert!(is_managed_tailscale_authority("controlplane.tailscale.com")); - assert!(is_managed_tailscale_authority("https://controlplane.tailscale.com/")); - assert!(!is_managed_tailscale_authority("https://ts.burrow.net")); - } - - #[tokio::test] - async fn discovers_from_well_known_document() -> Result<()> { - let router = Router::new().route( - 
TAILNET_DISCOVERY_PATH, - get(|| async { - axum::Json(json!({ - "domain": "burrow.net", - "provider": "headscale", - "authority": "https://ts.burrow.net", - "oidc_issuer": "https://auth.burrow.net/application/o/ts/" - })) - }), - ); - - let listener = TcpListener::bind("127.0.0.1:0").await?; - let base_url = Url::parse(&format!("http://{}", listener.local_addr()?))?; - let server = tokio::spawn(async move { axum::serve(listener, router).await }); - - let client = Client::builder().build()?; - let discovery = discover_tailnet_at(&client, "contact@burrow.net", &base_url).await?; - assert_eq!(discovery.provider, TailnetProvider::Headscale); - assert_eq!(discovery.authority, "https://ts.burrow.net"); - assert_eq!(discovery.domain, "burrow.net"); - - server.abort(); - Ok(()) - } - - #[tokio::test] - async fn falls_back_to_webfinger_authority() -> Result<()> { - let router = Router::new() - .route( - TAILNET_DISCOVERY_PATH, - get(|| async { (StatusCode::NOT_FOUND, "") }), - ) - .route( - WEBFINGER_PATH, - get(|| async { - axum::Json(json!({ - "subject": "acct:contact@burrow.net", - "links": [ - { - "rel": TAILNET_DISCOVERY_REL, - "href": "https://ts.burrow.net" - } - ] - })) - }), - ); - - let listener = TcpListener::bind("127.0.0.1:0").await?; - let base_url = Url::parse(&format!("http://{}", listener.local_addr()?))?; - let server = tokio::spawn(async move { axum::serve(listener, router).await }); - - let client = Client::builder().build()?; - let discovery = discover_tailnet_at(&client, "contact@burrow.net", &base_url).await?; - assert_eq!(discovery.provider, TailnetProvider::Headscale); - assert_eq!(discovery.authority, "https://ts.burrow.net"); - - server.abort(); - Ok(()) - } - - #[tokio::test] - async fn probes_custom_authority() -> Result<()> { - let router = Router::new().route("/health", get(|| async { "ok" })); - let listener = TcpListener::bind("127.0.0.1:0").await?; - let authority = format!("http://{}", listener.local_addr()?); - let server = 
tokio::spawn(async move { axum::serve(listener, router).await }); - - let status = probe_tailnet_authority(&authority).await?; - assert_eq!(status.authority, authority); - assert_eq!(status.status_code, 200); - assert!(status.reachable); - - server.abort(); - Ok(()) - } -} diff --git a/burrow/src/control/mod.rs b/burrow/src/control/mod.rs deleted file mode 100644 index 472f673..0000000 --- a/burrow/src/control/mod.rs +++ /dev/null @@ -1,255 +0,0 @@ -pub mod config; -pub mod discovery; - -use std::collections::BTreeMap; - -use serde::{Deserialize, Serialize}; -use serde_json::Value; - -pub use config::{TailnetConfig, TailnetProvider}; -pub use discovery::{TailnetDiscovery, TAILNET_DISCOVERY_REL}; - -pub const BURROW_CAPABILITY_VERSION: i32 = 1; -pub const BURROW_TAILNET_DOMAIN: &str = "burrow.net"; - -pub type NodeCapMap = BTreeMap>; -pub type PeerCapMap = BTreeMap>; - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct Hostinfo { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hostname: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub os: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub os_version: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub services: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub request_tags: Vec, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct UserProfile { - pub id: i64, - pub login_name: String, - pub display_name: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub profile_pic_url: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub groups: Vec, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct RegisterAuth { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub auth_key: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub 
oauth_access_token: Option, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] -pub struct Node { - pub id: i64, - pub stable_id: String, - pub name: String, - pub user_id: i64, - pub node_key: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub machine_key: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub disco_key: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub addresses: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub allowed_ips: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub endpoints: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub home_derp: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hostinfo: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub tags: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub primary_routes: Vec, - #[serde(default = "default_capability_version")] - pub cap_version: i32, - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub cap_map: NodeCapMap, - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub peer_cap_map: PeerCapMap, - #[serde(default)] - pub machine_authorized: bool, - #[serde(default)] - pub node_key_expired: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub created_at: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub updated_at: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub last_seen: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub online: Option, -} - -impl Node { - pub fn preferred_name(request: &RegisterRequest) -> String { - if let Some(name) = request.name.as_deref() { - return name.to_owned(); - } - if let Some(hostname) = request - .hostinfo - .as_ref() - .and_then(|hostinfo| hostinfo.hostname.as_deref()) - { - return hostname.to_owned(); - } - 
format!("node-{}", short_key(&request.node_key)) - } - - pub fn normalized_allowed_ips(request: &RegisterRequest) -> Vec { - if request.allowed_ips.is_empty() { - return request.addresses.clone(); - } - request.allowed_ips.clone() - } -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct RegisterRequest { - #[serde(default = "default_capability_version")] - pub version: i32, - pub node_key: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub old_node_key: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub machine_key: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub disco_key: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub auth: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub expiry: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub followup: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hostinfo: Option, - #[serde(default)] - pub ephemeral: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub tailnet: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub name: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub addresses: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub allowed_ips: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub endpoints: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub home_derp: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub tags: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub primary_routes: Vec, - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub cap_map: NodeCapMap, - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub peer_cap_map: PeerCapMap, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] 
-pub struct RegisterResponse { - pub user: UserProfile, - pub node: Node, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub auth_url: Option, - pub machine_authorized: bool, - pub node_key_expired: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub error: Option, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct MapRequest { - #[serde(default = "default_capability_version")] - pub version: i32, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub compress: Option, - #[serde(default)] - pub keep_alive: bool, - pub node_key: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub disco_key: Option, - #[serde(default)] - pub stream: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hostinfo: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub map_session_handle: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub map_session_seq: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub endpoints: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub debug_flags: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub connection_handle: Option, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct DnsConfig { - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub resolvers: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub search_domains: Vec, - #[serde(default)] - pub magic_dns: bool, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct PacketFilter { - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub sources: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub destinations: Vec, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub protocols: Vec, -} - -#[derive(Clone, Debug, Default, Serialize, 
Deserialize, PartialEq)] -pub struct MapResponse { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub map_session_handle: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub seq: Option, - pub node: Node, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub peers: Vec, - pub domain: String, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub dns: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub packet_filters: Vec, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct LocalAuthRequest { - pub identifier: String, - pub password: String, -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct LocalAuthResponse { - pub access_token: String, - pub user: UserProfile, -} - -fn default_capability_version() -> i32 { - BURROW_CAPABILITY_VERSION -} - -fn short_key(key: &str) -> String { - key.chars().take(8).collect() -} diff --git a/burrow/src/daemon/apple.rs b/burrow/src/daemon/apple.rs index f369ea9..9460613 100644 --- a/burrow/src/daemon/apple.rs +++ b/burrow/src/daemon/apple.rs @@ -1,11 +1,11 @@ use std::{ ffi::{c_char, CStr}, path::PathBuf, - sync::{Arc, Mutex}, + sync::Arc, thread, }; -use once_cell::sync::{Lazy, OnceCell}; +use once_cell::sync::OnceCell; use tokio::{ runtime::{Builder, Handle}, sync::Notify, @@ -14,35 +14,20 @@ use tracing::error; use crate::daemon::daemon_main; +static BURROW_NOTIFY: OnceCell> = OnceCell::new(); static BURROW_HANDLE: OnceCell = OnceCell::new(); -static BURROW_READY: OnceCell<()> = OnceCell::new(); -static BURROW_SPAWN_LOCK: Lazy> = Lazy::new(|| Mutex::new(())); #[no_mangle] -pub unsafe extern "C" fn spawn_in_process(path: *const c_char, db_path: *const c_char) { - let path_buf = if path.is_null() { - None - } else { - Some(PathBuf::from(CStr::from_ptr(path).to_str().unwrap())) - }; - let db_path_buf = if db_path.is_null() { - None - } else { - 
Some(PathBuf::from(CStr::from_ptr(db_path).to_str().unwrap())) - }; - spawn_in_process_with_paths(path_buf, db_path_buf); -} - -pub fn spawn_in_process_with_paths(path_buf: Option, db_path_buf: Option) { +pub unsafe extern "C" fn spawn_in_process(path: *const c_char) { crate::tracing::initialize(); - let _guard = BURROW_SPAWN_LOCK.lock().unwrap(); - if BURROW_READY.get().is_some() { - return; - } - - let notify = Arc::new(Notify::new()); + let notify = BURROW_NOTIFY.get_or_init(|| Arc::new(Notify::new())); let handle = BURROW_HANDLE.get_or_init(|| { + let path_buf = if path.is_null() { + None + } else { + Some(PathBuf::from(CStr::from_ptr(path).to_str().unwrap())) + }; let sender = notify.clone(); let (handle_tx, handle_rx) = tokio::sync::oneshot::channel(); @@ -55,12 +40,7 @@ pub fn spawn_in_process_with_paths(path_buf: Option, db_path_buf: Optio .unwrap(); handle_tx.send(runtime.handle().clone()).unwrap(); runtime.block_on(async { - let result = daemon_main( - path_buf.as_deref(), - db_path_buf.as_deref(), - Some(sender.clone()), - ) - .await; + let result = daemon_main(path_buf.as_deref(), Some(sender.clone())).await; if let Err(error) = result.as_ref() { error!("Burrow thread exited: {}", error); } @@ -72,5 +52,4 @@ pub fn spawn_in_process_with_paths(path_buf: Option, db_path_buf: Optio let receiver = notify.clone(); handle.block_on(async move { receiver.notified().await }); - let _ = BURROW_READY.set(()); } diff --git a/burrow/src/daemon/rpc/request.rs b/burrow/src/daemon/command.rs similarity index 82% rename from burrow/src/daemon/rpc/request.rs rename to burrow/src/daemon/command.rs index 91562cc..53b4108 100644 --- a/burrow/src/daemon/rpc/request.rs +++ b/burrow/src/daemon/command.rs @@ -3,13 +3,11 @@ use serde::{Deserialize, Serialize}; use tun::TunOptions; #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] -#[serde(tag = "method", content = "params")] pub enum DaemonCommand { Start(DaemonStartOptions), ServerInfo, ServerConfig, Stop, - 
ReloadConfig(String), } #[derive(Debug, Clone, Default, Serialize, Deserialize, JsonSchema)] @@ -17,13 +15,6 @@ pub struct DaemonStartOptions { pub tun: TunOptions, } -#[derive(Clone, Serialize, Deserialize)] -pub struct DaemonRequest { - pub id: u64, - #[serde(flatten)] - pub command: DaemonCommand, -} - #[test] fn test_daemoncommand_serialization() { insta::assert_snapshot!(serde_json::to_string(&DaemonCommand::Start( diff --git a/burrow/src/daemon/instance.rs b/burrow/src/daemon/instance.rs index 9b2e138..34e9878 100644 --- a/burrow/src/daemon/instance.rs +++ b/burrow/src/daemon/instance.rs @@ -1,540 +1,117 @@ -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::sync::Arc; use anyhow::Result; -use rusqlite::Connection; -use tokio::sync::{mpsc, watch, RwLock}; -use tokio_stream::wrappers::ReceiverStream; -use tonic::{Request, Response, Status as RspStatus}; +use tokio::{sync::RwLock, task::JoinHandle}; use tracing::{debug, info, warn}; use tun::tokio::TunInterface; -use super::{ - rpc::grpc_defs::{ - networks_server::Networks, tailnet_control_server::TailnetControl, tunnel_server::Tunnel, - Empty, Network, NetworkDeleteRequest, NetworkListResponse, NetworkReorderRequest, - State as RPCTunnelState, TailnetDiscoverRequest, TailnetDiscoverResponse, - TailnetProbeRequest, TailnetProbeResponse, TunnelConfigurationResponse, TunnelPacket, - TunnelStatusResponse, - }, - runtime::{tailnet_helper_request, ActiveTunnel, ResolvedTunnel}, -}; use crate::{ - auth::server::tailscale::{ - packet_socket_path, TailscaleBridgeManager, - TailscaleLoginStartRequest as BridgeLoginStartRequest, TailscaleLoginStatus, + daemon::{ + command::DaemonCommand, + response::{DaemonResponse, DaemonResponseData, ServerConfig, ServerInfo}, }, - control::discovery, - daemon::rpc::ServerConfig, - database::{add_network, delete_network, get_connection, list_networks, reorder_network}, + wireguard::Interface, }; -#[derive(Debug, Clone)] enum RunState { - Running, + Running(JoinHandle>), 
Idle, } -impl RunState { - fn to_rpc(&self) -> RPCTunnelState { - match self { - Self::Running => RPCTunnelState::Running, - Self::Idle => RPCTunnelState::Stopped, +pub struct DaemonInstance { + rx: async_channel::Receiver, + sx: async_channel::Sender, + tun_interface: Option>>, + wg_interface: Arc>, + wg_state: RunState, +} + +impl DaemonInstance { + pub fn new( + rx: async_channel::Receiver, + sx: async_channel::Sender, + wg_interface: Arc>, + ) -> Self { + Self { + rx, + sx, + wg_interface, + tun_interface: None, + wg_state: RunState::Idle, } } -} -#[derive(Clone)] -pub struct DaemonRPCServer { - tun_interface: Arc>>, - db_path: Option, - wg_state_chan: (watch::Sender, watch::Receiver), - network_update_chan: (watch::Sender<()>, watch::Receiver<()>), - active_tunnel: Arc>>, - tailnet_login: TailscaleBridgeManager, -} + async fn proc_command(&mut self, command: DaemonCommand) -> Result { + info!("Daemon got command: {:?}", command); + match command { + DaemonCommand::Start(st) => { + match self.wg_state { + RunState::Running(_) => { + warn!("Got start, but tun interface already up."); + } + RunState::Idle => { + let tun_if = Arc::new(RwLock::new(st.tun.open()?)); -impl DaemonRPCServer { - pub fn new(db_path: Option<&Path>) -> Result { - Ok(Self { - tun_interface: Arc::new(RwLock::new(None)), - db_path: db_path.map(Path::to_owned), - wg_state_chan: watch::channel(RunState::Idle), - network_update_chan: watch::channel(()), - active_tunnel: Arc::new(RwLock::new(None)), - tailnet_login: TailscaleBridgeManager::default(), - }) + debug!("Setting tun_interface"); + self.tun_interface = Some(tun_if.clone()); + debug!("tun_interface set: {:?}", self.tun_interface); + + debug!("Setting tun on wg_interface"); + self.wg_interface.write().await.set_tun(tun_if); + debug!("tun set on wg_interface"); + + debug!("Cloning wg_interface"); + let tmp_wg = self.wg_interface.clone(); + debug!("wg_interface cloned"); + + debug!("Spawning run task"); + let run_task = tokio::spawn(async 
move { + debug!("Running wg_interface"); + let twlock = tmp_wg.read().await; + debug!("wg_interface read lock acquired"); + twlock.run().await + }); + debug!("Run task spawned: {:?}", run_task); + + debug!("Setting wg_state to Running"); + self.wg_state = RunState::Running(run_task); + debug!("wg_state set to Running"); + + info!("Daemon started tun interface"); + } + } + Ok(DaemonResponseData::None) + } + DaemonCommand::ServerInfo => match &self.tun_interface { + None => Ok(DaemonResponseData::None), + Some(ti) => { + info!("{:?}", ti); + Ok(DaemonResponseData::ServerInfo(ServerInfo::try_from( + ti.read().await.inner.get_ref(), + )?)) + } + }, + DaemonCommand::Stop => { + if self.tun_interface.is_some() { + self.tun_interface = None; + info!("Daemon stopping tun interface."); + } else { + warn!("Got stop, but tun interface is not up.") + } + Ok(DaemonResponseData::None) + } + DaemonCommand::ServerConfig => { + Ok(DaemonResponseData::ServerConfig(ServerConfig::default())) + } + } } - fn get_connection(&self) -> Result { - get_connection(self.db_path.as_deref()).map_err(proc_err) - } - - async fn set_wg_state(&self, state: RunState) -> Result<(), RspStatus> { - self.wg_state_chan.0.send(state).map_err(proc_err) - } - - async fn notify_network_update(&self) -> Result<(), RspStatus> { - self.network_update_chan.0.send(()).map_err(proc_err) - } - - async fn resolve_tunnel(&self) -> Result { - let conn = self.get_connection()?; - let networks = list_networks(&conn).map_err(proc_err)?; - ResolvedTunnel::from_networks(&networks).map_err(proc_err) - } - - async fn current_tunnel_configuration(&self) -> Result { - let config = { - let active = self.active_tunnel.read().await; - active - .as_ref() - .map(|tunnel| tunnel.server_config().clone()) - }; - let config = match config { - Some(config) => config, - None => self - .resolve_tunnel() - .await? 
- .server_config() - .map_err(proc_err)?, - }; - Ok(configuration_rsp(config)) - } - - async fn stop_active_tunnel(&self) -> Result { - let current = { self.active_tunnel.write().await.take() }; - let Some(current) = current else { - return Ok(false); - }; - - current - .shutdown(&self.tun_interface) - .await - .map_err(proc_err)?; - self.set_wg_state(RunState::Idle).await?; - Ok(true) - } - - async fn replace_active_tunnel(&self, desired: ResolvedTunnel) -> Result<(), RspStatus> { - let _ = self.stop_active_tunnel().await?; - let tailnet_helper = match &desired { - ResolvedTunnel::Tailnet { identity, config } => Some( - self.tailnet_login - .ensure_session(tailnet_helper_request(identity, config)) - .await - .map_err(proc_err)? - .helper, - ), - _ => None, - }; - let active = desired - .start(self.tun_interface.clone(), tailnet_helper) - .await - .map_err(proc_err)?; - self.active_tunnel.write().await.replace(active); - self.set_wg_state(RunState::Running).await?; + pub async fn run(&mut self) -> Result<()> { + while let Ok(command) = self.rx.recv().await { + let response = self.proc_command(command).await; + info!("Daemon response: {:?}", response); + self.sx.send(DaemonResponse::new(response)).await?; + } Ok(()) } - - async fn reconcile_runtime(&self) -> Result<(), RspStatus> { - let desired = self.resolve_tunnel().await?; - let needs_restart = { - let guard = self.active_tunnel.read().await; - guard - .as_ref() - .map(|active| active.identity() != desired.identity()) - .unwrap_or(false) - }; - - if needs_restart { - self.replace_active_tunnel(desired).await?; - } - - Ok(()) - } - - fn tailnet_bridge_request( - account_name: String, - identity_name: String, - hostname: String, - authority: String, - ) -> BridgeLoginStartRequest { - let mut request = BridgeLoginStartRequest { - account_name, - identity_name, - hostname: (!hostname.trim().is_empty()).then_some(hostname), - control_url: Self::tailnet_control_url(&authority), - packet_socket: None, - }; - 
request.packet_socket = Some(packet_socket_path(&request).display().to_string()); - request - } - - fn tailnet_control_url(authority: &str) -> Option { - let authority = discovery::normalize_authority(authority); - (!discovery::is_managed_tailscale_authority(&authority)).then_some(authority) - } -} - -#[tonic::async_trait] -impl Tunnel for DaemonRPCServer { - type TunnelConfigurationStream = ReceiverStream>; - type TunnelPacketsStream = ReceiverStream>; - type TunnelStatusStream = ReceiverStream>; - - async fn tunnel_configuration( - &self, - _request: Request, - ) -> Result, RspStatus> { - let (tx, rx) = mpsc::channel(10); - let server = self.clone(); - let mut sub = self.network_update_chan.1.clone(); - - tokio::spawn(async move { - loop { - let response = server.current_tunnel_configuration().await; - if tx.send(response).await.is_err() { - break; - } - if sub.changed().await.is_err() { - break; - } - } - }); - - Ok(Response::new(ReceiverStream::new(rx))) - } - - async fn tunnel_packets( - &self, - request: Request>, - ) -> Result, RspStatus> { - let (packet_tx, mut packet_rx) = { - let guard = self.active_tunnel.read().await; - let Some(active) = guard.as_ref() else { - return Err(RspStatus::failed_precondition("no active tunnel")); - }; - active.packet_stream().ok_or_else(|| { - RspStatus::failed_precondition( - "active tunnel does not support packet streaming", - ) - })? 
- }; - - let (tx, rx) = mpsc::channel(128); - tokio::spawn(async move { - loop { - match packet_rx.recv().await { - Ok(payload) => { - if tx.send(Ok(TunnelPacket { payload })).await.is_err() { - break; - } - } - Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => continue, - Err(tokio::sync::broadcast::error::RecvError::Closed) => break, - } - } - }); - - let mut inbound = request.into_inner(); - tokio::spawn(async move { - loop { - match inbound.message().await { - Ok(Some(packet)) => { - debug!( - "daemon tunnel packet stream received {} bytes from client", - packet.payload.len() - ); - if packet_tx.send(packet.payload).await.is_err() { - break; - } - } - Ok(None) => break, - Err(error) => { - warn!("tailnet packet stream receive error: {error}"); - break; - } - } - } - }); - - Ok(Response::new(ReceiverStream::new(rx))) - } - - async fn tunnel_start(&self, _request: Request) -> Result, RspStatus> { - let desired = self.resolve_tunnel().await?; - let already_running = { - let guard = self.active_tunnel.read().await; - guard - .as_ref() - .map(|active| active.identity() == desired.identity()) - .unwrap_or(false) - }; - - if already_running { - warn!("Got start, but active tunnel already matches desired network."); - return Ok(Response::new(Empty {})); - } - - self.replace_active_tunnel(desired).await?; - Ok(Response::new(Empty {})) - } - - async fn tunnel_stop(&self, _request: Request) -> Result, RspStatus> { - let _ = self.stop_active_tunnel().await?; - Ok(Response::new(Empty {})) - } - - async fn tunnel_status( - &self, - _request: Request, - ) -> Result, RspStatus> { - let (tx, rx) = mpsc::channel(10); - let mut state_rx = self.wg_state_chan.1.clone(); - tokio::spawn(async move { - let cur = state_rx.borrow_and_update().to_owned(); - if tx.send(Ok(status_rsp(cur))).await.is_err() { - return; - } - - loop { - if state_rx.changed().await.is_err() { - break; - } - let cur = state_rx.borrow().to_owned(); - if tx.send(Ok(status_rsp(cur))).await.is_err() { - 
break; - } - } - }); - Ok(Response::new(ReceiverStream::new(rx))) - } -} - -#[tonic::async_trait] -impl Networks for DaemonRPCServer { - type NetworkListStream = ReceiverStream>; - - async fn network_add(&self, request: Request) -> Result, RspStatus> { - let conn = self.get_connection()?; - let network = request.into_inner(); - add_network(&conn, &network).map_err(proc_err)?; - self.notify_network_update().await?; - self.reconcile_runtime().await?; - Ok(Response::new(Empty {})) - } - - async fn network_list( - &self, - _request: Request, - ) -> Result, RspStatus> { - let (tx, rx) = mpsc::channel(10); - let conn = self.get_connection()?; - let mut sub = self.network_update_chan.1.clone(); - tokio::spawn(async move { - loop { - let networks = list_networks(&conn) - .map(|res| NetworkListResponse { network: res }) - .map_err(proc_err); - if tx.send(networks).await.is_err() { - break; - } - if sub.changed().await.is_err() { - break; - } - } - }); - Ok(Response::new(ReceiverStream::new(rx))) - } - - async fn network_reorder( - &self, - request: Request, - ) -> Result, RspStatus> { - let conn = self.get_connection()?; - reorder_network(&conn, request.into_inner()).map_err(proc_err)?; - self.notify_network_update().await?; - self.reconcile_runtime().await?; - Ok(Response::new(Empty {})) - } - - async fn network_delete( - &self, - request: Request, - ) -> Result, RspStatus> { - let conn = self.get_connection()?; - delete_network(&conn, request.into_inner()).map_err(proc_err)?; - self.notify_network_update().await?; - self.reconcile_runtime().await?; - Ok(Response::new(Empty {})) - } -} - -#[tonic::async_trait] -impl TailnetControl for DaemonRPCServer { - async fn discover( - &self, - request: Request, - ) -> Result, RspStatus> { - let request = request.into_inner(); - info!(email = %request.email, "daemon tailnet discover RPC received"); - let discovery = discovery::discover_tailnet(&request.email) - .await - .map_err(proc_err)?; - info!( - email = %request.email, - 
authority = %discovery.authority, - provider = ?discovery.provider, - "daemon tailnet discover RPC resolved" - ); - - Ok(Response::new(TailnetDiscoverResponse { - domain: discovery.domain, - authority: discovery.authority.clone(), - oidc_issuer: discovery.oidc_issuer.unwrap_or_default(), - managed: matches!( - discovery::inferred_provider(Some(&discovery.authority), Some(&discovery.provider)), - crate::control::TailnetProvider::Tailscale - ), - })) - } - - async fn probe( - &self, - request: Request, - ) -> Result, RspStatus> { - let request = request.into_inner(); - let status = discovery::probe_tailnet_authority(&request.authority) - .await - .map_err(proc_err)?; - - Ok(Response::new(TailnetProbeResponse { - authority: status.authority, - status_code: status.status_code, - summary: status.summary, - detail: status.detail, - reachable: status.reachable, - })) - } - - async fn login_start( - &self, - request: Request, - ) -> Result, RspStatus> { - let request = request.into_inner(); - info!( - account = %request.account_name, - identity = %request.identity_name, - authority = %request.authority, - "daemon tailnet login start RPC received" - ); - let response = self - .tailnet_login - .start_login(Self::tailnet_bridge_request( - request.account_name, - request.identity_name, - request.hostname, - request.authority, - )) - .await - .map_err(proc_err)?; - - info!( - session_id = %response.session_id, - backend_state = %response.status.backend_state, - running = response.status.running, - needs_login = response.status.needs_login, - auth_url = ?response.status.auth_url, - "daemon tailnet login start RPC resolved" - ); - - Ok(Response::new(tailnet_login_rsp( - response.session_id, - response.status, - ))) - } - - async fn login_status( - &self, - request: Request, - ) -> Result, RspStatus> { - let request = request.into_inner(); - info!(session_id = %request.session_id, "daemon tailnet login status RPC received"); - let status = self - .tailnet_login - 
.status(&request.session_id) - .await - .map_err(proc_err)?; - let Some(status) = status else { - return Err(RspStatus::not_found("tailnet login session not found")); - }; - info!( - session_id = %request.session_id, - backend_state = %status.backend_state, - running = status.running, - needs_login = status.needs_login, - auth_url = ?status.auth_url, - "daemon tailnet login status RPC resolved" - ); - Ok(Response::new(tailnet_login_rsp(request.session_id, status))) - } - - async fn login_cancel( - &self, - request: Request, - ) -> Result, RspStatus> { - let request = request.into_inner(); - let canceled = self - .tailnet_login - .cancel(&request.session_id) - .await - .map_err(proc_err)?; - if !canceled { - return Err(RspStatus::not_found("tailnet login session not found")); - } - Ok(Response::new(Empty {})) - } -} - -fn proc_err(err: impl ToString) -> RspStatus { - RspStatus::internal(err.to_string()) -} - -fn configuration_rsp(config: ServerConfig) -> TunnelConfigurationResponse { - TunnelConfigurationResponse { - addresses: config.address, - mtu: config.mtu.unwrap_or(1000), - routes: config.routes, - dns_servers: config.dns_servers, - search_domains: config.search_domains, - include_default_route: config.include_default_route, - } -} - -fn status_rsp(state: RunState) -> TunnelStatusResponse { - TunnelStatusResponse { - state: state.to_rpc().into(), - start: None, // TODO: Add timestamp - } -} - -fn tailnet_login_rsp( - session_id: String, - status: TailscaleLoginStatus, -) -> super::rpc::grpc_defs::TailnetLoginStatusResponse { - super::rpc::grpc_defs::TailnetLoginStatusResponse { - session_id, - backend_state: status.backend_state, - auth_url: status.auth_url.unwrap_or_default(), - running: status.running, - needs_login: status.needs_login, - tailnet_name: status.tailnet_name.unwrap_or_default(), - magic_dns_suffix: status.magic_dns_suffix.unwrap_or_default(), - self_dns_name: status.self_dns_name.unwrap_or_default(), - tailnet_ips: status.tailscale_ips, - 
health: status.health, - } } diff --git a/burrow/src/daemon/mod.rs b/burrow/src/daemon/mod.rs index 724e3bb..2a971dd 100644 --- a/burrow/src/daemon/mod.rs +++ b/burrow/src/daemon/mod.rs @@ -1,281 +1,60 @@ use std::{path::Path, sync::Arc}; pub mod apple; +mod command; mod instance; mod net; -pub mod rpc; -mod runtime; +mod response; -use anyhow::{Error as AhError, Result}; -use instance::DaemonRPCServer; -pub use net::{get_socket_path, DaemonClient}; -pub use rpc::{DaemonCommand, DaemonResponseData, DaemonStartOptions}; -use tokio::{net::UnixListener, sync::Notify}; -use tokio_stream::wrappers::UnixListenerStream; -use tonic::transport::Server; -use tracing::info; +use anyhow::Result; +pub use command::{DaemonCommand, DaemonStartOptions}; +use instance::DaemonInstance; +pub use net::{DaemonClient, Listener}; +pub use response::{DaemonResponse, DaemonResponseData, ServerInfo}; +use tokio::sync::{Notify, RwLock}; +use tracing::{error, info}; -use crate::{ - daemon::rpc::grpc_defs::{ - networks_server::NetworksServer, tailnet_control_server::TailnetControlServer, - tunnel_server::TunnelServer, - }, - database::get_connection, -}; +use crate::wireguard::{Config, Interface}; -pub async fn daemon_main( - socket_path: Option<&Path>, - db_path: Option<&Path>, - notify_ready: Option>, -) -> Result<()> { - let _conn = get_connection(db_path)?; - let burrow_server = DaemonRPCServer::new(db_path)?; - let spp = socket_path.clone(); - let tmp = get_socket_path(); - let sock_path = spp.unwrap_or(Path::new(tmp.as_str())); - if sock_path.exists() { - std::fs::remove_file(sock_path)?; - } - let uds = UnixListener::bind(sock_path)?; - let serve_job = tokio::spawn(async move { - let uds_stream = UnixListenerStream::new(uds); - let tailnet_server = burrow_server.clone(); - let _srv = Server::builder() - .add_service(TunnelServer::new(burrow_server.clone())) - .add_service(NetworksServer::new(burrow_server)) - .add_service(TailnetControlServer::new(tailnet_server)) - 
.serve_with_incoming(uds_stream) - .await?; - Ok::<(), AhError>(()) - }); +pub async fn daemon_main(path: Option<&Path>, notify_ready: Option>) -> Result<()> { + let (commands_tx, commands_rx) = async_channel::unbounded(); + let (response_tx, response_rx) = async_channel::unbounded(); + let listener = if let Some(path) = path { + info!("Creating listener... {:?}", path); + Listener::new_with_path(commands_tx, response_rx, path) + } else { + info!("Creating listener..."); + Listener::new(commands_tx, response_rx) + }; if let Some(n) = notify_ready { - n.notify_one(); + n.notify_one() } + let listener = listener?; + + let config = Config::default(); + let iface: Interface = config.try_into()?; + let mut instance = DaemonInstance::new(commands_rx, response_tx, Arc::new(RwLock::new(iface))); info!("Starting daemon..."); - tokio::try_join!(serve_job) + let main_job = tokio::spawn(async move { + let result = instance.run().await; + if let Err(e) = result.as_ref() { + error!("Instance exited: {}", e); + } + result + }); + + let listener_job = tokio::spawn(async move { + let result = listener.run().await; + if let Err(e) = result.as_ref() { + error!("Listener exited: {}", e); + } + result + }); + + tokio::try_join!(main_job, listener_job) .map(|_| ()) .map_err(|e| e.into()) } - -#[cfg(test)] -mod tests { - use std::{ - path::PathBuf, - time::{SystemTime, UNIX_EPOCH}, - }; - - use anyhow::{anyhow, Result}; - use tokio::time::{timeout, Duration}; - - use super::*; - use crate::daemon::rpc::{ - client::BurrowClient, - grpc_defs::{ - Empty, Network, NetworkListResponse, NetworkReorderRequest, NetworkType, - TunnelConfigurationResponse, TunnelStatusResponse, - }, - }; - - #[tokio::test] - async fn daemon_tracks_network_priority_via_grpc() -> Result<()> { - let socket_path = temp_path("sock"); - let db_path = temp_path("sqlite3"); - let ready = Arc::new(Notify::new()); - - let daemon_ready = ready.clone(); - let daemon_socket_path = socket_path.clone(); - let daemon_db_path = 
db_path.clone(); - let daemon_task = tokio::spawn(async move { - daemon_main( - Some(daemon_socket_path.as_path()), - Some(daemon_db_path.as_path()), - Some(daemon_ready), - ) - .await - }); - - timeout(Duration::from_secs(5), ready.notified()).await?; - - let mut client = timeout( - Duration::from_secs(5), - BurrowClient::from_uds_path(&socket_path), - ) - .await??; - let mut config_stream = client - .tunnel_client - .tunnel_configuration(Empty {}) - .await? - .into_inner(); - let mut network_stream = client - .networks_client - .network_list(Empty {}) - .await? - .into_inner(); - let mut status_stream = client - .tunnel_client - .tunnel_status(Empty {}) - .await? - .into_inner(); - - let initial_config = next_configuration(&mut config_stream).await?; - assert!(initial_config.addresses.is_empty()); - assert_eq!(initial_config.mtu, 1500); - - let initial_networks = next_networks(&mut network_stream).await?; - assert!(initial_networks.network.is_empty()); - - let initial_status = next_status(&mut status_stream).await?; - assert_eq!( - initial_status.state(), - crate::daemon::rpc::grpc_defs::State::Stopped - ); - - client.tunnel_client.tunnel_start(Empty {}).await?; - - let passthrough_status = next_status(&mut status_stream).await?; - assert_eq!( - passthrough_status.state(), - crate::daemon::rpc::grpc_defs::State::Running - ); - - client.tunnel_client.tunnel_stop(Empty {}).await?; - - let stopped_status = next_status(&mut status_stream).await?; - assert_eq!( - stopped_status.state(), - crate::daemon::rpc::grpc_defs::State::Stopped - ); - - client - .networks_client - .network_add(Network { - id: 1, - r#type: NetworkType::WireGuard.into(), - payload: sample_wireguard_payload(), - }) - .await?; - - let networks_after_wg = next_networks(&mut network_stream).await?; - assert_eq!( - network_ids(&networks_after_wg), - vec![(1, NetworkType::WireGuard)] - ); - - let wireguard_config = next_configuration(&mut config_stream).await?; - assert_eq!( - 
wireguard_config.addresses, - vec!["10.8.0.2/32", "fd00::2/128"] - ); - assert_eq!(wireguard_config.mtu, 1420); - - client - .networks_client - .network_add(Network { - id: 2, - r#type: NetworkType::WireGuard.into(), - payload: sample_wireguard_payload_with("10.77.0.2/32", 1380), - }) - .await?; - - let networks_after_second_add = next_networks(&mut network_stream).await?; - assert_eq!( - network_ids(&networks_after_second_add), - vec![(1, NetworkType::WireGuard), (2, NetworkType::WireGuard)] - ); - - let still_wireguard = next_configuration(&mut config_stream).await?; - assert_eq!(still_wireguard.addresses, wireguard_config.addresses); - - client - .networks_client - .network_reorder(NetworkReorderRequest { id: 2, index: 0 }) - .await?; - - let networks_after_reorder = next_networks(&mut network_stream).await?; - assert_eq!( - network_ids(&networks_after_reorder), - vec![(2, NetworkType::WireGuard), (1, NetworkType::WireGuard)] - ); - - let second_wireguard_config = next_configuration(&mut config_stream).await?; - assert_eq!(second_wireguard_config.addresses, vec!["10.77.0.2/32"]); - assert_eq!(second_wireguard_config.mtu, 1380); - - daemon_task.abort(); - let _ = daemon_task.await; - cleanup_path(&socket_path); - cleanup_path(&db_path); - - Ok(()) - } - - fn temp_path(ext: &str) -> PathBuf { - let now = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time is after unix epoch") - .as_nanos(); - std::env::temp_dir().join(format!("burrow-daemon-test-{now}.{ext}")) - } - - fn cleanup_path(path: &Path) { - let _ = std::fs::remove_file(path); - } - - fn sample_wireguard_payload() -> Vec { - br#"[Interface] -PrivateKey = OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8= -Address = 10.8.0.2/32, fd00::2/128 -ListenPort = 51820 -MTU = 1420 - -[Peer] -PublicKey = 8GaFjVO6c4luCHG4ONO+1bFG8tO+Zz5/Gy+Geht1USM= -PresharedKey = ha7j4BjD49sIzyF9SNlbueK0AMHghlj6+u0G3bzC698= -AllowedIPs = 0.0.0.0/0, ::/0 -Endpoint = wg.burrow.rs:51820 -"# - .to_vec() - } - - fn 
sample_wireguard_payload_with(address: &str, mtu: u16) -> Vec { - format!( - "[Interface]\nPrivateKey = OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8=\nAddress = {address}\nListenPort = 51820\nMTU = {mtu}\n\n[Peer]\nPublicKey = 8GaFjVO6c4luCHG4ONO+1bFG8tO+Zz5/Gy+Geht1USM=\nPresharedKey = ha7j4BjD49sIzyF9SNlbueK0AMHghlj6+u0G3bzC698=\nAllowedIPs = 0.0.0.0/0, ::/0\nEndpoint = wg.burrow.rs:51820\n" - ) - .into_bytes() - } - - async fn next_configuration( - stream: &mut tonic::Streaming, - ) -> Result { - timeout(Duration::from_secs(5), stream.message()) - .await?? - .ok_or_else(|| anyhow!("configuration stream ended unexpectedly")) - } - - async fn next_networks( - stream: &mut tonic::Streaming, - ) -> Result { - timeout(Duration::from_secs(5), stream.message()) - .await?? - .ok_or_else(|| anyhow!("network stream ended unexpectedly")) - } - - async fn next_status( - stream: &mut tonic::Streaming, - ) -> Result { - timeout(Duration::from_secs(5), stream.message()) - .await?? - .ok_or_else(|| anyhow!("status stream ended unexpectedly")) - } - - fn network_ids(response: &NetworkListResponse) -> Vec<(i32, NetworkType)> { - response - .network - .iter() - .map(|network| (network.id, network.r#type())) - .collect() - } -} diff --git a/burrow/src/daemon/net/mod.rs b/burrow/src/daemon/net/mod.rs index eb45335..fe35bae 100644 --- a/burrow/src/daemon/net/mod.rs +++ b/burrow/src/daemon/net/mod.rs @@ -1,11 +1,21 @@ +use serde::{Deserialize, Serialize}; + +use super::DaemonCommand; + #[cfg(target_family = "unix")] mod unix; #[cfg(target_family = "unix")] -pub use unix::{get_socket_path, DaemonClient, Listener}; +pub use unix::{DaemonClient, Listener}; #[cfg(target_os = "windows")] mod windows; #[cfg(target_os = "windows")] pub use windows::{DaemonClient, Listener}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct DaemonRequest { + pub id: u64, + pub command: DaemonCommand, +} diff --git a/burrow/src/daemon/net/unix.rs b/burrow/src/daemon/net/unix.rs index f7f9433..26e901d 
100644 --- a/burrow/src/daemon/net/unix.rs +++ b/burrow/src/daemon/net/unix.rs @@ -10,10 +10,8 @@ use tokio::{ }; use tracing::{debug, error, info}; -use crate::daemon::rpc::{ - DaemonCommand, DaemonMessage, DaemonNotification, DaemonRequest, DaemonResponse, - DaemonResponseData, -}; +use super::*; +use crate::daemon::{DaemonCommand, DaemonResponse, DaemonResponseData}; #[cfg(not(target_vendor = "apple"))] const UNIX_SOCKET_PATH: &str = "/run/burrow.sock"; @@ -21,18 +19,11 @@ const UNIX_SOCKET_PATH: &str = "/run/burrow.sock"; #[cfg(target_vendor = "apple")] const UNIX_SOCKET_PATH: &str = "burrow.sock"; -pub fn get_socket_path() -> String { - if std::env::var("BURROW_SOCKET_PATH").is_ok() { - return std::env::var("BURROW_SOCKET_PATH").unwrap(); - } - UNIX_SOCKET_PATH.to_string() -} - +#[derive(Debug)] pub struct Listener { cmd_tx: async_channel::Sender, rsp_rx: async_channel::Receiver, - sub_chan: async_channel::Receiver, - pub inner: UnixListener, + inner: UnixListener, } impl Listener { @@ -40,11 +31,9 @@ impl Listener { pub fn new( cmd_tx: async_channel::Sender, rsp_rx: async_channel::Receiver, - sub_chan: async_channel::Receiver, ) -> Self { - let socket_path = get_socket_path(); - let path = Path::new(OsStr::new(&socket_path)); - Self::new_with_path(cmd_tx, rsp_rx, sub_chan, path)? + let path = Path::new(OsStr::new(UNIX_SOCKET_PATH)); + Self::new_with_path(cmd_tx, rsp_rx, path)? 
} #[throws] @@ -52,16 +41,10 @@ impl Listener { pub fn new_with_path( cmd_tx: async_channel::Sender, rsp_rx: async_channel::Receiver, - sub_chan: async_channel::Receiver, path: &Path, ) -> Self { let inner = listener_from_path_or_fd(&path, raw_fd())?; - Self { - cmd_tx, - rsp_rx, - sub_chan, - inner, - } + Self { cmd_tx, rsp_rx, inner } } #[throws] @@ -69,16 +52,10 @@ impl Listener { pub fn new_with_path( cmd_tx: async_channel::Sender, rsp_rx: async_channel::Receiver, - sub_chan: async_channel::Receiver, path: &Path, ) -> Self { let inner = listener_from_path(path)?; - Self { - cmd_tx, - rsp_rx, - inner, - sub_chan, - } + Self { cmd_tx, rsp_rx, inner } } pub async fn run(&self) -> Result<()> { @@ -87,10 +64,9 @@ impl Listener { let (stream, _) = self.inner.accept().await?; let cmd_tx = self.cmd_tx.clone(); let rsp_rxc = self.rsp_rx.clone(); - let sub_chan = self.sub_chan.clone(); tokio::task::spawn(async move { info!("Got connection: {:?}", stream); - Self::stream(stream, cmd_tx, rsp_rxc, sub_chan).await; + Self::stream(stream, cmd_tx, rsp_rxc).await; }); } } @@ -99,46 +75,34 @@ impl Listener { stream: UnixStream, cmd_tx: async_channel::Sender, rsp_rxc: async_channel::Receiver, - sub_chan: async_channel::Receiver, ) { let mut stream = stream; let (mut read_stream, mut write_stream) = stream.split(); let buf_reader = BufReader::new(&mut read_stream); let mut lines = buf_reader.lines(); - loop { - tokio::select! 
{ - Ok(Some(line)) = lines.next_line() => { - info!("Line: {}", line); - let mut res: DaemonResponse = DaemonResponseData::None.into(); - let req = match serde_json::from_str::(&line) { - Ok(req) => Some(req), - Err(e) => { - res.result = Err(e.to_string()); - error!("Failed to parse request: {}", e); - None - } - }; - - let res = serde_json::to_string(&DaemonMessage::from(res)).unwrap(); - - if let Some(req) = req { - cmd_tx.send(req.command).await.unwrap(); - let res = rsp_rxc.recv().await.unwrap().with_id(req.id); - let mut payload = serde_json::to_string(&DaemonMessage::from(res)).unwrap(); - payload.push('\n'); - info!("Sending response: {}", payload); - write_stream.write_all(payload.as_bytes()).await.unwrap(); - } else { - write_stream.write_all(res.as_bytes()).await.unwrap(); - } - } - Ok(cmd) = sub_chan.recv() => { - info!("Got subscription command: {:?}", cmd); - let msg = DaemonMessage::from(cmd); - let mut payload = serde_json::to_string(&msg).unwrap(); - payload.push('\n'); - write_stream.write_all(payload.as_bytes()).await.unwrap(); + while let Ok(Some(line)) = lines.next_line().await { + info!("Line: {}", line); + let mut res: DaemonResponse = DaemonResponseData::None.into(); + let req = match serde_json::from_str::(&line) { + Ok(req) => Some(req), + Err(e) => { + res.result = Err(e.to_string()); + error!("Failed to parse request: {}", e); + None } + }; + let mut res = serde_json::to_string(&res).unwrap(); + res.push('\n'); + + if let Some(req) = req { + cmd_tx.send(req.command).await.unwrap(); + let res = rsp_rxc.recv().await.unwrap().with_id(req.id); + let mut retres = serde_json::to_string(&res).unwrap(); + retres.push('\n'); + info!("Sending response: {}", retres); + write_stream.write_all(retres.as_bytes()).await.unwrap(); + } else { + write_stream.write_all(res.as_bytes()).await.unwrap(); } } } @@ -212,8 +176,7 @@ pub struct DaemonClient { impl DaemonClient { pub async fn new() -> Result { - let socket_path = get_socket_path(); - let path = 
Path::new(OsStr::new(&socket_path)); + let path = Path::new(OsStr::new(UNIX_SOCKET_PATH)); Self::new_with_path(path).await } diff --git a/burrow/src/daemon/rpc/response.rs b/burrow/src/daemon/response.rs similarity index 69% rename from burrow/src/daemon/rpc/response.rs rename to burrow/src/daemon/response.rs index 6d03581..37ee5d9 100644 --- a/burrow/src/daemon/rpc/response.rs +++ b/burrow/src/daemon/response.rs @@ -2,8 +2,6 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use tun::TunInterface; -use crate::wireguard::Config; - #[derive(Clone, Serialize, Deserialize, Debug, JsonSchema)] pub struct DaemonResponse { // Error types can't be serialized, so this is the second best option. @@ -36,8 +34,6 @@ impl DaemonResponse { pub struct ServerInfo { pub name: Option, pub ip: Option, - #[serde(default)] - pub ipv6: Vec, pub mtu: Option, } @@ -49,12 +45,6 @@ impl TryFrom<&TunInterface> for ServerInfo { Ok(ServerInfo { name: server.name().ok(), ip: server.ipv4_addr().ok().map(|ip| ip.to_string()), - ipv6: server - .ipv6_addrs() - .unwrap_or_default() - .into_iter() - .map(|ip| ip.to_string()) - .collect(), mtu: server.mtu().ok(), }) } @@ -68,46 +58,14 @@ impl TryFrom<&TunInterface> for ServerInfo { #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct ServerConfig { pub address: Vec, - #[serde(default)] - pub routes: Vec, - #[serde(default)] - pub dns_servers: Vec, - #[serde(default)] - pub search_domains: Vec, - #[serde(default)] - pub include_default_route: bool, pub name: Option, pub mtu: Option, } -impl TryFrom<&Config> for ServerConfig { - type Error = anyhow::Error; - - fn try_from(config: &Config) -> anyhow::Result { - Ok(ServerConfig { - address: config.interface.address.clone(), - routes: config - .peers - .iter() - .flat_map(|peer| peer.allowed_ips.iter().cloned()) - .collect(), - dns_servers: config.interface.dns.clone(), - search_domains: Vec::new(), - include_default_route: false, - name: None, - mtu: 
config.interface.mtu.map(|mtu| mtu as i32), - }) - } -} - impl Default for ServerConfig { fn default() -> Self { Self { address: vec!["10.13.13.2".to_string()], // Dummy remote address - routes: Vec::new(), - dns_servers: Vec::new(), - search_domains: Vec::new(), - include_default_route: false, name: None, mtu: None, } @@ -115,7 +73,6 @@ impl Default for ServerConfig { } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -#[serde(tag = "type")] pub enum DaemonResponseData { ServerInfo(ServerInfo), ServerConfig(ServerConfig), @@ -137,7 +94,6 @@ fn test_response_serialization() -> anyhow::Result<()> { DaemonResponseData::ServerInfo(ServerInfo { name: Some("burrow".to_string()), ip: None, - ipv6: Vec::new(), mtu: Some(1500) }) )))?); diff --git a/burrow/src/daemon/rpc/client.rs b/burrow/src/daemon/rpc/client.rs deleted file mode 100644 index aa84c64..0000000 --- a/burrow/src/daemon/rpc/client.rs +++ /dev/null @@ -1,46 +0,0 @@ -use anyhow::Result; -use hyper_util::rt::TokioIo; -use std::path::Path; -use tokio::net::UnixStream; -use tonic::transport::{Endpoint, Uri}; -use tower::service_fn; - -use super::grpc_defs::{ - networks_client::NetworksClient, tailnet_control_client::TailnetControlClient, - tunnel_client::TunnelClient, -}; -use crate::daemon::get_socket_path; - -pub struct BurrowClient { - pub networks_client: NetworksClient, - pub tailnet_client: TailnetControlClient, - pub tunnel_client: TunnelClient, -} - -impl BurrowClient { - #[cfg(any(target_os = "linux", target_vendor = "apple"))] - pub async fn from_uds() -> Result { - Self::from_uds_path(get_socket_path()).await - } - - #[cfg(any(target_os = "linux", target_vendor = "apple"))] - pub async fn from_uds_path(path: impl AsRef) -> Result { - let socket_path = path.as_ref().to_owned(); - let channel = Endpoint::try_from("http://[::]:50051")? // NOTE: this is a hack(?) 
- .connect_with_connector(service_fn(move |_: Uri| { - let socket_path = socket_path.clone(); - async move { - Ok::<_, std::io::Error>(TokioIo::new(UnixStream::connect(&socket_path).await?)) - } - })) - .await?; - let nw_client = NetworksClient::new(channel.clone()); - let tailnet_client = TailnetControlClient::new(channel.clone()); - let tun_client = TunnelClient::new(channel.clone()); - Ok(BurrowClient { - networks_client: nw_client, - tailnet_client, - tunnel_client: tun_client, - }) - } -} diff --git a/burrow/src/daemon/rpc/grpc_defs.rs b/burrow/src/daemon/rpc/grpc_defs.rs deleted file mode 100644 index f3085ee..0000000 --- a/burrow/src/daemon/rpc/grpc_defs.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub use burrowgrpc::*; - -mod burrowgrpc { - tonic::include_proto!("burrow"); -} diff --git a/burrow/src/daemon/rpc/mod.rs b/burrow/src/daemon/rpc/mod.rs deleted file mode 100644 index 512662c..0000000 --- a/burrow/src/daemon/rpc/mod.rs +++ /dev/null @@ -1,43 +0,0 @@ -pub mod client; -pub mod grpc_defs; -pub mod notification; -pub mod request; -pub mod response; - -pub use client::BurrowClient; -pub use notification::DaemonNotification; -pub use request::{DaemonCommand, DaemonRequest, DaemonStartOptions}; -pub use response::{DaemonResponse, DaemonResponseData, ServerConfig, ServerInfo}; -use serde::{Deserialize, Serialize}; - -/// The `Message` object contains either a `DaemonRequest` or a `DaemonResponse` to be serialized / deserialized -/// for our IPC communication. Our IPC protocol is based on jsonrpc (https://www.jsonrpc.org/specification#overview), -/// but deviates from it in a few ways: -/// - We differentiate Notifications from Requests explicitly. -/// - We have a "type" field to differentiate between a request, a response, and a notification. -/// - The params field may receive any json value(such as a string), not just an object or an array. 
-#[derive(Serialize, Deserialize)] -#[serde(tag = "type")] -pub enum DaemonMessage { - Request(DaemonRequest), - Response(DaemonResponse), - Notification(DaemonNotification), -} - -impl From for DaemonMessage { - fn from(request: DaemonRequest) -> Self { - DaemonMessage::Request(request) - } -} - -impl From for DaemonMessage { - fn from(response: DaemonResponse) -> Self { - DaemonMessage::Response(response) - } -} - -impl From for DaemonMessage { - fn from(notification: DaemonNotification) -> Self { - DaemonMessage::Notification(notification) - } -} diff --git a/burrow/src/daemon/rpc/notification.rs b/burrow/src/daemon/rpc/notification.rs deleted file mode 100644 index 135b0e4..0000000 --- a/burrow/src/daemon/rpc/notification.rs +++ /dev/null @@ -1,11 +0,0 @@ -use rpc::ServerConfig; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; - -use crate::daemon::rpc; - -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] -#[serde(tag = "method", content = "params")] -pub enum DaemonNotification { - ConfigChange(ServerConfig), -} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-2.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-2.snap deleted file mode 100644 index 01ec8a7..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-2.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/request.rs -expression: "serde_json::to_string(&DaemonCommand::Start(DaemonStartOptions {\n tun: TunOptions { ..TunOptions::default() },\n })).unwrap()" ---- -{"method":"Start","params":{"tun":{"name":null,"no_pi":false,"tun_excl":false,"tun_retrieve":false,"address":[]}}} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-3.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-3.snap deleted file mode 100644 index 
a6a0466..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-3.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/request.rs -expression: "serde_json::to_string(&DaemonCommand::ServerInfo).unwrap()" ---- -{"method":"ServerInfo"} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-4.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-4.snap deleted file mode 100644 index f930051..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-4.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/request.rs -expression: "serde_json::to_string(&DaemonCommand::Stop).unwrap()" ---- -{"method":"Stop"} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-5.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-5.snap deleted file mode 100644 index 89dc42c..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization-5.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/request.rs -expression: "serde_json::to_string(&DaemonCommand::ServerConfig).unwrap()" ---- -{"method":"ServerConfig"} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization.snap deleted file mode 100644 index aeca659..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__request__daemoncommand_serialization.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/request.rs -expression: "serde_json::to_string(&DaemonCommand::Start(DaemonStartOptions::default())).unwrap()" ---- 
-{"method":"Start","params":{"tun":{"name":null,"no_pi":false,"tun_excl":false,"tun_retrieve":false,"address":[]}}} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-2.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-2.snap deleted file mode 100644 index 76aa944..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-2.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/response.rs -expression: "serde_json::to_string(&DaemonResponse::new(Ok::(DaemonResponseData::ServerInfo(ServerInfo {\n name: Some(\"burrow\".to_string()),\n ip: None,\n ipv6: Vec::new(),\n mtu: Some(1500),\n }))))?" ---- -{"result":{"Ok":{"type":"ServerInfo","name":"burrow","ip":null,"ipv6":[],"mtu":1500}},"id":0} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-3.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-3.snap deleted file mode 100644 index 30068f3..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-3.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/response.rs -expression: "serde_json::to_string(&DaemonResponse::new(Err::(\"error\".to_string())))?" ---- -{"result":{"Err":"error"},"id":0} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-4.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-4.snap deleted file mode 100644 index 68b4195..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization-4.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/response.rs -expression: "serde_json::to_string(&DaemonResponse::new(Ok::(DaemonResponseData::ServerConfig(ServerConfig::default()))))?" 
---- -{"result":{"Ok":{"type":"ServerConfig","address":["10.13.13.2"],"routes":[],"dns_servers":[],"search_domains":[],"include_default_route":false,"name":null,"mtu":null}},"id":0} diff --git a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization.snap b/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization.snap deleted file mode 100644 index 31bd84b..0000000 --- a/burrow/src/daemon/rpc/snapshots/burrow__daemon__rpc__response__response_serialization.snap +++ /dev/null @@ -1,5 +0,0 @@ ---- -source: burrow/src/daemon/rpc/response.rs -expression: "serde_json::to_string(&DaemonResponse::new(Ok::(DaemonResponseData::None)))?" ---- -{"result":{"Ok":{"type":"None"}},"id":0} diff --git a/burrow/src/daemon/runtime.rs b/burrow/src/daemon/runtime.rs deleted file mode 100644 index 31821a2..0000000 --- a/burrow/src/daemon/runtime.rs +++ /dev/null @@ -1,618 +0,0 @@ -use std::{path::PathBuf, sync::Arc}; - -use anyhow::{bail, Context, Result}; -use tokio::{ - io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}, - net::UnixStream, - sync::{broadcast, mpsc, RwLock}, - task::JoinHandle, - time::{sleep, Duration}, -}; -use tun::{tokio::TunInterface, TunOptions}; - -use super::rpc::{ - grpc_defs::{Network, NetworkType}, - ServerConfig, -}; -use crate::{ - auth::server::tailscale::{ - default_hostname, packet_socket_path, spawn_tailscale_helper, TailscaleHelperProcess, - TailscaleLoginStartRequest, TailscaleLoginStatus, - }, - control::{discovery, TailnetConfig}, - wireguard::{Config, Interface as WireGuardInterface}, -}; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum RuntimeIdentity { - Passthrough, - Network { - id: i32, - network_type: NetworkType, - payload: Vec, - }, -} - -#[derive(Clone, Debug)] -pub enum ResolvedTunnel { - Passthrough { - identity: RuntimeIdentity, - }, - Tailnet { - identity: RuntimeIdentity, - config: TailnetConfig, - }, - WireGuard { - identity: RuntimeIdentity, - config: Config, - }, 
-} - -impl ResolvedTunnel { - pub fn from_networks(networks: &[Network]) -> Result { - let Some(network) = networks.first() else { - return Ok(Self::Passthrough { - identity: RuntimeIdentity::Passthrough, - }); - }; - - let identity = RuntimeIdentity::Network { - id: network.id, - network_type: network.r#type(), - payload: network.payload.clone(), - }; - - match network.r#type() { - NetworkType::Tailnet => { - let config = TailnetConfig::from_slice(&network.payload)?; - Ok(Self::Tailnet { identity, config }) - } - NetworkType::WireGuard => { - let payload = String::from_utf8(network.payload.clone()) - .context("wireguard payload must be valid UTF-8")?; - let config = Config::from_content_fmt(&payload, "ini")?; - Ok(Self::WireGuard { identity, config }) - } - } - } - - pub fn identity(&self) -> &RuntimeIdentity { - match self { - Self::Passthrough { identity } - | Self::Tailnet { identity, .. } - | Self::WireGuard { identity, .. } => identity, - } - } - - pub fn server_config(&self) -> Result { - match self { - Self::Passthrough { .. } => Ok(ServerConfig { - address: Vec::new(), - routes: Vec::new(), - dns_servers: Vec::new(), - search_domains: Vec::new(), - include_default_route: false, - name: None, - mtu: Some(1500), - }), - Self::Tailnet { .. } => Ok(ServerConfig { - address: Vec::new(), - routes: tailnet_routes(), - dns_servers: tailnet_dns_servers(), - search_domains: Vec::new(), - include_default_route: false, - name: None, - mtu: Some(1280), - }), - Self::WireGuard { config, .. 
} => ServerConfig::try_from(config), - } - } - - pub async fn start( - self, - tun_interface: Arc>>, - tailnet_helper: Option>, - ) -> Result { - match self { - Self::Passthrough { identity } => Ok(ActiveTunnel::Passthrough { - identity, - server_config: ServerConfig { - address: Vec::new(), - routes: Vec::new(), - dns_servers: Vec::new(), - search_domains: Vec::new(), - include_default_route: false, - name: None, - mtu: Some(1500), - }, - }), - Self::Tailnet { identity, config } => { - let (helper, shutdown_helper_on_stop) = match tailnet_helper { - Some(helper) => (helper, false), - None => { - let helper_request = tailnet_helper_request(&identity, &config); - let helper = Arc::new(spawn_tailscale_helper(&helper_request).await?); - (helper, true) - } - }; - let status = wait_for_tailnet_ready(helper.as_ref()).await?; - let server_config = tailnet_server_config(&status); - let packet_socket = helper - .packet_socket() - .map(PathBuf::from) - .ok_or_else(|| anyhow::anyhow!("tailnet helper did not report a packet socket"))?; - let packet_bridge = connect_tailnet_packet_bridge(packet_socket).await?; - #[cfg(target_vendor = "apple")] - let tun_task = None; - #[cfg(not(target_vendor = "apple"))] - let tun_task = { - let tun = TunOptions::new().open()?; - tun_interface.write().await.replace(tun); - Some(tokio::spawn(run_tailnet_tun_bridge( - tun_interface.clone(), - packet_bridge.outbound_sender(), - packet_bridge.subscribe(), - ))) - }; - - Ok(ActiveTunnel::Tailnet { - identity, - server_config, - helper, - shutdown_helper_on_stop, - packet_bridge, - tun_task, - }) - } - Self::WireGuard { identity, config } => { - let server_config = ServerConfig::try_from(&config)?; - let tun = TunOptions::new().open()?; - tun_interface.write().await.replace(tun); - - match start_wireguard_runtime(config, tun_interface.clone()).await { - Ok((interface, task)) => Ok(ActiveTunnel::WireGuard { - identity, - server_config, - interface, - task, - }), - Err(err) => { - 
tun_interface.write().await.take(); - Err(err) - } - } - } - } - } -} - -pub enum ActiveTunnel { - Passthrough { - identity: RuntimeIdentity, - server_config: ServerConfig, - }, - Tailnet { - identity: RuntimeIdentity, - server_config: ServerConfig, - helper: Arc, - shutdown_helper_on_stop: bool, - packet_bridge: TailnetPacketBridge, - tun_task: Option>>, - }, - WireGuard { - identity: RuntimeIdentity, - server_config: ServerConfig, - interface: Arc>, - task: JoinHandle>, - }, -} - -impl ActiveTunnel { - pub fn identity(&self) -> &RuntimeIdentity { - match self { - Self::Passthrough { identity, .. } - | Self::Tailnet { identity, .. } - | Self::WireGuard { identity, .. } => identity, - } - } - - pub fn server_config(&self) -> &ServerConfig { - match self { - Self::Passthrough { server_config, .. } - | Self::Tailnet { server_config, .. } - | Self::WireGuard { server_config, .. } => server_config, - } - } - - pub fn packet_stream( - &self, - ) -> Option<(mpsc::Sender>, broadcast::Receiver>)> { - match self { - Self::Tailnet { packet_bridge, .. } => Some(( - packet_bridge.outbound_sender(), - packet_bridge.subscribe(), - )), - _ => None, - } - } - - pub async fn shutdown(self, tun_interface: &Arc>>) -> Result<()> { - match self { - Self::Passthrough { .. } => Ok(()), - Self::Tailnet { - helper, - shutdown_helper_on_stop, - packet_bridge, - tun_task, - .. - } => { - if let Some(tun_task) = tun_task { - tun_task.abort(); - match tun_task.await { - Ok(Ok(())) => {} - Ok(Err(err)) => return Err(err), - Err(err) if err.is_cancelled() => {} - Err(err) => return Err(err.into()), - } - } - packet_bridge.task.abort(); - match packet_bridge.task.await { - Ok(Ok(())) => {} - Ok(Err(err)) => return Err(err), - Err(err) if err.is_cancelled() => {} - Err(err) => return Err(err.into()), - } - tun_interface.write().await.take(); - if shutdown_helper_on_stop { - helper.shutdown().await?; - } - Ok(()) - } - Self::WireGuard { - interface, - task, - .. 
- } => { - interface.read().await.remove_tun().await; - let task_result = task.await; - tun_interface.write().await.take(); - task_result??; - Ok(()) - } - } - } -} - -pub struct TailnetPacketBridge { - outbound: mpsc::Sender>, - inbound: broadcast::Sender>, - task: JoinHandle>, -} - -impl TailnetPacketBridge { - fn outbound_sender(&self) -> mpsc::Sender> { - self.outbound.clone() - } - - fn subscribe(&self) -> broadcast::Receiver> { - self.inbound.subscribe() - } -} - -async fn start_wireguard_runtime( - config: Config, - tun_interface: Arc>>, -) -> Result<(Arc>, JoinHandle>)> { - let mut interface: WireGuardInterface = config.try_into()?; - interface.set_tun_ref(tun_interface).await; - let interface = Arc::new(RwLock::new(interface)); - let run_interface = interface.clone(); - let task = tokio::spawn(async move { - let guard = run_interface.read().await; - guard.run().await - }); - Ok((interface, task)) -} - -pub(crate) fn tailnet_helper_request( - identity: &RuntimeIdentity, - config: &TailnetConfig, -) -> TailscaleLoginStartRequest { - let account_name = config - .account - .as_deref() - .filter(|value| !value.trim().is_empty()) - .unwrap_or("default") - .to_owned(); - let identity_name = config - .identity - .as_deref() - .filter(|value| !value.trim().is_empty()) - .map(ToOwned::to_owned) - .unwrap_or_else(|| match identity { - RuntimeIdentity::Network { id, .. 
} => format!("network-{id}"), - RuntimeIdentity::Passthrough => "apple".to_owned(), - }); - let control_url = config.authority.as_deref().and_then(|authority| { - let authority = discovery::normalize_authority(authority); - (!discovery::is_managed_tailscale_authority(&authority)).then_some(authority) - }); - - let mut request = TailscaleLoginStartRequest { - account_name, - identity_name, - hostname: config.hostname.clone(), - control_url, - packet_socket: None, - }; - request.packet_socket = Some(packet_socket_path(&request).display().to_string()); - if request - .hostname - .as_deref() - .map(|value| value.trim().is_empty()) - .unwrap_or(true) - { - request.hostname = Some(default_hostname(&request)); - } - request -} - -async fn wait_for_tailnet_ready(helper: &TailscaleHelperProcess) -> Result { - let mut last_status = None; - for _ in 0..120 { - let status = helper.status().await?; - if status.running && !status.tailscale_ips.is_empty() { - return Ok(status); - } - if status.needs_login || status.auth_url.is_some() { - bail!("tailnet runtime requires a completed login before the tunnel can start"); - } - last_status = Some(status); - sleep(Duration::from_millis(250)).await; - } - - if let Some(status) = last_status { - bail!( - "tailnet helper never became ready (backend_state={})", - status.backend_state - ); - } - bail!("tailnet helper never produced a status update") -} - -fn tailnet_server_config(status: &TailscaleLoginStatus) -> ServerConfig { - let mut search_domains = Vec::new(); - if let Some(suffix) = status.magic_dns_suffix.as_deref() { - let suffix = suffix.trim().trim_end_matches('.'); - if !suffix.is_empty() { - search_domains.push(suffix.to_owned()); - } - } - - ServerConfig { - address: status - .tailscale_ips - .iter() - .map(|ip| tailnet_cidr(ip)) - .collect(), - routes: tailnet_routes(), - dns_servers: tailnet_dns_servers(), - search_domains, - include_default_route: false, - name: status.self_dns_name.clone(), - mtu: Some(1280), - } -} - -fn 
tailnet_routes() -> Vec { - vec!["100.64.0.0/10".to_owned(), "fd7a:115c:a1e0::/48".to_owned()] -} - -fn tailnet_dns_servers() -> Vec { - vec!["100.100.100.100".to_owned()] -} - -fn tailnet_cidr(ip: &str) -> String { - if ip.contains('/') { - return ip.to_owned(); - } - if ip.contains(':') { - format!("{ip}/128") - } else { - format!("{ip}/32") - } -} - -async fn connect_tailnet_packet_bridge(packet_socket: PathBuf) -> Result { - let mut last_error = None; - let mut stream = None; - for _ in 0..50 { - match UnixStream::connect(&packet_socket).await { - Ok(connected) => { - stream = Some(connected); - break; - } - Err(err) => { - last_error = Some(err); - sleep(Duration::from_millis(100)).await; - } - } - } - let stream = if let Some(stream) = stream { - stream - } else { - return Err(last_error - .context("failed to connect to tailnet helper packet socket")? - .into()); - }; - - let (outbound_tx, outbound_rx) = mpsc::channel(128); - let (inbound_tx, _) = broadcast::channel(128); - let task = tokio::spawn(run_tailnet_socket_bridge( - stream, - outbound_rx, - inbound_tx.clone(), - )); - - Ok(TailnetPacketBridge { - outbound: outbound_tx, - inbound: inbound_tx, - task, - }) -} - -async fn run_tailnet_socket_bridge( - stream: UnixStream, - mut outbound_rx: mpsc::Receiver>, - inbound_tx: broadcast::Sender>, -) -> Result<()> { - let (mut reader, mut writer) = stream.into_split(); - - let inbound = tokio::spawn(async move { - loop { - let packet = read_packet_frame(&mut reader).await?; - tracing::debug!( - "tailnet packet bridge received {} bytes from helper socket", - packet.len() - ); - let _ = inbound_tx.send(packet); - } - #[allow(unreachable_code)] - Result::<()>::Ok(()) - }); - - let outbound = tokio::spawn(async move { - while let Some(packet) = outbound_rx.recv().await { - tracing::debug!( - "tailnet packet bridge writing {} bytes to helper socket", - packet.len() - ); - write_packet_frame(&mut writer, &packet).await?; - } - Result::<()>::Ok(()) - }); - - let 
(inbound_result, outbound_result) = tokio::try_join!(inbound, outbound)?; - inbound_result?; - outbound_result?; - Ok(()) -} - -#[cfg(not(target_vendor = "apple"))] -async fn run_tailnet_tun_bridge( - tun_interface: Arc>>, - outbound_tx: mpsc::Sender>, - mut inbound_rx: broadcast::Receiver>, -) -> Result<()> { - let inbound_tun = tun_interface.clone(); - let inbound = tokio::spawn(async move { - loop { - let packet = match inbound_rx.recv().await { - Ok(packet) => packet, - Err(broadcast::error::RecvError::Lagged(_)) => continue, - Err(broadcast::error::RecvError::Closed) => break, - }; - let guard = inbound_tun.read().await; - let Some(tun) = guard.as_ref() else { - bail!("tailnet tun interface unavailable"); - }; - tun.send(&packet) - .await - .context("failed to write tailnet packet to tun")?; - } - Result::<()>::Ok(()) - }); - - let outbound_tun = tun_interface.clone(); - let outbound = tokio::spawn(async move { - let mut buf = vec![0u8; 65_535]; - loop { - let len = { - let guard = outbound_tun.read().await; - let Some(tun) = guard.as_ref() else { - bail!("tailnet tun interface unavailable"); - }; - tun.recv(&mut buf) - .await - .context("failed to read packet from tailnet tun")? 
- }; - outbound_tx - .send(buf[..len].to_vec()) - .await - .context("failed to forward packet to tailnet helper")?; - } - #[allow(unreachable_code)] - Result::<()>::Ok(()) - }); - - let (inbound_result, outbound_result) = tokio::try_join!(inbound, outbound)?; - inbound_result?; - outbound_result?; - Ok(()) -} - -async fn read_packet_frame(reader: &mut R) -> Result> -where - R: AsyncRead + Unpin, -{ - let mut len_buf = [0u8; 4]; - reader - .read_exact(&mut len_buf) - .await - .context("failed to read tailnet packet frame length")?; - let len = u32::from_be_bytes(len_buf) as usize; - let mut packet = vec![0u8; len]; - reader - .read_exact(&mut packet) - .await - .context("failed to read tailnet packet frame payload")?; - Ok(packet) -} - -async fn write_packet_frame(writer: &mut W, packet: &[u8]) -> Result<()> -where - W: AsyncWrite + Unpin, -{ - writer - .write_all(&(packet.len() as u32).to_be_bytes()) - .await - .context("failed to write tailnet packet frame length")?; - writer - .write_all(packet) - .await - .context("failed to write tailnet packet frame payload")?; - writer - .flush() - .await - .context("failed to flush tailnet packet frame") -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn no_networks_resolve_to_passthrough() { - let resolved = ResolvedTunnel::from_networks(&[]).unwrap(); - assert_eq!(resolved.identity(), &RuntimeIdentity::Passthrough); - assert_eq!( - resolved.server_config().unwrap().address, - Vec::::new() - ); - } - - #[test] - fn tailnet_server_config_uses_host_prefixes() { - let status = TailscaleLoginStatus { - running: true, - tailscale_ips: vec!["100.101.102.103".to_owned(), "fd7a:115c:a1e0::123".to_owned()], - ..Default::default() - }; - let config = tailnet_server_config(&status); - assert_eq!( - config.address, - vec!["100.101.102.103/32", "fd7a:115c:a1e0::123/128"] - ); - assert_eq!(config.mtu, Some(1280)); - } -} diff --git a/burrow/src/daemon/snapshots/burrow__daemon__response__response_serialization-2.snap 
b/burrow/src/daemon/snapshots/burrow__daemon__response__response_serialization-2.snap index 20988bf..3787cd1 100644 --- a/burrow/src/daemon/snapshots/burrow__daemon__response__response_serialization-2.snap +++ b/burrow/src/daemon/snapshots/burrow__daemon__response__response_serialization-2.snap @@ -1,5 +1,5 @@ --- source: burrow/src/daemon/response.rs -expression: "serde_json::to_string(&DaemonResponse::new(Ok::(DaemonResponseData::ServerInfo(ServerInfo {\n name: Some(\"burrow\".to_string()),\n ip: None,\n ipv6: Vec::new(),\n mtu: Some(1500),\n }))))?" +expression: "serde_json::to_string(&DaemonResponse::new(Ok::(DaemonResponseData::ServerInfo(ServerInfo {\n name: Some(\"burrow\".to_string()),\n ip: None,\n mtu: Some(1500),\n }))))?" --- -{"result":{"Ok":{"ServerInfo":{"name":"burrow","ip":null,"ipv6":[],"mtu":1500}}},"id":0} +{"result":{"Ok":{"ServerInfo":{"name":"burrow","ip":null,"mtu":1500}}},"id":0} diff --git a/burrow/src/database.rs b/burrow/src/database.rs deleted file mode 100644 index fe9a3c7..0000000 --- a/burrow/src/database.rs +++ /dev/null @@ -1,409 +0,0 @@ -use std::path::Path; - -use anyhow::Result; -use rusqlite::{params, Connection}; - -use crate::{ - control::TailnetConfig, - daemon::rpc::grpc_defs::{ - Network as RPCNetwork, NetworkDeleteRequest, NetworkReorderRequest, NetworkType, - }, - wireguard::config::{Config, Interface, Peer}, -}; - -#[cfg(target_vendor = "apple")] -const DB_PATH: &str = "burrow.db"; - -#[cfg(not(target_vendor = "apple"))] -const DB_PATH: &str = "/var/lib/burrow/burrow.db"; - -const CREATE_WG_INTERFACE_TABLE: &str = "CREATE TABLE IF NOT EXISTS wg_interface ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT, - listen_port INTEGER, - mtu INTEGER, - private_key TEXT NOT NULL, - address TEXT NOT NULL, - dns TEXT NOT NULL -)"; - -const CREATE_WG_PEER_TABLE: &str = "CREATE TABLE IF NOT EXISTS wg_peer ( - interface_id INT REFERENCES wg_interface(id) ON UPDATE CASCADE, - endpoint TEXT NOT NULL, - public_key TEXT NOT NULL, - 
allowed_ips TEXT NOT NULL, - preshared_key TEXT -)"; - -const CREATE_NETWORK_TABLE: &str = "CREATE TABLE IF NOT EXISTS network ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - type TEXT NOT NULL, - payload BLOB, - idx INTEGER, - interface_id INT REFERENCES wg_interface(id) ON UPDATE CASCADE -); -CREATE TRIGGER IF NOT EXISTS increment_network_idx -AFTER INSERT ON network -BEGIN - UPDATE network - SET idx = (SELECT COALESCE(MAX(idx), 0) + 1 FROM network) - WHERE id = NEW.id; -END; -"; - -pub fn initialize_tables(conn: &Connection) -> Result<()> { - conn.execute(CREATE_WG_INTERFACE_TABLE, [])?; - conn.execute(CREATE_WG_PEER_TABLE, [])?; - conn.execute_batch(CREATE_NETWORK_TABLE)?; - Ok(()) -} - -pub fn load_interface(conn: &Connection, interface_id: &str) -> Result { - let iface = conn.query_row( - "SELECT private_key, dns, address, listen_port, mtu FROM wg_interface WHERE id = ?", - [&interface_id], - |row| { - let dns_rw: String = row.get(1)?; - let dns = parse_lst(&dns_rw); - let address_rw: String = row.get(2)?; - let address = parse_lst(&address_rw); - Ok(Interface { - private_key: row.get(0)?, - dns, - address, - mtu: row.get(4)?, - listen_port: row.get(3)?, - }) - }, - )?; - let mut peers_stmt = conn.prepare("SELECT public_key, preshared_key, allowed_ips, endpoint FROM wg_peer WHERE interface_id = ?")?; - let peers = peers_stmt - .query_map([&interface_id], |row| { - let preshared_key: Option = row.get(1)?; - let allowed_ips_rw: String = row.get(2)?; - let allowed_ips: Vec = - allowed_ips_rw.split(',').map(|s| s.to_string()).collect(); - Ok(Peer { - public_key: row.get(0)?, - preshared_key, - allowed_ips, - endpoint: row.get(3)?, - persistent_keepalive: None, - name: None, - }) - })? 
- .collect::>>()?; - Ok(Config { interface: iface, peers }) -} - -pub fn dump_interface(conn: &Connection, config: &Config) -> Result<()> { - let mut stmt = conn.prepare("INSERT INTO wg_interface (private_key, dns, address, listen_port, mtu) VALUES (?, ?, ?, ?, ?)")?; - let cif = &config.interface; - stmt.execute(params![ - cif.private_key, - to_lst(&cif.dns), - to_lst(&cif.address), - cif.listen_port.unwrap_or(51820), - cif.mtu - ])?; - let interface_id = conn.last_insert_rowid(); - let mut stmt = conn.prepare("INSERT INTO wg_peer (interface_id, public_key, preshared_key, allowed_ips, endpoint) VALUES (?, ?, ?, ?, ?)")?; - for peer in &config.peers { - stmt.execute(params![ - &interface_id, - &peer.public_key, - &peer.preshared_key, - &peer.allowed_ips.join(","), - &peer.endpoint - ])?; - } - Ok(()) -} - -pub fn get_connection(path: Option<&Path>) -> Result { - let p = path.unwrap_or_else(|| std::path::Path::new(DB_PATH)); - let conn = Connection::open(p)?; - initialize_tables(&conn)?; - Ok(conn) -} - -pub fn add_network(conn: &Connection, network: &RPCNetwork) -> Result<()> { - validate_network_payload(network)?; - let mut stmt = conn.prepare("INSERT INTO network (id, type, payload) VALUES (?, ?, ?)")?; - stmt.execute(params![ - network.id, - network.r#type().as_str_name(), - &network.payload - ])?; - Ok(()) -} - -pub fn list_networks(conn: &Connection) -> Result> { - let mut stmt = conn.prepare("SELECT id, type, payload FROM network ORDER BY idx, id")?; - let networks: Vec = stmt - .query_map([], |row| { - let network_id: i32 = row.get(0)?; - let network_type: String = row.get(1)?; - let network_type = NetworkType::from_str_name(network_type.as_str()) - .ok_or(rusqlite::Error::InvalidQuery)?; - let payload: Vec = row.get(2)?; - Ok(RPCNetwork { - id: network_id, - r#type: network_type.into(), - payload: payload.into(), - }) - })? 
- .collect::, rusqlite::Error>>()?; - Ok(networks) -} - -pub fn reorder_network(conn: &Connection, req: NetworkReorderRequest) -> Result<()> { - let mut ordered_ids = ordered_network_ids(conn)?; - let Some(current_idx) = ordered_ids.iter().position(|id| *id == req.id) else { - return Err(anyhow::anyhow!("No such network exists")); - }; - - let target_idx = usize::try_from(req.index) - .map_err(|_| anyhow::anyhow!("Network index must be non-negative"))?; - - let moved_id = ordered_ids.remove(current_idx); - let target_idx = target_idx.min(ordered_ids.len()); - ordered_ids.insert(target_idx, moved_id); - - renumber_networks(conn, &ordered_ids) -} - -pub fn delete_network(conn: &Connection, req: NetworkDeleteRequest) -> Result<()> { - let mut stmt = conn.prepare("DELETE FROM network WHERE id = ?")?; - let res = stmt.execute(params![req.id])?; - if res == 0 { - return Err(anyhow::anyhow!("No such network exists")); - } - let ordered_ids = ordered_network_ids(conn)?; - renumber_networks(conn, &ordered_ids) -} - -fn parse_lst(s: &str) -> Vec { - if s.is_empty() { - return vec![]; - } - s.split(',').map(|s| s.to_string()).collect() -} - -fn to_lst(v: &Vec) -> String { - v.iter() - .map(|s| s.to_string()) - .collect::>() - .join(",") -} - -fn validate_network_payload(network: &RPCNetwork) -> Result<()> { - match network.r#type() { - NetworkType::WireGuard => { - let payload_str = String::from_utf8(network.payload.clone())?; - Config::from_content_fmt(&payload_str, "ini")?; - } - NetworkType::Tailnet => { - TailnetConfig::from_slice(&network.payload)?; - } - } - Ok(()) -} - -fn ordered_network_ids(conn: &Connection) -> Result> { - let mut stmt = conn.prepare("SELECT id FROM network ORDER BY idx, id")?; - let ids = stmt - .query_map([], |row| row.get::<_, i32>(0))? 
- .collect::>>()?; - Ok(ids) -} - -fn renumber_networks(conn: &Connection, ordered_ids: &[i32]) -> Result<()> { - conn.execute_batch("BEGIN IMMEDIATE")?; - let result = (|| -> Result<()> { - let mut stmt = conn.prepare("UPDATE network SET idx = ? WHERE id = ?")?; - for (idx, id) in ordered_ids.iter().enumerate() { - stmt.execute(params![idx as i32, id])?; - } - Ok(()) - })(); - - match result { - Ok(()) => { - conn.execute_batch("COMMIT")?; - Ok(()) - } - Err(err) => { - let _ = conn.execute_batch("ROLLBACK"); - Err(err) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use tempfile::tempdir; - - fn sample_wireguard_payload() -> Vec { - br#"[Interface] -PrivateKey = OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8= -Address = 10.13.13.2/24 -ListenPort = 51820 - -[Peer] -PublicKey = 8GaFjVO6c4luCHG4ONO+1bFG8tO+Zz5/Gy+Geht1USM= -PresharedKey = ha7j4BjD49sIzyF9SNlbueK0AMHghlj6+u0G3bzC698= -AllowedIPs = 0.0.0.0/0, 8.8.8.8/32 -Endpoint = wg.burrow.rs:51820 -"# - .to_vec() - } - - fn sample_wireguard_payload_with_address(address: &str, mtu: u16) -> Vec { - format!( - "[Interface]\nPrivateKey = OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8=\nAddress = {address}\nListenPort = 51820\nMTU = {mtu}\n\n[Peer]\nPublicKey = 8GaFjVO6c4luCHG4ONO+1bFG8tO+Zz5/Gy+Geht1USM=\nPresharedKey = ha7j4BjD49sIzyF9SNlbueK0AMHghlj6+u0G3bzC698=\nAllowedIPs = 0.0.0.0/0\nEndpoint = wg.burrow.rs:51820\n" - ) - .into_bytes() - } - - fn sample_tailnet_payload() -> Vec { - br#"{ - "provider":"tailscale", - "account":"default", - "identity":"apple", - "tailnet":"example.ts.net", - "hostname":"burrow-phone" -}"# - .to_vec() - } - - #[test] - fn test_db() { - let conn = Connection::open_in_memory().unwrap(); - initialize_tables(&conn).unwrap(); - let config = Config::default(); - dump_interface(&conn, &config).unwrap(); - let loaded = load_interface(&conn, "1").unwrap(); - assert_eq!(config, loaded); - } - - #[test] - fn add_network_validates_payloads() { - let conn = 
Connection::open_in_memory().unwrap(); - initialize_tables(&conn).unwrap(); - - add_network( - &conn, - &RPCNetwork { - id: 1, - r#type: NetworkType::WireGuard.into(), - payload: sample_wireguard_payload(), - }, - ) - .unwrap(); - - add_network( - &conn, - &RPCNetwork { - id: 2, - r#type: NetworkType::Tailnet.into(), - payload: sample_tailnet_payload(), - }, - ) - .unwrap(); - - add_network( - &conn, - &RPCNetwork { - id: 3, - r#type: NetworkType::WireGuard.into(), - payload: sample_wireguard_payload_with_address("10.42.0.2/32", 1380), - }, - ) - .unwrap(); - - assert!(add_network( - &conn, - &RPCNetwork { - id: 4, - r#type: NetworkType::WireGuard.into(), - payload: b"not-a-config".to_vec(), - }, - ) - .is_err()); - - assert!(add_network( - &conn, - &RPCNetwork { - id: 5, - r#type: NetworkType::Tailnet.into(), - payload: b"not-a-tailnet-config".to_vec(), - }, - ) - .is_err()); - - let ids: Vec = list_networks(&conn) - .unwrap() - .into_iter() - .map(|n| n.id) - .collect(); - assert_eq!(ids, vec![1, 2, 3]); - } - - #[test] - fn reorder_and_delete_networks_keep_priority_stable() { - let conn = Connection::open_in_memory().unwrap(); - initialize_tables(&conn).unwrap(); - - for (id, address, mtu) in [ - (1, "10.42.0.2/32", 1380), - (2, "10.42.0.3/32", 1381), - (3, "10.42.0.4/32", 1382), - ] { - add_network( - &conn, - &RPCNetwork { - id, - r#type: NetworkType::WireGuard.into(), - payload: sample_wireguard_payload_with_address(address, mtu), - }, - ) - .unwrap(); - } - - reorder_network(&conn, NetworkReorderRequest { id: 3, index: 0 }).unwrap(); - let ids: Vec = list_networks(&conn) - .unwrap() - .into_iter() - .map(|n| n.id) - .collect(); - assert_eq!(ids, vec![3, 1, 2]); - - delete_network(&conn, NetworkDeleteRequest { id: 1 }).unwrap(); - let ids: Vec = list_networks(&conn) - .unwrap() - .into_iter() - .map(|n| n.id) - .collect(); - assert_eq!(ids, vec![3, 2]); - } - - #[test] - fn get_connection_does_not_seed_a_default_interface() { - let dir = tempdir().unwrap(); - 
let db_path = dir.path().join("burrow.sqlite3"); - - let conn = get_connection(Some(db_path.as_path())).unwrap(); - - let interface_count: i64 = conn - .query_row("SELECT COUNT(*) FROM wg_interface", [], |row| row.get(0)) - .unwrap(); - let network_count: i64 = conn - .query_row("SELECT COUNT(*) FROM network", [], |row| row.get(0)) - .unwrap(); - - assert_eq!(interface_count, 0); - assert_eq!(network_count, 0); - } -} diff --git a/burrow/src/lib.rs b/burrow/src/lib.rs index 7867d18..c5406b2 100644 --- a/burrow/src/lib.rs +++ b/burrow/src/lib.rs @@ -1,25 +1,18 @@ -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -pub mod control; - #[cfg(any(target_os = "linux", target_vendor = "apple"))] pub mod wireguard; -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -mod auth; #[cfg(any(target_os = "linux", target_vendor = "apple"))] mod daemon; -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -pub mod database; -#[cfg(target_os = "linux")] -pub mod tor; pub(crate) mod tracing; -#[cfg(target_os = "linux")] -pub mod usernet; -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -pub use daemon::apple::{spawn_in_process, spawn_in_process_with_paths}; +#[cfg(target_vendor = "apple")] +pub use daemon::apple::spawn_in_process; #[cfg(any(target_os = "linux", target_vendor = "apple"))] pub use daemon::{ - rpc::grpc_defs, rpc::BurrowClient, rpc::DaemonResponse, rpc::ServerInfo, DaemonClient, - DaemonCommand, DaemonResponseData, DaemonStartOptions, + DaemonClient, + DaemonCommand, + DaemonResponse, + DaemonResponseData, + DaemonStartOptions, + ServerInfo, }; diff --git a/burrow/src/main.rs b/burrow/src/main.rs index cfa2085..71d1c02 100644 --- a/burrow/src/main.rs +++ b/burrow/src/main.rs @@ -1,8 +1,6 @@ use anyhow::Result; use clap::{Args, Parser, Subcommand}; -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -mod control; #[cfg(any(target_os = "linux", target_vendor = "apple"))] mod daemon; pub(crate) mod tracing; @@ -10,24 +8,12 @@ 
pub(crate) mod tracing; mod wireguard; #[cfg(any(target_os = "linux", target_vendor = "apple"))] -mod auth; -#[cfg(target_os = "linux")] -mod tor; -#[cfg(target_os = "linux")] -mod usernet; - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -use daemon::{DaemonClient, DaemonCommand}; +use daemon::{DaemonClient, DaemonCommand, DaemonStartOptions}; +use tun::TunOptions; #[cfg(any(target_os = "linux", target_vendor = "apple"))] use crate::daemon::DaemonResponseData; -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -pub mod database; - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -use crate::daemon::rpc::{grpc_defs::Empty, BurrowClient}; - #[derive(Parser)] #[command(name = "Burrow")] #[command(author = "Hack Club ")] @@ -56,42 +42,6 @@ enum Commands { ServerInfo, /// Server config ServerConfig, - /// Reload Config - ReloadConfig(ReloadConfigArgs), - /// Authentication server - AuthServer, - /// Server Status - ServerStatus, - /// Tunnel Config - TunnelConfig, - /// Add Network - NetworkAdd(NetworkAddArgs), - /// List Networks - NetworkList, - /// Reorder Network - NetworkReorder(NetworkReorderArgs), - /// Delete Network - NetworkDelete(NetworkDeleteArgs), - /// Discover a Tailnet authority through the daemon - TailnetDiscover(TailnetDiscoverArgs), - /// Probe a Tailnet authority through the daemon - TailnetProbe(TailnetProbeArgs), - /// Send an ICMP echo probe through the active Tailnet tunnel over daemon packet streaming - TailnetPing(TailnetPingArgs), - /// Send a UDP echo probe through the active Tailnet tunnel over daemon packet streaming - TailnetUdpEcho(TailnetUdpEchoArgs), - #[cfg(target_os = "linux")] - /// Run a command in an unshared Linux namespace using a Burrow backend - Exec(ExecArgs), - #[cfg(target_os = "linux")] - /// Run a command in a Linux user namespace with Tor-backed networking - TorExec(TorExecArgs), -} - -#[derive(Args)] -struct ReloadConfigArgs { - #[clap(long, short)] - interface_id: String, } 
#[derive(Args)] @@ -100,600 +50,32 @@ struct StartArgs {} #[derive(Args)] struct DaemonArgs {} -#[derive(Args)] -struct NetworkAddArgs { - id: i32, - network_type: i32, - payload_path: String, -} - -#[derive(Args)] -struct NetworkReorderArgs { - id: i32, - index: i32, -} - -#[derive(Args)] -struct NetworkDeleteArgs { - id: i32, -} - -#[derive(Args)] -struct TailnetDiscoverArgs { - email: String, -} - -#[derive(Args)] -struct TailnetProbeArgs { - authority: String, -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -#[derive(Args)] -struct TailnetPingArgs { - remote: String, - #[arg(long, default_value = "burrow-tailnet-smoke")] - payload: String, - #[arg(long, default_value_t = 5000)] - timeout_ms: u64, -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -#[derive(Args)] -struct TailnetUdpEchoArgs { - remote: String, - #[arg(long, default_value = "burrow-tailnet-smoke")] - message: String, - #[arg(long, default_value_t = 5000)] - timeout_ms: u64, -} - -#[cfg(target_os = "linux")] -#[derive(Args)] -struct TorExecArgs { - payload_path: String, - #[arg(required = true, num_args = 1.., trailing_var_arg = true)] - command: Vec, -} - -#[cfg(target_os = "linux")] -#[derive(Args)] -struct ExecArgs { - #[arg(long, value_enum)] - backend: usernet::ExecBackendKind, - #[arg(long)] - payload: Option, - #[arg(required = true, num_args = 1.., trailing_var_arg = true)] - command: Vec, -} - #[cfg(any(target_os = "linux", target_vendor = "apple"))] async fn try_start() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let res = client.tunnel_client.tunnel_start(Empty {}).await?; - println!("Got results! 
{:?}", res); - Ok(()) + let mut client = DaemonClient::new().await?; + client + .send_command(DaemonCommand::Start(DaemonStartOptions { + tun: TunOptions::new().address(vec!["10.13.13.2", "::2"]), + })) + .await + .map(|_| ()) } #[cfg(any(target_os = "linux", target_vendor = "apple"))] async fn try_stop() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let res = client.tunnel_client.tunnel_stop(Empty {}).await?; - println!("Got results! {:?}", res); + let mut client = DaemonClient::new().await?; + client.send_command(DaemonCommand::Stop).await?; Ok(()) } #[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_serverstatus() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let mut res = client - .tunnel_client - .tunnel_status(Empty {}) - .await? - .into_inner(); - if let Some(st) = res.message().await? { - println!("Server Status: {:?}", st); - } else { - println!("Server Status is None"); - } - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_tun_config() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let mut res = client - .tunnel_client - .tunnel_configuration(Empty {}) - .await? - .into_inner(); - if let Some(config) = res.message().await? 
{ - println!("Tunnel Config: {:?}", config); - } else { - println!("Tunnel Config is None"); - } - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_network_add(id: i32, network_type: i32, payload_path: &str) -> Result<()> { - use tokio::{fs::File, io::AsyncReadExt}; - - use crate::daemon::rpc::grpc_defs::Network; - - let mut file = File::open(payload_path).await?; - let mut payload = Vec::new(); - file.read_to_end(&mut payload).await?; - - let mut client = BurrowClient::from_uds().await?; - let network = Network { - id, - r#type: network_type, - payload, - }; - let res = client.networks_client.network_add(network).await?; - println!("Network Add Response: {:?}", res); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_network_list() -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let mut res = client - .networks_client - .network_list(Empty {}) - .await? - .into_inner(); - while let Some(network_list) = res.message().await? 
{ - println!("Network List: {:?}", network_list); - } - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_network_reorder(id: i32, index: i32) -> Result<()> { - use crate::daemon::rpc::grpc_defs::NetworkReorderRequest; - - let mut client = BurrowClient::from_uds().await?; - let reorder_request = NetworkReorderRequest { id, index }; - let res = client - .networks_client - .network_reorder(reorder_request) - .await?; - println!("Network Reorder Response: {:?}", res); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_network_delete(id: i32) -> Result<()> { - use crate::daemon::rpc::grpc_defs::NetworkDeleteRequest; - - let mut client = BurrowClient::from_uds().await?; - let delete_request = NetworkDeleteRequest { id }; - let res = client - .networks_client - .network_delete(delete_request) - .await?; - println!("Network Delete Response: {:?}", res); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_tailnet_discover(email: &str) -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let response = client - .tailnet_client - .discover(crate::daemon::rpc::grpc_defs::TailnetDiscoverRequest { email: email.to_owned() }) - .await? - .into_inner(); - println!("Tailnet Discover Response: {:?}", response); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_tailnet_probe(authority: &str) -> Result<()> { - let mut client = BurrowClient::from_uds().await?; - let response = client - .tailnet_client - .probe(crate::daemon::rpc::grpc_defs::TailnetProbeRequest { - authority: authority.to_owned(), - }) - .await? 
- .into_inner(); - println!("Tailnet Probe Response: {:?}", response); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_tailnet_ping(remote: &str, payload: &str, timeout_ms: u64) -> Result<()> { - use std::net::IpAddr; - - use anyhow::Context; - use rand::Rng; - use tokio::{ - sync::mpsc, - time::{timeout, Duration}, - }; - use tokio_stream::wrappers::ReceiverStream; - - use crate::daemon::rpc::grpc_defs::{Empty, TunnelPacket}; - - let remote_ip: IpAddr = remote - .parse() - .with_context(|| format!("invalid remote IP address {remote}"))?; - let message = payload.as_bytes().to_vec(); - - let mut client = BurrowClient::from_uds().await?; - client.tunnel_client.tunnel_start(Empty {}).await?; - - let mut config_stream = client - .tunnel_client - .tunnel_configuration(Empty {}) - .await? - .into_inner(); - let config = config_stream - .message() - .await? - .context("tunnel configuration stream ended before yielding a config")?; - let local_ip = select_tailnet_local_ip(&config.addresses, remote_ip)?; - - let identifier = rand::thread_rng().gen::(); - let sequence = 1_u16; - let packet = build_icmp_echo_request(local_ip, remote_ip, identifier, sequence, &message)?; - - let (outbound_tx, outbound_rx) = mpsc::channel::(128); - let mut tunnel_packets = client - .tunnel_client - .tunnel_packets(ReceiverStream::new(outbound_rx)) - .await? - .into_inner(); - - outbound_tx - .send(TunnelPacket { payload: packet }) - .await - .context("failed to send ICMP echo probe into daemon packet stream")?; - log::debug!( - "tailnet ping probe queued from {local_ip} to {remote_ip} identifier={identifier} sequence={sequence}" - ); - drop(outbound_tx); - - let reply = timeout(Duration::from_millis(timeout_ms), async { - loop { - let packet = tunnel_packets - .message() - .await - .context("failed to read packet from daemon packet stream")? 
- .context("daemon packet stream ended before returning a reply")?; - log::debug!( - "tailnet ping received {} bytes from daemon packet stream", - packet.payload.len() - ); - if let Some(reply) = - parse_icmp_echo_reply(&packet.payload, local_ip, remote_ip, identifier, sequence)? - { - break Ok::<_, anyhow::Error>(reply); - } +async fn try_serverinfo() -> Result<()> { + let mut client = DaemonClient::new().await?; + let res = client.send_command(DaemonCommand::ServerInfo).await?; + match res.result { + Ok(DaemonResponseData::ServerInfo(si)) => { + println!("Got Result! {:?}", si); } - }) - .await - .with_context(|| format!("timed out waiting for ICMP echo reply from {remote_ip}"))??; - - println!("Tailnet Ping Source: {}", reply.source); - println!("Tailnet Ping Destination: {}", reply.destination); - println!( - "Tailnet Ping Payload: {}", - String::from_utf8_lossy(&reply.payload) - ); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_tailnet_udp_echo(remote: &str, message: &str, timeout_ms: u64) -> Result<()> { - use std::net::SocketAddr; - - use anyhow::{bail, Context}; - use futures::{SinkExt, StreamExt}; - use netstack_smoltcp::StackBuilder; - use tokio::{ - sync::mpsc, - time::{timeout, Duration}, - }; - use tokio_stream::wrappers::ReceiverStream; - - use crate::daemon::rpc::grpc_defs::{Empty, TunnelPacket}; - - let remote_addr: SocketAddr = remote - .parse() - .with_context(|| format!("invalid remote socket address {remote}"))?; - - let mut client = BurrowClient::from_uds().await?; - client.tunnel_client.tunnel_start(Empty {}).await?; - - let mut config_stream = client - .tunnel_client - .tunnel_configuration(Empty {}) - .await? - .into_inner(); - let config = config_stream - .message() - .await? 
- .context("tunnel configuration stream ended before yielding a config")?; - let local_addr = select_tailnet_local_socket(&config.addresses, remote_addr.ip())?; - - let (stack, runner, udp_socket, _) = StackBuilder::default() - .enable_udp(true) - .enable_tcp(true) - .build() - .context("failed to build userspace UDP stack")?; - let runner = runner.context("userspace UDP stack runner unavailable")?; - let udp_socket = udp_socket.context("userspace UDP stack socket unavailable")?; - let (mut stack_sink, mut stack_stream) = stack.split(); - let (mut udp_reader, mut udp_writer) = udp_socket.split(); - - let (outbound_tx, outbound_rx) = mpsc::channel::(128); - let mut tunnel_packets = client - .tunnel_client - .tunnel_packets(ReceiverStream::new(outbound_rx)) - .await? - .into_inner(); - - let ingress_task = tokio::spawn(async move { - loop { - match tunnel_packets.message().await? { - Some(packet) => { - log::debug!( - "tailnet udp echo received {} bytes from daemon packet stream", - packet.payload.len() - ); - stack_sink - .send(packet.payload) - .await - .context("failed to feed inbound tailnet packet into userspace stack")?; - } - None => break, - } - } - Result::<()>::Ok(()) - }); - - let egress_task = tokio::spawn(async move { - while let Some(packet) = stack_stream.next().await { - let payload = packet.context("failed to read outbound packet from userspace stack")?; - log::debug!( - "tailnet udp echo sending {} bytes into daemon packet stream", - payload.len() - ); - outbound_tx - .send(TunnelPacket { payload }) - .await - .context("failed to forward outbound tailnet packet to daemon")?; - } - Result::<()>::Ok(()) - }); - - let runner_task = tokio::spawn(async move { runner.await.map_err(anyhow::Error::from) }); - - udp_writer - .send((message.as_bytes().to_vec(), local_addr, remote_addr)) - .await - .context("failed to send UDP echo probe into userspace stack")?; - log::debug!("tailnet udp echo probe queued from {local_addr} to {remote_addr}"); - - let response 
= timeout(Duration::from_millis(timeout_ms), udp_reader.next()) - .await - .with_context(|| format!("timed out waiting for UDP echo from {remote_addr}"))? - .context("userspace UDP stack ended before returning a reply")?; - let (payload, reply_source, reply_destination) = response; - let response_text = String::from_utf8_lossy(&payload); - - ingress_task.abort(); - egress_task.abort(); - runner_task.abort(); - - if reply_source != remote_addr { - bail!("received UDP reply from unexpected source {reply_source}"); - } - if reply_destination != local_addr { - bail!("received UDP reply for unexpected local socket {reply_destination}"); - } - if payload != message.as_bytes() { - bail!("UDP echo payload mismatch"); - } - - println!("Tailnet UDP Echo Source: {reply_source}"); - println!("Tailnet UDP Echo Destination: {reply_destination}"); - println!("Tailnet UDP Echo Payload: {response_text}"); - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -fn select_tailnet_local_ip( - addresses: &[String], - remote_ip: std::net::IpAddr, -) -> Result { - use anyhow::Context; - - let family_is_v4 = remote_ip.is_ipv4(); - addresses - .iter() - .filter_map(|cidr| cidr.split('/').next()) - .filter_map(|ip| ip.parse::().ok()) - .find(|ip| ip.is_ipv4() == family_is_v4) - .with_context(|| { - format!( - "no local {} tailnet address found in daemon config {:?}", - if family_is_v4 { "IPv4" } else { "IPv6" }, - addresses - ) - }) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -fn select_tailnet_local_socket( - addresses: &[String], - remote_ip: std::net::IpAddr, -) -> Result { - use rand::Rng; - - let local_ip = select_tailnet_local_ip(addresses, remote_ip)?; - let port = rand::thread_rng().gen_range(40000..50000); - Ok(std::net::SocketAddr::new(local_ip, port)) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -struct IcmpEchoReply { - source: std::net::IpAddr, - destination: std::net::IpAddr, - payload: Vec, -} - -#[cfg(any(target_os = 
"linux", target_vendor = "apple"))] -fn build_icmp_echo_request( - source: std::net::IpAddr, - destination: std::net::IpAddr, - identifier: u16, - sequence: u16, - payload: &[u8], -) -> Result> { - use anyhow::bail; - - let (source, destination) = match (source, destination) { - (std::net::IpAddr::V4(source), std::net::IpAddr::V4(destination)) => (source, destination), - _ => bail!("tailnet ping currently supports IPv4 only"), - }; - - let mut icmp = Vec::with_capacity(8 + payload.len()); - icmp.push(8); - icmp.push(0); - icmp.extend_from_slice(&[0, 0]); - icmp.extend_from_slice(&identifier.to_be_bytes()); - icmp.extend_from_slice(&sequence.to_be_bytes()); - icmp.extend_from_slice(payload); - let icmp_checksum = internet_checksum(&icmp); - icmp[2..4].copy_from_slice(&icmp_checksum.to_be_bytes()); - - let total_len = 20 + icmp.len(); - let mut packet = Vec::with_capacity(total_len); - packet.push(0x45); - packet.push(0); - packet.extend_from_slice(&(total_len as u16).to_be_bytes()); - packet.extend_from_slice(&0u16.to_be_bytes()); - packet.extend_from_slice(&0u16.to_be_bytes()); - packet.push(64); - packet.push(1); - packet.extend_from_slice(&[0, 0]); - packet.extend_from_slice(&source.octets()); - packet.extend_from_slice(&destination.octets()); - let header_checksum = internet_checksum(&packet); - packet[10..12].copy_from_slice(&header_checksum.to_be_bytes()); - packet.extend_from_slice(&icmp); - Ok(packet) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -fn parse_icmp_echo_reply( - packet: &[u8], - local_ip: std::net::IpAddr, - remote_ip: std::net::IpAddr, - identifier: u16, - sequence: u16, -) -> Result> { - use anyhow::bail; - - let (local_ip, remote_ip) = match (local_ip, remote_ip) { - (std::net::IpAddr::V4(local_ip), std::net::IpAddr::V4(remote_ip)) => (local_ip, remote_ip), - _ => bail!("tailnet ping currently supports IPv4 only"), - }; - - if packet.len() < 20 { - return Ok(None); - } - let version = packet[0] >> 4; - if version != 4 { - 
return Ok(None); - } - let ihl = (packet[0] & 0x0f) as usize * 4; - if packet.len() < ihl + 8 { - return Ok(None); - } - if packet[9] != 1 { - return Ok(None); - } - - let source = std::net::Ipv4Addr::new(packet[12], packet[13], packet[14], packet[15]); - let destination = std::net::Ipv4Addr::new(packet[16], packet[17], packet[18], packet[19]); - if source != remote_ip || destination != local_ip { - return Ok(None); - } - - let icmp = &packet[ihl..]; - if icmp[0] != 0 || icmp[1] != 0 { - return Ok(None); - } - let reply_identifier = u16::from_be_bytes([icmp[4], icmp[5]]); - let reply_sequence = u16::from_be_bytes([icmp[6], icmp[7]]); - if reply_identifier != identifier || reply_sequence != sequence { - return Ok(None); - } - - Ok(Some(IcmpEchoReply { - source: std::net::IpAddr::V4(source), - destination: std::net::IpAddr::V4(destination), - payload: icmp[8..].to_vec(), - })) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -fn internet_checksum(bytes: &[u8]) -> u16 { - let mut sum = 0u32; - let mut chunks = bytes.chunks_exact(2); - for chunk in &mut chunks { - sum += u16::from_be_bytes([chunk[0], chunk[1]]) as u32; - } - if let Some(&last) = chunks.remainder().first() { - sum += (last as u32) << 8; - } - while (sum >> 16) != 0 { - sum = (sum & 0xffff) + (sum >> 16); - } - !(sum as u16) -} - -#[cfg(target_os = "linux")] -async fn try_tor_exec(payload_path: &str, command: Vec) -> Result<()> { - let exit_code = usernet::run_exec(usernet::ExecInvocation { - backend: usernet::ExecBackendKind::Tor, - payload_path: Some(payload_path.into()), - command, - }) - .await?; - if exit_code != 0 { - std::process::exit(exit_code); - } - Ok(()) -} - -#[cfg(target_os = "linux")] -async fn try_exec( - backend: usernet::ExecBackendKind, - payload: Option, - command: Vec, -) -> Result<()> { - let exit_code = usernet::run_exec(usernet::ExecInvocation { - backend, - payload_path: payload.map(Into::into), - command, - }) - .await?; - if exit_code != 0 { - 
std::process::exit(exit_code); - } - Ok(()) -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -fn handle_unexpected(res: Result) { - match res { Ok(DaemonResponseData::None) => { println!("Server not started.") } @@ -704,17 +86,6 @@ fn handle_unexpected(res: Result) { println!("Error when retrieving from server: {}", e) } } -} - -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_serverinfo() -> Result<()> { - let mut client = DaemonClient::new().await?; - let res = client.send_command(DaemonCommand::ServerInfo).await?; - if let Ok(DaemonResponseData::ServerInfo(si)) = res.result { - println!("Got Result! {:?}", si); - } else { - handle_unexpected(res.result); - } Ok(()) } @@ -722,70 +93,55 @@ async fn try_serverinfo() -> Result<()> { async fn try_serverconfig() -> Result<()> { let mut client = DaemonClient::new().await?; let res = client.send_command(DaemonCommand::ServerConfig).await?; - if let Ok(DaemonResponseData::ServerConfig(cfig)) = res.result { - println!("Got Result! {:?}", cfig); - } else { - handle_unexpected(res.result); + match res.result { + Ok(DaemonResponseData::ServerConfig(cfig)) => { + println!("Got Result! {:?}", cfig); + } + Ok(DaemonResponseData::None) => { + println!("Server not started.") + } + Ok(res) => { + println!("Unexpected Response: {:?}", res) + } + Err(e) => { + println!("Error when retrieving from server: {}", e) + } } Ok(()) } -#[cfg(any(target_os = "linux", target_vendor = "apple"))] -async fn try_reloadconfig(interface_id: String) -> Result<()> { - let mut client = DaemonClient::new().await?; - let res = client - .send_command(DaemonCommand::ReloadConfig(interface_id)) - .await?; - if let Ok(DaemonResponseData::ServerConfig(cfig)) = res.result { - println!("Got Result! 
{:?}", cfig); - } else { - handle_unexpected(res.result); - } +#[cfg(not(any(target_os = "linux", target_vendor = "apple")))] +async fn try_start() -> Result<()> { + Ok(()) +} + +#[cfg(not(any(target_os = "linux", target_vendor = "apple")))] +async fn try_stop() -> Result<()> { + Ok(()) +} + +#[cfg(not(any(target_os = "linux", target_vendor = "apple")))] +async fn try_serverinfo() -> Result<()> { + Ok(()) +} + +#[cfg(not(any(target_os = "linux", target_vendor = "apple")))] +async fn try_serverconfig() -> Result<()> { Ok(()) } #[cfg(any(target_os = "linux", target_vendor = "apple"))] -#[tokio::main] +#[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { tracing::initialize(); - dotenv::dotenv().ok(); let cli = Cli::parse(); match &cli.command { Commands::Start(..) => try_start().await?, Commands::Stop => try_stop().await?, - Commands::Daemon(_) => daemon::daemon_main(None, None, None).await?, + Commands::Daemon(_) => daemon::daemon_main(None, None).await?, Commands::ServerInfo => try_serverinfo().await?, Commands::ServerConfig => try_serverconfig().await?, - Commands::ReloadConfig(args) => try_reloadconfig(args.interface_id.clone()).await?, - Commands::AuthServer => crate::auth::server::serve().await?, - Commands::ServerStatus => try_serverstatus().await?, - Commands::TunnelConfig => try_tun_config().await?, - Commands::NetworkAdd(args) => { - try_network_add(args.id, args.network_type, &args.payload_path).await? - } - Commands::NetworkList => try_network_list().await?, - Commands::NetworkReorder(args) => try_network_reorder(args.id, args.index).await?, - Commands::NetworkDelete(args) => try_network_delete(args.id).await?, - Commands::TailnetDiscover(args) => try_tailnet_discover(&args.email).await?, - Commands::TailnetProbe(args) => try_tailnet_probe(&args.authority).await?, - Commands::TailnetPing(args) => { - try_tailnet_ping(&args.remote, &args.payload, args.timeout_ms).await? 
- } - Commands::TailnetUdpEcho(args) => { - try_tailnet_udp_echo(&args.remote, &args.message, args.timeout_ms).await? - } - #[cfg(target_os = "linux")] - Commands::Exec(args) => { - try_exec( - args.backend.clone(), - args.payload.clone(), - args.command.clone(), - ) - .await? - } - #[cfg(target_os = "linux")] - Commands::TorExec(args) => try_tor_exec(&args.payload_path, args.command.clone()).await?, } Ok(()) @@ -793,5 +149,5 @@ async fn main() -> Result<()> { #[cfg(not(any(target_os = "linux", target_vendor = "apple")))] pub fn main() { - eprintln!("This platform is not supported") + eprintln!("This platform is not supported currently.") } diff --git a/burrow/src/tor/config.rs b/burrow/src/tor/config.rs deleted file mode 100644 index d3de9ec..0000000 --- a/burrow/src/tor/config.rs +++ /dev/null @@ -1,187 +0,0 @@ -use std::{net::SocketAddr, path::PathBuf, str}; - -use anyhow::{Context, Result}; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct Config { - #[serde(default)] - pub account: Option, - #[serde(default)] - pub identity: Option, - #[serde(default)] - pub address: Vec, - #[serde(default)] - pub dns: Vec, - #[serde(default)] - pub mtu: Option, - #[serde(default)] - pub tun_name: Option, - #[serde(default)] - pub arti: ArtiConfig, - #[serde(default)] - pub tcp_stack: TcpStackConfig, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct ArtiConfig { - pub state_dir: String, - pub cache_dir: String, -} - -impl Default for ArtiConfig { - fn default() -> Self { - Self { - state_dir: "/var/lib/burrow/arti/state".to_string(), - cache_dir: "/var/cache/burrow/arti".to_string(), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum TcpStackConfig { - System(SystemTcpStackConfig), -} - -impl Default for TcpStackConfig { - fn default() -> Self { - Self::System(SystemTcpStackConfig::default()) - } 
-} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct SystemTcpStackConfig { - #[serde(default = "default_system_listen")] - pub listen: String, -} - -impl Default for SystemTcpStackConfig { - fn default() -> Self { - Self { - listen: default_system_listen(), - } - } -} - -impl Config { - pub fn from_payload(payload: &[u8]) -> Result { - if let Ok(config) = serde_json::from_slice(payload) { - return Ok(config); - } - - let payload = str::from_utf8(payload).context("tor payload must be valid UTF-8")?; - toml::from_str(payload).context("failed to parse tor payload as JSON or TOML") - } - - pub fn listen_addr(&self) -> Result { - match &self.tcp_stack { - TcpStackConfig::System(config) => config - .listen - .parse() - .with_context(|| format!("invalid system tcp listen address '{}'", config.listen)), - } - } - - pub fn authority(&self) -> String { - "arti://local".to_owned() - } - - pub fn account_name(&self) -> String { - self.account - .clone() - .filter(|value| !value.trim().is_empty()) - .unwrap_or_else(|| "default".to_owned()) - } - - pub fn identity_name(&self, network_id: i32) -> String { - self.identity - .clone() - .filter(|value| !value.trim().is_empty()) - .or_else(|| self.tun_name.clone()) - .unwrap_or_else(|| format!("tor-{network_id}")) - } - - pub fn runtime_dirs(&self, network_id: i32) -> (String, String) { - let authority = sanitize_path_component(&self.authority()); - let account = sanitize_path_component(&self.account_name()); - let identity = sanitize_path_component(&self.identity_name(network_id)); - ( - append_runtime_path(&self.arti.state_dir, &[&authority, &account, &identity]), - append_runtime_path(&self.arti.cache_dir, &[&authority, &account, &identity]), - ) - } -} - -fn default_system_listen() -> String { - "127.0.0.1:9040".to_string() -} - -fn append_runtime_path(base: &str, parts: &[&str]) -> String { - let mut path = PathBuf::from(base); - for part in parts { - path.push(part); - } - 
path.to_string_lossy().to_string() -} - -fn sanitize_path_component(value: &str) -> String { - let sanitized: String = value - .chars() - .map(|ch| { - if ch.is_ascii_alphanumeric() || ch == '-' || ch == '_' { - ch - } else { - '_' - } - }) - .collect(); - - if sanitized.is_empty() { - "default".to_owned() - } else { - sanitized - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parses_json_payload() { - let payload = br#"{ - "address":["100.64.0.2/32"], - "mtu":1400, - "arti":{"state_dir":"/tmp/state","cache_dir":"/tmp/cache"}, - "tcp_stack":{"kind":"system","listen":"127.0.0.1:9150"} - }"#; - - let config = Config::from_payload(payload).unwrap(); - assert_eq!(config.address, vec!["100.64.0.2/32"]); - assert_eq!(config.listen_addr().unwrap().to_string(), "127.0.0.1:9150"); - assert!(config.runtime_dirs(7).0.contains("arti___local")); - } - - #[test] - fn parses_toml_payload() { - let payload = r#" -address = ["100.64.0.3/32"] -mtu = 1280 -tun_name = "burrow-tor" - -[arti] -state_dir = "/tmp/state" -cache_dir = "/tmp/cache" - -[tcp_stack] -kind = "system" -listen = "127.0.0.1:9140" -"#; - - let config = Config::from_payload(payload.as_bytes()).unwrap(); - assert_eq!(config.tun_name.as_deref(), Some("burrow-tor")); - assert_eq!(config.listen_addr().unwrap().to_string(), "127.0.0.1:9140"); - assert_eq!(config.identity_name(11), "burrow-tor"); - } -} diff --git a/burrow/src/tor/dns.rs b/burrow/src/tor/dns.rs deleted file mode 100644 index d918fc4..0000000 --- a/burrow/src/tor/dns.rs +++ /dev/null @@ -1,177 +0,0 @@ -use std::{ - net::{IpAddr, SocketAddr}, - sync::Arc, -}; - -use anyhow::{Context, Result}; -use arti_client::TorClient; -use hickory_proto::{ - op::{Message, MessageType, ResponseCode}, - rr::{rdata::A, rdata::AAAA, RData, Record, RecordType}, -}; -use tokio::{net::UdpSocket, sync::watch, task::JoinError}; -use tor_rtcompat::PreferredRuntime; -use tracing::{debug, warn}; - -const DNS_TTL_SECS: u32 = 60; - -#[derive(Debug)] -pub struct 
TorDnsHandle { - shutdown: watch::Sender, - task: tokio::task::JoinHandle<()>, -} - -impl TorDnsHandle { - pub async fn shutdown(self) -> Result<()> { - let _ = self.shutdown.send(true); - match self.task.await { - Ok(()) => Ok(()), - Err(err) if err.is_cancelled() => Ok(()), - Err(err) => Err(join_error(err)), - } - } -} - -pub async fn spawn( - bind_addr: SocketAddr, - tor_client: Arc>, -) -> Result { - let socket = UdpSocket::bind(bind_addr) - .await - .with_context(|| format!("failed to bind tor dns proxy on {bind_addr}"))?; - let (shutdown_tx, mut shutdown_rx) = watch::channel(false); - let task = tokio::spawn(async move { - let mut buffer = [0u8; 4096]; - loop { - tokio::select! { - changed = shutdown_rx.changed() => { - match changed { - Ok(()) if *shutdown_rx.borrow() => break, - Ok(()) => continue, - Err(_) => break, - } - } - received = socket.recv_from(&mut buffer) => { - let (len, peer_addr) = match received { - Ok(value) => value, - Err(err) => { - warn!(?err, "tor dns proxy recv failed"); - continue; - } - }; - - let response = match build_response(&buffer[..len], tor_client.as_ref()).await { - Ok(message) => message, - Err(err) => { - debug!(?err, "tor dns proxy failed to answer query"); - continue; - } - }; - - if let Err(err) = socket.send_to(&response, peer_addr).await { - warn!(?err, "tor dns proxy send failed"); - } - } - } - } - }); - - Ok(TorDnsHandle { shutdown: shutdown_tx, task }) -} - -pub(crate) async fn build_response( - packet: &[u8], - tor_client: &TorClient, -) -> Result> { - let request = Message::from_vec(packet).context("failed to parse dns packet")?; - let mut response = Message::new(); - response - .set_id(request.id()) - .set_op_code(request.op_code()) - .set_message_type(MessageType::Response) - .set_recursion_desired(request.recursion_desired()) - .set_recursion_available(true) - .set_response_code(ResponseCode::NoError); - - for query in request.queries().iter().cloned() { - response.add_query(query.clone()); - match 
query.query_type() { - RecordType::A | RecordType::AAAA => { - let hostname = query.name().to_utf8(); - let hostname = hostname.trim_end_matches('.'); - match tor_client.resolve(hostname).await { - Ok(addrs) => { - for addr in addrs { - if let Some(answer) = - record_for_address(query.name().clone(), query.query_type(), addr) - { - response.add_answer(answer); - } - } - } - Err(err) => { - debug!(hostname, ?err, "tor dns lookup failed"); - response.set_response_code(ResponseCode::ServFail); - } - } - } - _ => { - response.set_response_code(ResponseCode::NotImp); - } - } - } - - response.to_vec().context("failed to encode dns response") -} - -fn record_for_address( - name: hickory_proto::rr::Name, - record_type: RecordType, - addr: IpAddr, -) -> Option { - match (record_type, addr) { - (RecordType::A, IpAddr::V4(ip)) => Some(Record::from_rdata( - name, - DNS_TTL_SECS, - RData::A(A::from(ip)), - )), - (RecordType::AAAA, IpAddr::V6(ip)) => Some(Record::from_rdata( - name, - DNS_TTL_SECS, - RData::AAAA(AAAA::from(ip)), - )), - _ => None, - } -} - -fn join_error(err: JoinError) -> anyhow::Error { - anyhow::anyhow!("tor dns task failed: {err}") -} - -#[cfg(test)] -mod tests { - use super::*; - use hickory_proto::rr::Name; - use std::net::{Ipv4Addr, Ipv6Addr}; - - #[test] - fn builds_a_record_for_ipv4_answer() { - let record = record_for_address( - Name::from_ascii("example.com.").unwrap(), - RecordType::A, - IpAddr::V4(Ipv4Addr::new(1, 2, 3, 4)), - ) - .unwrap(); - assert_eq!(record.record_type(), RecordType::A); - } - - #[test] - fn skips_mismatched_record_type() { - let record = record_for_address( - Name::from_ascii("example.com.").unwrap(), - RecordType::A, - IpAddr::V6(Ipv6Addr::LOCALHOST), - ); - assert!(record.is_none()); - } -} diff --git a/burrow/src/tor/exec.rs b/burrow/src/tor/exec.rs deleted file mode 100644 index 7f4317d..0000000 --- a/burrow/src/tor/exec.rs +++ /dev/null @@ -1,439 +0,0 @@ -use std::{ - ffi::{OsStr, OsString}, - fs, - net::{IpAddr, Ipv4Addr, 
SocketAddr}, - os::unix::process::ExitStatusExt, - path::PathBuf, - process::{Command, ExitStatus, Stdio}, - sync::Arc, - time::Duration, -}; - -use anyhow::{bail, Context, Result}; -use tokio::process::Command as TokioCommand; -use tor_rtcompat::PreferredRuntime; -use tracing::{debug, info}; - -use super::{ - bootstrap_client, - dns::{spawn as spawn_dns, TorDnsHandle}, - runtime::{spawn_with_client, TorHandle}, - Config, SystemTcpStackConfig, TcpStackConfig, -}; - -const CHILD_PREFIX_LEN: u8 = 30; -const CHILD_DNS_PORT: u16 = 53; -const LISTENER_READY_TIMEOUT: Duration = Duration::from_secs(10); -const LISTENER_READY_POLL: Duration = Duration::from_millis(100); - -pub async fn run_exec(mut config: Config, command: Vec) -> Result { - if command.is_empty() { - bail!("tor-exec requires a command to run"); - } - ensure_root()?; - ensure_host_tool("ip")?; - ensure_host_tool("iptables")?; - ensure_host_tool("unshare")?; - - let requested_listener = config.listen_addr()?; - if requested_listener.port() == 0 { - bail!("tor-exec requires a fixed listener port"); - } - - let plan = NamespacePlan::new(requested_listener.port()); - let (state_dir, cache_dir) = config.runtime_dirs(std::process::id() as i32); - config.arti.state_dir = state_dir; - config.arti.cache_dir = cache_dir; - config.tcp_stack = TcpStackConfig::System(SystemTcpStackConfig { - listen: format!("{}:{}", plan.host_ip, plan.listener_port), - }); - - let namespace = NamespaceGuard::create(&plan)?; - let tor_client = bootstrap_client(&config).await?; - let tor_handle = spawn_with_client(config, tor_client.clone()).await?; - wait_for_listener(SocketAddr::new( - IpAddr::V4(plan.host_ip), - plan.listener_port, - )) - .await?; - let dns_handle = spawn_dns( - SocketAddr::new(IpAddr::V4(plan.host_ip), CHILD_DNS_PORT), - tor_client, - ) - .await?; - - let status = namespace.run_child(&command).await; - let dns_shutdown = dns_handle.shutdown().await; - let tor_shutdown = tor_handle.shutdown().await; - - let status = 
status?; - dns_shutdown?; - tor_shutdown?; - child_exit_code(status) -} - -fn ensure_root() -> Result<()> { - if unsafe { libc::geteuid() } != 0 { - bail!("tor-exec currently requires root on linux"); - } - Ok(()) -} - -fn ensure_host_tool(tool: &str) -> Result<()> { - let status = Command::new("sh") - .args(["-lc", &format!("command -v {tool} >/dev/null")]) - .status() - .with_context(|| format!("failed to probe required tool '{tool}'"))?; - if !status.success() { - bail!("required host tool '{tool}' is not available"); - } - Ok(()) -} - -async fn wait_for_listener(addr: SocketAddr) -> Result<()> { - let deadline = tokio::time::Instant::now() + LISTENER_READY_TIMEOUT; - loop { - match tokio::net::TcpStream::connect(addr).await { - Ok(stream) => { - drop(stream); - return Ok(()); - } - Err(err) if tokio::time::Instant::now() < deadline => { - debug!(%addr, ?err, "waiting for tor transparent listener"); - tokio::time::sleep(LISTENER_READY_POLL).await; - } - Err(err) => return Err(err).with_context(|| format!("timed out waiting for {addr}")), - } - } -} - -fn child_exit_code(status: ExitStatus) -> Result { - if let Some(code) = status.code() { - return Ok(code); - } - if let Some(signal) = status.signal() { - return Ok(128 + signal); - } - bail!("child process terminated without an exit code"); -} - -#[derive(Debug, Clone)] -struct NamespacePlan { - netns_name: String, - host_if: String, - child_if: String, - host_ip: Ipv4Addr, - child_ip: Ipv4Addr, - listener_port: u16, -} - -impl NamespacePlan { - fn new(listener_port: u16) -> Self { - let token = std::process::id() % 10_000; - let segment = ((std::process::id() % 200) as u8) + 20; - Self { - netns_name: format!("burrow-tor-{token}"), - host_if: format!("bth{token}"), - child_if: format!("btc{token}"), - host_ip: Ipv4Addr::new(100, 90, segment, 1), - child_ip: Ipv4Addr::new(100, 90, segment, 2), - listener_port, - } - } - - fn host_cidr(&self) -> String { - format!("{}/{}", self.host_ip, CHILD_PREFIX_LEN) - } - - 
fn child_cidr(&self) -> String { - format!("{}/{}", self.child_ip, CHILD_PREFIX_LEN) - } -} - -struct NamespaceGuard { - plan: NamespacePlan, - resolv_conf: PathBuf, - nat_rule_installed: bool, - forward_rule_installed: bool, - netns_created: bool, - host_link_created: bool, -} - -impl NamespaceGuard { - fn create(plan: &NamespacePlan) -> Result { - let mut guard = Self { - plan: plan.clone(), - resolv_conf: write_resolv_conf(plan.host_ip)?, - nat_rule_installed: false, - forward_rule_installed: false, - netns_created: false, - host_link_created: false, - }; - - let setup = (|| -> Result<()> { - run_host_command(["ip", "netns", "add", &guard.plan.netns_name])?; - guard.netns_created = true; - - run_host_command([ - "ip", - "link", - "add", - &guard.plan.host_if, - "type", - "veth", - "peer", - "name", - &guard.plan.child_if, - ])?; - guard.host_link_created = true; - - run_host_command([ - "ip", - "addr", - "add", - &guard.plan.host_cidr(), - "dev", - &guard.plan.host_if, - ])?; - run_host_command(["ip", "link", "set", &guard.plan.host_if, "up"])?; - run_host_command([ - "ip", - "link", - "set", - &guard.plan.child_if, - "netns", - &guard.plan.netns_name, - ])?; - run_host_command([ - "ip", - "netns", - "exec", - &guard.plan.netns_name, - "ip", - "link", - "set", - "lo", - "up", - ])?; - run_host_command([ - "ip", - "netns", - "exec", - &guard.plan.netns_name, - "ip", - "addr", - "add", - &guard.plan.child_cidr(), - "dev", - &guard.plan.child_if, - ])?; - run_host_command([ - "ip", - "netns", - "exec", - &guard.plan.netns_name, - "ip", - "link", - "set", - &guard.plan.child_if, - "up", - ])?; - run_host_command([ - "ip", - "netns", - "exec", - &guard.plan.netns_name, - "ip", - "route", - "add", - "default", - "via", - &guard.plan.host_ip.to_string(), - "dev", - &guard.plan.child_if, - ])?; - run_host_command([ - "iptables", - "-t", - "nat", - "-A", - "PREROUTING", - "-i", - &guard.plan.host_if, - "-p", - "tcp", - "-j", - "DNAT", - "--to-destination", - 
&format!("{}:{}", guard.plan.host_ip, guard.plan.listener_port), - ])?; - guard.nat_rule_installed = true; - - run_host_command([ - "iptables", - "-A", - "FORWARD", - "-i", - &guard.plan.host_if, - "-j", - "REJECT", - ])?; - guard.forward_rule_installed = true; - Ok(()) - })(); - - if let Err(err) = setup { - guard.cleanup(); - return Err(err); - } - - Ok(guard) - } - - async fn run_child(&self, command: &[String]) -> Result { - let mut args = vec![ - OsString::from("netns"), - OsString::from("exec"), - OsString::from(&self.plan.netns_name), - OsString::from("unshare"), - OsString::from("--user"), - OsString::from("--map-root-user"), - OsString::from("--mount"), - OsString::from("--pid"), - OsString::from("--fork"), - OsString::from("--kill-child"), - OsString::from("sh"), - OsString::from("-ceu"), - OsString::from(CHILD_SCRIPT), - OsString::from("sh"), - self.resolv_conf.as_os_str().to_os_string(), - ]; - args.extend(command.iter().map(OsString::from)); - - let status = TokioCommand::new("ip") - .args(args) - .stdin(Stdio::inherit()) - .stdout(Stdio::inherit()) - .stderr(Stdio::inherit()) - .status() - .await - .context("failed to execute child in tor namespace")?; - Ok(status) - } - - fn cleanup(&mut self) { - if self.forward_rule_installed { - let _ = run_host_command([ - "iptables", - "-D", - "FORWARD", - "-i", - &self.plan.host_if, - "-j", - "REJECT", - ]); - self.forward_rule_installed = false; - } - if self.nat_rule_installed { - let _ = run_host_command([ - "iptables", - "-t", - "nat", - "-D", - "PREROUTING", - "-i", - &self.plan.host_if, - "-p", - "tcp", - "-j", - "DNAT", - "--to-destination", - &format!("{}:{}", self.plan.host_ip, self.plan.listener_port), - ]); - self.nat_rule_installed = false; - } - if self.host_link_created { - let _ = run_host_command(["ip", "link", "delete", &self.plan.host_if]); - self.host_link_created = false; - } - if self.netns_created { - let _ = run_host_command(["ip", "netns", "delete", &self.plan.netns_name]); - 
self.netns_created = false; - } - let _ = fs::remove_file(&self.resolv_conf); - } -} - -impl Drop for NamespaceGuard { - fn drop(&mut self) { - self.cleanup(); - } -} - -fn write_resolv_conf(nameserver: Ipv4Addr) -> Result { - let path = std::env::temp_dir().join(format!("burrow-tor-resolv-{}.conf", std::process::id())); - fs::write(&path, format!("nameserver {nameserver}\noptions ndots:1\n")) - .with_context(|| format!("failed to write {}", path.display()))?; - Ok(path) -} - -fn run_host_command(args: [&str; N]) -> Result<()> { - let (program, rest) = args - .split_first() - .expect("run_host_command requires a program and arguments"); - let status = Command::new(program) - .args(rest) - .stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::piped()) - .status() - .with_context(|| format!("failed to start host command {}", shell_words(&args)))?; - if status.success() { - Ok(()) - } else { - bail!("host command failed: {}", shell_words(&args)); - } -} - -fn shell_words(args: &[&str]) -> String { - args.iter() - .map(|arg| shlex_escape(arg)) - .collect::>() - .join(" ") -} - -fn shlex_escape(value: &str) -> String { - if value - .chars() - .all(|ch| ch.is_ascii_alphanumeric() || "-_./:=+".contains(ch)) - { - value.to_string() - } else { - format!("'{}'", value.replace('\'', "'\\''")) - } -} - -const CHILD_SCRIPT: &str = r#" -mount -t proc proc /proc -mount --bind "$1" /etc/resolv.conf -shift -exec "$@" -"#; - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn namespace_plan_uses_short_interface_names() { - let plan = NamespacePlan::new(9040); - assert!(plan.host_if.len() <= 15); - assert!(plan.child_if.len() <= 15); - } - - #[test] - fn signal_exit_code_uses_shell_convention() { - let status = ExitStatus::from_raw(libc::SIGTERM); - assert_eq!(child_exit_code(status).unwrap(), 128 + libc::SIGTERM); - } -} diff --git a/burrow/src/tor/mod.rs b/burrow/src/tor/mod.rs deleted file mode 100644 index 635c355..0000000 --- a/burrow/src/tor/mod.rs +++ 
/dev/null @@ -1,9 +0,0 @@ -mod config; -pub(crate) mod dns; -mod exec; -mod runtime; -mod system; - -pub use config::{ArtiConfig, Config, SystemTcpStackConfig, TcpStackConfig}; -pub use exec::run_exec; -pub use runtime::{bootstrap_client, spawn, spawn_with_client, TorHandle}; diff --git a/burrow/src/tor/runtime.rs b/burrow/src/tor/runtime.rs deleted file mode 100644 index 45690ee..0000000 --- a/burrow/src/tor/runtime.rs +++ /dev/null @@ -1,126 +0,0 @@ -use std::{sync::Arc, time::Duration}; - -use anyhow::{Context, Result}; -use arti_client::{config::TorClientConfigBuilder, TorClient}; -use tokio::{ - sync::watch, - task::{JoinError, JoinSet}, -}; -use tokio_util::compat::FuturesAsyncReadCompatExt; -use tor_rtcompat::PreferredRuntime; -use tracing::{debug, error, info, warn}; - -use super::{system::SystemTcpStackRuntime, Config, TcpStackConfig}; - -#[derive(Debug)] -pub struct TorHandle { - shutdown: watch::Sender, - task: tokio::task::JoinHandle<()>, -} - -impl TorHandle { - pub async fn shutdown(self) -> Result<()> { - let _ = self.shutdown.send(true); - match self.task.await { - Ok(()) => Ok(()), - Err(err) if err.is_cancelled() => Ok(()), - Err(err) => Err(join_error(err)), - } - } -} - -pub async fn bootstrap_client(config: &Config) -> Result>> { - let builder = - TorClientConfigBuilder::from_directories(&config.arti.state_dir, &config.arti.cache_dir); - let tor_config = builder.build().context("failed to build arti config")?; - let tor_client = TorClient::create_bootstrapped(tor_config) - .await - .context("failed to bootstrap arti client")?; - Ok(Arc::new(tor_client)) -} - -pub async fn spawn(config: Config) -> Result { - let tor_client = bootstrap_client(&config).await?; - spawn_with_client(config, tor_client).await -} - -pub async fn spawn_with_client( - config: Config, - tor_client: Arc>, -) -> Result { - let (shutdown_tx, mut shutdown_rx) = watch::channel(false); - let task = match config.tcp_stack.clone() { - TcpStackConfig::System(system_config) => 
tokio::spawn(async move { - let stack = match SystemTcpStackRuntime::bind(&system_config).await { - Ok(stack) => stack, - Err(err) => { - error!(?err, "failed to bind system tcp stack listener"); - return; - } - }; - info!( - listen = %stack.local_addr(), - "system tcp stack listener bound for tor transparent proxy" - ); - - let mut connections = JoinSet::new(); - loop { - tokio::select! { - changed = shutdown_rx.changed() => { - match changed { - Ok(()) if *shutdown_rx.borrow() => break, - Ok(()) => continue, - Err(_) => break, - } - } - Some(res) = connections.join_next(), if !connections.is_empty() => { - match res { - Ok(Ok(())) => {} - Ok(Err(err)) => warn!(?err, "transparent proxy task failed"), - Err(err) => warn!(?err, "transparent proxy task panicked"), - } - } - accepted = stack.accept() => { - let (mut inbound, original_dst) = match accepted { - Ok(pair) => pair, - Err(err) => { - warn!(?err, "failed to accept transparent tcp connection"); - tokio::time::sleep(Duration::from_millis(50)).await; - continue; - } - }; - - let tor_client = tor_client.clone(); - connections.spawn(async move { - debug!(%original_dst, "accepted transparent tcp connection"); - let tor_stream = tor_client - .connect((original_dst.ip().to_string(), original_dst.port())) - .await - .with_context(|| format!("failed to connect to {original_dst} over tor"))?; - let mut tor_stream = tor_stream.compat(); - tokio::io::copy_bidirectional(&mut inbound, &mut tor_stream) - .await - .with_context(|| format!("failed to bridge tor stream for {original_dst}"))?; - Result::<()>::Ok(()) - }); - } - } - } - - connections.abort_all(); - while let Some(res) = connections.join_next().await { - match res { - Ok(Ok(())) => {} - Ok(Err(err)) => debug!(?err, "transparent proxy task failed during shutdown"), - Err(err) => debug!(?err, "transparent proxy task exited during shutdown"), - } - } - }), - }; - - Ok(TorHandle { shutdown: shutdown_tx, task }) -} - -fn join_error(err: JoinError) -> anyhow::Error { - 
anyhow::anyhow!("tor runtime task failed: {err}") -} diff --git a/burrow/src/tor/system.rs b/burrow/src/tor/system.rs deleted file mode 100644 index 74f8157..0000000 --- a/burrow/src/tor/system.rs +++ /dev/null @@ -1,143 +0,0 @@ -use std::net::SocketAddr; - -use anyhow::{Context, Result}; -use tokio::net::{TcpListener, TcpStream}; - -use super::SystemTcpStackConfig; - -pub struct SystemTcpStackRuntime { - listener: TcpListener, -} - -impl SystemTcpStackRuntime { - pub async fn bind(config: &SystemTcpStackConfig) -> Result { - let listener = TcpListener::bind(&config.listen) - .await - .with_context(|| format!("failed to bind transparent listener on {}", config.listen))?; - Ok(Self { listener }) - } - - pub fn local_addr(&self) -> SocketAddr { - self.listener - .local_addr() - .expect("listener should always have a local address") - } - - pub async fn accept(&self) -> Result<(TcpStream, SocketAddr)> { - let (stream, _) = self - .listener - .accept() - .await - .context("failed to accept transparent listener connection")?; - let original_dst = original_destination(&stream)?; - Ok((stream, original_dst)) - } -} - -#[cfg(target_os = "linux")] -fn original_destination(stream: &TcpStream) -> Result { - use std::{ - mem::{size_of, MaybeUninit}, - os::fd::AsRawFd, - }; - - let level = if stream.local_addr()?.is_ipv6() { - libc::SOL_IPV6 - } else { - libc::SOL_IP - }; - - let mut addr = MaybeUninit::::zeroed(); - let mut len = size_of::() as libc::socklen_t; - let rc = unsafe { - libc::getsockopt( - stream.as_raw_fd(), - level, - 80, - addr.as_mut_ptr().cast(), - &mut len, - ) - }; - if rc != 0 { - return Err(std::io::Error::last_os_error()).context("SO_ORIGINAL_DST lookup failed"); - } - - socket_addr_from_storage(unsafe { &addr.assume_init() }, len as usize) -} - -#[cfg(not(target_os = "linux"))] -fn original_destination(_stream: &TcpStream) -> Result { - anyhow::bail!("system tcp stack transparent destination lookup is only implemented on linux") -} - -fn 
socket_addr_from_storage(addr: &libc::sockaddr_storage, len: usize) -> Result { - use std::net::{Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6}; - - if len < std::mem::size_of::() { - anyhow::bail!("socket address buffer was too short"); - } - - match addr.ss_family as i32 { - libc::AF_INET => { - let addr_in = unsafe { *(addr as *const _ as *const libc::sockaddr_in) }; - let ip = Ipv4Addr::from(u32::from_be(addr_in.sin_addr.s_addr)); - let port = u16::from_be(addr_in.sin_port); - Ok(SocketAddr::V4(SocketAddrV4::new(ip, port))) - } - libc::AF_INET6 => { - let addr_in = unsafe { *(addr as *const _ as *const libc::sockaddr_in6) }; - let ip = Ipv6Addr::from(addr_in.sin6_addr.s6_addr); - let port = u16::from_be(addr_in.sin6_port); - Ok(SocketAddr::V6(SocketAddrV6::new( - ip, - port, - addr_in.sin6_flowinfo, - addr_in.sin6_scope_id, - ))) - } - family => anyhow::bail!("unsupported socket address family {family}"), - } -} - -#[cfg(all(test, target_os = "linux"))] -mod tests { - use super::*; - use std::{ - mem::size_of, - net::{Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6}, - }; - - #[test] - fn parses_ipv4_socket_addr() { - let mut storage = unsafe { std::mem::zeroed::() }; - let addr_in = unsafe { &mut *(&mut storage as *mut _ as *mut libc::sockaddr_in) }; - addr_in.sin_family = libc::AF_INET as libc::sa_family_t; - addr_in.sin_port = u16::to_be(9040); - addr_in.sin_addr = libc::in_addr { - s_addr: u32::to_be(u32::from(Ipv4Addr::new(127, 0, 0, 1))), - }; - - let parsed = socket_addr_from_storage(&storage, size_of::()).unwrap(); - assert_eq!( - parsed, - SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::LOCALHOST, 9040)) - ); - } - - #[test] - fn parses_ipv6_socket_addr() { - let mut storage = unsafe { std::mem::zeroed::() }; - let addr_in = unsafe { &mut *(&mut storage as *mut _ as *mut libc::sockaddr_in6) }; - addr_in.sin6_family = libc::AF_INET6 as libc::sa_family_t; - addr_in.sin6_port = u16::to_be(9150); - addr_in.sin6_addr = libc::in6_addr { - s6_addr: 
Ipv6Addr::LOCALHOST.octets(), - }; - - let parsed = socket_addr_from_storage(&storage, size_of::()).unwrap(); - assert_eq!( - parsed, - SocketAddr::V6(SocketAddrV6::new(Ipv6Addr::LOCALHOST, 9150, 0, 0)) - ); - } -} diff --git a/burrow/src/tracing.rs b/burrow/src/tracing.rs index 8a245ef..861b41f 100644 --- a/burrow/src/tracing.rs +++ b/burrow/src/tracing.rs @@ -3,7 +3,8 @@ use std::sync::Once; use tracing::{error, info}; use tracing_subscriber::{ layer::{Layer, SubscriberExt}, - EnvFilter, Registry, + EnvFilter, + Registry, }; static TRACING: Once = Once::new(); @@ -14,55 +15,36 @@ pub fn initialize() { error!("Failed to initialize LogTracer: {}", e); } - let make_stderr = || { + #[cfg(target_os = "windows")] + let system_log = Some(tracing_subscriber::fmt::layer()); + + #[cfg(target_os = "linux")] + let system_log = match tracing_journald::layer() { + Ok(layer) => Some(layer), + Err(e) => { + if e.kind() != std::io::ErrorKind::NotFound { + error!("Failed to initialize journald: {}", e); + } + None + } + }; + + #[cfg(target_vendor = "apple")] + let system_log = Some(tracing_oslog::OsLogger::new( + "com.hackclub.burrow", + "tracing", + )); + + let stderr = (console::user_attended_stderr() || system_log.is_none()).then(|| { tracing_subscriber::fmt::layer() .with_level(true) .with_writer(std::io::stderr) .with_line_number(true) .compact() .with_filter(EnvFilter::from_default_env()) - }; + }); - #[cfg(target_os = "windows")] - let subscriber = { - let system_log = Some(tracing_subscriber::fmt::layer()); - let stderr = (console::user_attended_stderr() || system_log.is_none()).then(make_stderr); - Registry::default().with(stderr).with(system_log) - }; - - #[cfg(target_os = "linux")] - let subscriber = { - let system_log = match tracing_journald::layer() { - Ok(layer) => Some(layer), - Err(e) => { - if e.kind() != std::io::ErrorKind::NotFound { - error!("Failed to initialize journald: {}", e); - } - None - } - }; - let stderr = (console::user_attended_stderr() || 
system_log.is_none()).then(make_stderr); - Registry::default().with(stderr).with(system_log) - }; - - #[cfg(target_os = "macos")] - let subscriber = { - // `tracing_oslog` is crashing under Tokio/h2 span churn in the host daemon on - // current macOS. Keep logging on stderr by default and allow opt-in OSLog - // only when explicitly requested for local debugging. - let enable_oslog = matches!( - std::env::var("BURROW_ENABLE_OSLOG").as_deref(), - Ok("1" | "true" | "TRUE" | "yes" | "YES") - ); - let system_log = enable_oslog.then(|| { - tracing_oslog::OsLogger::new("com.hackclub.burrow", "tracing") - }); - let stderr = (console::user_attended_stderr() || system_log.is_none()).then(make_stderr); - Registry::default().with(stderr).with(system_log) - }; - - #[cfg(not(any(target_os = "windows", target_os = "linux", target_os = "macos")))] - let subscriber = Registry::default().with(Some(make_stderr())); + let subscriber = Registry::default().with(stderr).with(system_log); #[cfg(feature = "tokio-console")] let subscriber = subscriber.with( diff --git a/burrow/src/usernet/mod.rs b/burrow/src/usernet/mod.rs deleted file mode 100644 index 12de810..0000000 --- a/burrow/src/usernet/mod.rs +++ /dev/null @@ -1,935 +0,0 @@ -use std::{ - collections::HashMap, - env, - net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}, - os::fd::{AsRawFd, FromRawFd, RawFd}, - os::unix::net::UnixStream as StdUnixStream, - os::unix::process::ExitStatusExt, - path::{Path, PathBuf}, - process::{Command as StdCommand, ExitStatus}, - str, - sync::Arc, - time::Duration, -}; - -use anyhow::{anyhow, bail, Context, Result}; -use clap::ValueEnum; -use futures::{SinkExt, StreamExt}; -use ipnetwork::IpNetwork; -use netstack_smoltcp::{ - StackBuilder, TcpListener as StackTcpListener, TcpStream as StackTcpStream, - UdpSocket as StackUdpSocket, -}; -use nix::{ - cmsg_space, - fcntl::{fcntl, FcntlArg, FdFlag}, - sys::socket::{recvmsg, sendmsg, ControlMessage, ControlMessageOwned, MsgFlags}, -}; -use 
serde::{Deserialize, Serialize}; -use tokio::{ - io::copy_bidirectional, - net::{TcpStream, UdpSocket}, - process::{Child, Command}, - sync::{mpsc, Mutex, RwLock}, - task::JoinSet, -}; -use tokio_util::compat::FuturesAsyncReadCompatExt; -use tracing::{debug, warn}; -use tun::{tokio::TunInterface as TokioTunInterface, TunOptions}; - -use crate::{ - tor::{bootstrap_client, dns::build_response as build_tor_dns_response, Config as TorConfig}, - wireguard::{Config as WireGuardConfig, Interface as WireGuardInterface}, -}; - -const INNER_ENV: &str = "BURROW_USERNET_INNER"; -const INNER_CONTROL_FD_ENV: &str = "BURROW_USERNET_CONTROL_FD"; -const INNER_TUN_CONFIG_ENV: &str = "BURROW_USERNET_TUN_CONFIG"; -const DEFAULT_MTU: u32 = 1500; -const DEFAULT_TUN_V4: &str = "100.64.0.2/24"; -const DEFAULT_TUN_V6: &str = "fd00:64::2/64"; -const UDP_IDLE_TIMEOUT: Duration = Duration::from_secs(30); -const READY_ACK: &[u8; 1] = b"1"; - -#[derive(Clone, Debug, Eq, PartialEq, ValueEnum)] -pub enum ExecBackendKind { - Direct, - Tor, - Wireguard, -} - -impl ExecBackendKind { - fn cli_name(&self) -> &'static str { - match self { - Self::Direct => "direct", - Self::Tor => "tor", - Self::Wireguard => "wireguard", - } - } -} - -#[derive(Clone, Debug)] -pub struct ExecInvocation { - pub backend: ExecBackendKind, - pub payload_path: Option, - pub command: Vec, -} - -#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] -pub struct DirectConfig { - #[serde(default)] - pub address: Vec, - #[serde(default)] - pub dns: Vec, - #[serde(default)] - pub mtu: Option, - #[serde(default)] - pub tun_name: Option, -} - -impl DirectConfig { - pub fn from_payload(payload: &[u8]) -> Result { - if payload.is_empty() { - return Ok(Self::default()); - } - - if let Ok(config) = serde_json::from_slice(payload) { - return Ok(config); - } - - let payload = str::from_utf8(payload).context("direct payload must be valid UTF-8")?; - toml::from_str(payload).context("failed to parse direct payload as JSON or 
TOML") - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct TunNetworkConfig { - tun_name: String, - addresses: Vec, - mtu: u32, -} - -enum PreparedBackend { - Socket { - backend: SocketBackend, - tun_config: TunNetworkConfig, - }, - Wireguard { - config: WireGuardConfig, - tun_config: TunNetworkConfig, - }, -} - -impl PreparedBackend { - fn tun_config(&self) -> &TunNetworkConfig { - match self { - Self::Socket { tun_config, .. } => tun_config, - Self::Wireguard { tun_config, .. } => tun_config, - } - } -} - -struct NamespaceChild { - child: Child, - control: StdUnixStream, -} - -#[derive(Clone)] -enum SocketBackend { - Direct, - Tor(Arc>), -} - -#[derive(Debug)] -struct UdpReply { - payload: Vec, - source: SocketAddr, - destination: SocketAddr, -} - -#[derive(Debug, Clone, Eq, Hash, PartialEq)] -struct UdpFlowKey { - local: SocketAddr, - remote: SocketAddr, -} - -pub async fn run_exec(invocation: ExecInvocation) -> Result { - if invocation.command.is_empty() { - bail!("exec requires a command to run"); - } - - if env::var_os(INNER_ENV).is_some() { - run_inner(invocation.command).await - } else { - run_supervisor(invocation).await - } -} - -async fn run_supervisor(invocation: ExecInvocation) -> Result { - let prepared = prepare_backend(&invocation).await?; - let mut child = spawn_namespaced_child(&invocation, prepared.tun_config())?; - let tun = child.receive_tun().await?; - - match prepared { - PreparedBackend::Socket { backend, .. } => run_socket_backend(backend, tun, child).await, - PreparedBackend::Wireguard { config, .. 
} => { - run_wireguard_backend(config, tun, child).await - } - } -} - -async fn prepare_backend(invocation: &ExecInvocation) -> Result { - match invocation.backend { - ExecBackendKind::Direct => { - let payload = read_optional_payload(invocation.payload_path.as_deref()).await?; - let config = DirectConfig::from_payload(&payload)?; - let tun_config = socket_tun_config( - &config.address, - config.mtu, - config.tun_name.as_deref(), - "burrow-direct", - )?; - Ok(PreparedBackend::Socket { - backend: SocketBackend::Direct, - tun_config, - }) - } - ExecBackendKind::Tor => { - let payload = read_required_payload(invocation.payload_path.as_deref(), "tor").await?; - let mut config = TorConfig::from_payload(&payload)?; - let (state_dir, cache_dir) = config.runtime_dirs(std::process::id() as i32); - config.arti.state_dir = state_dir; - config.arti.cache_dir = cache_dir; - let tun_config = socket_tun_config( - &config.address, - config.mtu, - config.tun_name.as_deref(), - "burrow-tor", - )?; - let tor_client = bootstrap_client(&config).await?; - Ok(PreparedBackend::Socket { - backend: SocketBackend::Tor(tor_client), - tun_config, - }) - } - ExecBackendKind::Wireguard => { - let payload = - read_required_payload(invocation.payload_path.as_deref(), "wireguard").await?; - let config = parse_wireguard_payload(&payload, invocation.payload_path.as_deref())?; - let tun_config = wireguard_tun_config(&config)?; - Ok(PreparedBackend::Wireguard { config, tun_config }) - } - } -} - -fn spawn_namespaced_child( - invocation: &ExecInvocation, - tun_config: &TunNetworkConfig, -) -> Result { - ensure_tool("unshare")?; - ensure_tool("ip")?; - - let (parent_control, child_control) = - StdUnixStream::pair().context("failed to create namespace control socket")?; - set_inheritable(child_control.as_raw_fd())?; - - let current_exe = env::current_exe().context("failed to locate current burrow binary")?; - let mut cmd = Command::new("unshare"); - cmd.args([ - "--user", - "--map-root-user", - "--net", - 
"--mount", - "--pid", - "--fork", - "--kill-child", - "--mount-proc", - ]); - cmd.env(INNER_ENV, "1"); - cmd.env(INNER_CONTROL_FD_ENV, child_control.as_raw_fd().to_string()); - cmd.env( - INNER_TUN_CONFIG_ENV, - serde_json::to_string(tun_config).context("failed to encode namespace tun config")?, - ); - cmd.arg(current_exe); - cmd.arg("exec"); - cmd.args(["--backend", invocation.backend.cli_name()]); - if let Some(payload_path) = &invocation.payload_path { - cmd.arg("--payload"); - cmd.arg(payload_path); - } - cmd.arg("--"); - cmd.args(&invocation.command); - - let child = cmd - .spawn() - .context("failed to enter unshared Linux namespace")?; - drop(child_control); - - Ok(NamespaceChild { child, control: parent_control }) -} - -async fn run_inner(command: Vec) -> Result { - run_ip(["link", "set", "lo", "up"])?; - let tun_config = read_inner_tun_config()?; - let tun = open_tun_device(&tun_config)?; - configure_tun_addresses(&tun, &tun_config.addresses, tun_config.mtu)?; - let name = tun.name().context("failed to retrieve tun device name")?; - run_ip(["link", "set", "dev", &name, "up"])?; - install_default_routes(&name, &tun_config.addresses)?; - - let control_fd = env::var(INNER_CONTROL_FD_ENV) - .context("missing namespace control fd")? 
- .parse::() - .context("invalid namespace control fd")?; - send_tun_fd(control_fd, tun.as_raw_fd())?; - await_parent_ready(control_fd).await?; - drop(tun); - - let status = spawn_child(&command).await?; - child_exit_code(status) -} - -impl NamespaceChild { - async fn receive_tun(&mut self) -> Result { - let control = self - .control - .try_clone() - .context("failed to clone namespace control socket")?; - let fd = tokio::task::spawn_blocking(move || recv_tun_fd(&control)) - .await - .context("failed to join namespace tun receive task")??; - tokio_tun_from_fd(fd) - } - - async fn signal_ready(&self) -> Result<()> { - let mut control = self - .control - .try_clone() - .context("failed to clone namespace control socket")?; - tokio::task::spawn_blocking(move || -> Result<()> { - std::io::Write::write_all(&mut control, READY_ACK) - .context("failed to acknowledge namespace readiness")?; - Ok(()) - }) - .await - .context("failed to join namespace ready task")??; - Ok(()) - } - - async fn wait(mut self) -> Result { - self.child - .wait() - .await - .context("failed to wait for namespace child") - } -} - -async fn run_socket_backend( - backend: SocketBackend, - tun: TokioTunInterface, - child: NamespaceChild, -) -> Result { - let tun = Arc::new(tun); - let (stack, runner, udp_socket, tcp_listener) = StackBuilder::default() - .stack_buffer_size(1024) - .udp_buffer_size(1024) - .tcp_buffer_size(1024) - .enable_udp(true) - .enable_tcp(true) - .enable_icmp(true) - .build() - .context("failed to build userspace netstack")?; - let (mut stack_sink, mut stack_stream) = stack.split(); - - let mut tasks = JoinSet::new(); - if let Some(runner) = runner { - tasks.spawn(async move { runner.await.map_err(anyhow::Error::from) }); - } - - { - let tun = tun.clone(); - tasks.spawn(async move { - let mut buf = vec![0u8; 65_535]; - loop { - let len = tun - .recv(&mut buf) - .await - .context("failed to read packet from tun")?; - if len == 0 { - continue; - } - stack_sink - 
.send(buf[..len].to_vec()) - .await - .context("failed to send tun packet into userspace stack")?; - } - #[allow(unreachable_code)] - Result::<()>::Ok(()) - }); - } - - { - let tun = tun.clone(); - tasks.spawn(async move { - while let Some(packet) = stack_stream.next().await { - let packet = packet.context("failed to receive packet from userspace stack")?; - tun.send(&packet) - .await - .context("failed to write userspace stack packet to tun")?; - } - Result::<()>::Ok(()) - }); - } - - if let Some(tcp_listener) = tcp_listener { - let backend = backend.clone(); - tasks.spawn(async move { tcp_dispatch_loop(tcp_listener, backend).await }); - } - - if let Some(udp_socket) = udp_socket { - tasks.spawn(async move { udp_dispatch_loop(udp_socket, backend).await }); - } - - child.signal_ready().await?; - let status = child.wait().await?; - - tasks.abort_all(); - while let Some(joined) = tasks.join_next().await { - match joined { - Ok(Ok(())) => {} - Ok(Err(err)) => debug!(?err, "usernet background task exited with error"), - Err(err) if err.is_cancelled() => {} - Err(err) => debug!(?err, "usernet background task panicked"), - } - } - - child_exit_code(status) -} - -async fn run_wireguard_backend( - config: WireGuardConfig, - tun: TokioTunInterface, - child: NamespaceChild, -) -> Result { - let interface: WireGuardInterface = config.try_into()?; - interface.set_tun(tun).await; - let interface = Arc::new(interface); - let runner = { - let interface = interface.clone(); - tokio::spawn(async move { interface.run().await }) - }; - - child.signal_ready().await?; - let status = child.wait().await?; - - interface.remove_tun().await; - match runner.await { - Ok(Ok(())) => {} - Ok(Err(err)) => debug!(?err, "wireguard exec runtime exited with error"), - Err(err) if err.is_cancelled() => {} - Err(err) => debug!(?err, "wireguard exec runtime panicked"), - } - - child_exit_code(status) -} - -async fn tcp_dispatch_loop(mut listener: StackTcpListener, backend: SocketBackend) -> Result<()> 
{ - let mut tasks = JoinSet::new(); - loop { - tokio::select! { - Some(result) = tasks.join_next(), if !tasks.is_empty() => { - match result { - Ok(Ok(())) => {} - Ok(Err(err)) => warn!(?err, "tcp bridge task failed"), - Err(err) if err.is_cancelled() => {} - Err(err) => warn!(?err, "tcp bridge task panicked"), - } - } - next = listener.next() => match next { - Some((stream, local_addr, remote_addr)) => { - debug!(%local_addr, %remote_addr, "accepted userspace tcp stream"); - let backend = backend.clone(); - tasks.spawn(async move { - bridge_tcp(backend, stream, local_addr, remote_addr).await - }); - } - None => break, - } - } - } - - tasks.abort_all(); - while let Some(result) = tasks.join_next().await { - match result { - Ok(Ok(())) => {} - Ok(Err(err)) => debug!(?err, "tcp bridge task exited during shutdown"), - Err(err) if err.is_cancelled() => {} - Err(err) => debug!(?err, "tcp bridge task panicked during shutdown"), - } - } - Ok(()) -} - -async fn bridge_tcp( - backend: SocketBackend, - mut inbound: StackTcpStream, - _local_addr: SocketAddr, - remote_addr: SocketAddr, -) -> Result<()> { - match backend { - SocketBackend::Direct => { - debug!(%remote_addr, "dialing direct outbound tcp"); - let mut outbound = TcpStream::connect(remote_addr) - .await - .with_context(|| format!("failed to connect to {remote_addr}"))?; - copy_bidirectional(&mut inbound, &mut outbound) - .await - .with_context(|| format!("failed to bridge tcp stream for {remote_addr}"))?; - } - SocketBackend::Tor(tor_client) => { - debug!(%remote_addr, "dialing tor outbound tcp"); - let tor_stream = tor_client - .connect((remote_addr.ip().to_string(), remote_addr.port())) - .await - .with_context(|| format!("failed to connect to {remote_addr} over tor"))?; - let mut tor_stream = tor_stream.compat(); - copy_bidirectional(&mut inbound, &mut tor_stream) - .await - .with_context(|| format!("failed to bridge tor stream for {remote_addr}"))?; - } - } - Ok(()) -} - -async fn udp_dispatch_loop(socket: 
StackUdpSocket, backend: SocketBackend) -> Result<()> { - let (mut udp_reader, mut udp_writer) = socket.split(); - let (reply_tx, mut reply_rx) = mpsc::channel::(128); - let direct_sessions = Arc::new(Mutex::new( - HashMap::>>::new(), - )); - let mut session_tasks = JoinSet::new(); - - loop { - tokio::select! { - Some(result) = session_tasks.join_next(), if !session_tasks.is_empty() => { - match result { - Ok(Ok(())) => {} - Ok(Err(err)) => warn!(?err, "udp session task failed"), - Err(err) if err.is_cancelled() => {} - Err(err) => warn!(?err, "udp session task panicked"), - } - } - maybe_reply = reply_rx.recv() => match maybe_reply { - Some(reply) => { - udp_writer - .send((reply.payload, reply.source, reply.destination)) - .await - .context("failed to write udp reply into userspace stack")?; - } - None => break, - }, - maybe_datagram = udp_reader.next() => match maybe_datagram { - Some((payload, local_addr, remote_addr)) => { - match &backend { - SocketBackend::Direct => { - dispatch_direct_udp( - payload, - local_addr, - remote_addr, - reply_tx.clone(), - direct_sessions.clone(), - &mut session_tasks, - ).await?; - } - SocketBackend::Tor(tor_client) => { - if remote_addr.port() != 53 { - debug!(%remote_addr, "dropping non-DNS UDP datagram for tor backend"); - continue; - } - let response = build_tor_dns_response(&payload, tor_client.as_ref()).await?; - reply_tx - .send(UdpReply { - payload: response, - source: remote_addr, - destination: local_addr, - }) - .await - .context("failed to enqueue tor dns response")?; - } - } - } - None => break, - } - } - } - - session_tasks.abort_all(); - while let Some(result) = session_tasks.join_next().await { - match result { - Ok(Ok(())) => {} - Ok(Err(err)) => debug!(?err, "udp session task exited during shutdown"), - Err(err) if err.is_cancelled() => {} - Err(err) => debug!(?err, "udp session task panicked during shutdown"), - } - } - Ok(()) -} - -async fn dispatch_direct_udp( - payload: Vec, - local_addr: SocketAddr, - 
remote_addr: SocketAddr, - reply_tx: mpsc::Sender, - sessions: Arc>>>>, - session_tasks: &mut JoinSet>, -) -> Result<()> { - let key = UdpFlowKey { - local: local_addr, - remote: remote_addr, - }; - let existing = { sessions.lock().await.get(&key).cloned() }; - if let Some(sender) = existing { - if sender.send(payload.clone()).await.is_ok() { - return Ok(()); - } - sessions.lock().await.remove(&key); - } - - let (tx, rx) = mpsc::channel::>(32); - tx.send(payload) - .await - .context("failed to enqueue outbound udp payload")?; - sessions.lock().await.insert(key.clone(), tx); - - session_tasks.spawn(async move { run_direct_udp_session(key, rx, reply_tx, sessions).await }); - Ok(()) -} - -async fn run_direct_udp_session( - key: UdpFlowKey, - mut outbound_rx: mpsc::Receiver>, - reply_tx: mpsc::Sender, - sessions: Arc>>>>, -) -> Result<()> { - let bind_addr = match key.remote { - SocketAddr::V4(_) => SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0), - SocketAddr::V6(_) => SocketAddr::new(IpAddr::V6(Ipv6Addr::UNSPECIFIED), 0), - }; - let socket = UdpSocket::bind(bind_addr) - .await - .with_context(|| format!("failed to bind udp socket for {}", key.remote))?; - socket - .connect(key.remote) - .await - .with_context(|| format!("failed to connect udp socket to {}", key.remote))?; - - let mut buf = vec![0u8; 65_535]; - loop { - tokio::select! 
{ - maybe_payload = outbound_rx.recv() => match maybe_payload { - Some(payload) => { - socket - .send(&payload) - .await - .with_context(|| format!("failed to send udp payload to {}", key.remote))?; - } - None => break, - }, - recv = tokio::time::timeout(UDP_IDLE_TIMEOUT, socket.recv(&mut buf)) => match recv { - Ok(Ok(len)) => { - reply_tx - .send(UdpReply { - payload: buf[..len].to_vec(), - source: key.remote, - destination: key.local, - }) - .await - .context("failed to enqueue inbound udp reply")?; - } - Ok(Err(err)) => return Err(err).with_context(|| format!("failed to receive udp response from {}", key.remote)), - Err(_) => break, - } - } - } - - sessions.lock().await.remove(&key); - Ok(()) -} - -fn wireguard_tun_config(config: &WireGuardConfig) -> Result { - parse_tun_config( - &config.interface.address, - config.interface.mtu, - Some("burrow-wireguard"), - ) -} - -fn socket_tun_config( - addresses: &[String], - mtu: Option, - tun_name: Option<&str>, - default_name: &str, -) -> Result { - let default_addresses; - let addresses = if addresses.is_empty() { - default_addresses = vec![DEFAULT_TUN_V4.to_string(), DEFAULT_TUN_V6.to_string()]; - default_addresses.as_slice() - } else { - addresses - }; - parse_tun_config(addresses, mtu, Some(tun_name.unwrap_or(default_name))) -} - -fn parse_tun_config( - addresses: &[String], - mtu: Option, - tun_name: Option<&str>, -) -> Result { - let addresses = addresses - .iter() - .map(|addr| { - addr.parse::() - .with_context(|| format!("invalid tunnel address '{addr}'")) - }) - .collect::>>()?; - - Ok(TunNetworkConfig { - tun_name: tun_name.unwrap_or("burrow-exec").to_string(), - addresses, - mtu: mtu.unwrap_or(DEFAULT_MTU), - }) -} - -fn open_tun_device(config: &TunNetworkConfig) -> Result { - let tun = TunOptions::new() - .name(&config.tun_name) - .no_pi(true) - .tun_excl(true) - .open() - .context("failed to create tun device")?; - Ok(tun.inner.into_inner()) -} - -fn tokio_tun_from_fd(fd: RawFd) -> Result { - let tun = 
unsafe { tun::TunInterface::from_raw_fd(fd) }; - TokioTunInterface::new(tun).context("failed to wrap tun fd in tokio interface") -} - -fn read_inner_tun_config() -> Result { - let raw = env::var(INNER_TUN_CONFIG_ENV).context("missing namespace tun config")?; - serde_json::from_str(&raw).context("invalid namespace tun config") -} - -fn configure_tun_addresses( - iface: &tun::TunInterface, - networks: &[IpNetwork], - mtu: u32, -) -> Result<()> { - for network in networks { - match network { - IpNetwork::V4(net) => { - iface.set_ipv4_addr(net.ip())?; - let netmask = prefix_to_netmask_v4(net.prefix()); - iface.set_netmask(netmask)?; - iface.set_broadcast_addr(broadcast_v4(net.ip(), netmask))?; - } - IpNetwork::V6(net) => iface.add_ipv6_addr(net.ip(), net.prefix())?, - } - } - iface.set_mtu(mtu as i32)?; - Ok(()) -} - -fn install_default_routes(name: &str, networks: &[IpNetwork]) -> Result<()> { - if networks - .iter() - .any(|network| matches!(network, IpNetwork::V4(_))) - { - run_ip(["route", "replace", "default", "dev", name])?; - } - if networks - .iter() - .any(|network| matches!(network, IpNetwork::V6(_))) - { - run_ip(["-6", "route", "replace", "default", "dev", name])?; - } - Ok(()) -} - -fn run_ip(args: [&str; N]) -> Result<()> { - let status = StdCommand::new("ip") - .args(args) - .status() - .context("failed to execute ip command")?; - if !status.success() { - bail!("ip {} failed with status {}", args.join(" "), status); - } - Ok(()) -} - -fn set_inheritable(fd: RawFd) -> Result<()> { - let flags = FdFlag::from_bits_truncate( - fcntl(fd, FcntlArg::F_GETFD).context("failed to query descriptor flags")?, - ); - let flags = flags & !FdFlag::FD_CLOEXEC; - fcntl(fd, FcntlArg::F_SETFD(flags)).context("failed to clear close-on-exec")?; - Ok(()) -} - -async fn await_parent_ready(control_fd: RawFd) -> Result<()> { - tokio::task::spawn_blocking(move || -> Result<()> { - let mut control = unsafe { StdUnixStream::from_raw_fd(control_fd) }; - let mut ack = [0u8; 1]; - 
std::io::Read::read_exact(&mut control, &mut ack) - .context("failed to read namespace ready ack")?; - if ack != *READY_ACK { - bail!("unexpected namespace ready ack"); - } - Ok(()) - }) - .await - .context("failed to join namespace ready wait task")??; - Ok(()) -} - -fn send_tun_fd(control_fd: RawFd, tun_fd: RawFd) -> Result<()> { - let buf = [0u8; 1]; - let iov = [std::io::IoSlice::new(&buf)]; - let fds = [tun_fd]; - sendmsg::<()>( - control_fd, - &iov, - &[ControlMessage::ScmRights(&fds)], - MsgFlags::empty(), - None, - ) - .context("failed to send tun fd to parent")?; - Ok(()) -} - -fn recv_tun_fd(control: &StdUnixStream) -> Result { - let mut buf = [0u8; 1]; - let mut iov = [std::io::IoSliceMut::new(&mut buf)]; - let mut cmsgspace = cmsg_space!([RawFd; 1]); - let msg = recvmsg::<()>( - control.as_raw_fd(), - &mut iov, - Some(&mut cmsgspace), - MsgFlags::empty(), - ) - .context("failed to receive tun fd from namespace child")?; - for cmsg in msg.cmsgs() { - if let ControlMessageOwned::ScmRights(fds) = cmsg { - if let Some(fd) = fds.first() { - return Ok(*fd); - } - } - } - bail!("namespace child did not send a tun fd") -} - -fn ensure_tool(tool: &str) -> Result<()> { - let status = StdCommand::new("sh") - .args(["-lc", &format!("command -v {tool} >/dev/null")]) - .status() - .with_context(|| format!("failed to probe required tool '{tool}'"))?; - if !status.success() { - bail!("required host tool '{tool}' is not available"); - } - Ok(()) -} - -async fn read_optional_payload(path: Option<&Path>) -> Result> { - match path { - Some(path) => tokio::fs::read(path) - .await - .with_context(|| format!("failed to read payload from {}", path.display())), - None => Ok(Vec::new()), - } -} - -async fn read_required_payload(path: Option<&Path>, backend: &str) -> Result> { - let path = path.ok_or_else(|| anyhow!("{backend} exec requires --payload"))?; - tokio::fs::read(path) - .await - .with_context(|| format!("failed to read payload from {}", path.display())) -} - -fn 
parse_wireguard_payload(payload: &[u8], path: Option<&Path>) -> Result { - let payload = str::from_utf8(payload).context("wireguard payload must be valid UTF-8")?; - if let Some(path) = path { - if let Some(ext) = path.extension().and_then(|ext| ext.to_str()) { - return WireGuardConfig::from_content_fmt(payload, ext); - } - } - - WireGuardConfig::from_toml(payload).or_else(|_| WireGuardConfig::from_ini(payload)) -} - -async fn spawn_child(command: &[String]) -> Result { - let mut cmd = Command::new(&command[0]); - if command.len() > 1 { - cmd.args(&command[1..]); - } - cmd.stdin(std::process::Stdio::inherit()); - cmd.stdout(std::process::Stdio::inherit()); - cmd.stderr(std::process::Stdio::inherit()); - cmd.kill_on_drop(true); - cmd.status() - .await - .with_context(|| format!("failed to spawn '{}'", command[0])) -} - -fn child_exit_code(status: ExitStatus) -> Result { - if let Some(code) = status.code() { - return Ok(code); - } - if let Some(signal) = status.signal() { - return Ok(128 + signal); - } - bail!("child process terminated without an exit code"); -} - -fn prefix_to_netmask_v4(prefix: u8) -> Ipv4Addr { - if prefix == 0 { - Ipv4Addr::new(0, 0, 0, 0) - } else { - let mask = (!0u32) << (32 - prefix); - Ipv4Addr::from(mask) - } -} - -fn broadcast_v4(ip: Ipv4Addr, netmask: Ipv4Addr) -> Ipv4Addr { - let ip_u32 = u32::from(ip); - let mask = u32::from(netmask); - Ipv4Addr::from(ip_u32 | !mask) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn parses_direct_json_payload() { - let payload = br#"{"address":["10.0.0.2/24"],"mtu":1400,"tun_name":"burrow0"}"#; - let config = DirectConfig::from_payload(payload).unwrap(); - assert_eq!(config.address, vec!["10.0.0.2/24"]); - assert_eq!(config.mtu, Some(1400)); - assert_eq!(config.tun_name.as_deref(), Some("burrow0")); - } - - #[test] - fn socket_tun_config_uses_dual_stack_defaults() { - let config = socket_tun_config(&[], None, None, "burrow-test").unwrap(); - assert_eq!(config.tun_name, "burrow-test"); - 
assert!(config - .addresses - .iter() - .any(|network| matches!(network, IpNetwork::V4(_)))); - assert!(config - .addresses - .iter() - .any(|network| matches!(network, IpNetwork::V6(_)))); - } -} diff --git a/burrow/src/wireguard/config.rs b/burrow/src/wireguard/config.rs index 5766675..ed7b3cd 100644 --- a/burrow/src/wireguard/config.rs +++ b/burrow/src/wireguard/config.rs @@ -3,12 +3,9 @@ use std::{net::ToSocketAddrs, str::FromStr}; use anyhow::{anyhow, Error, Result}; use base64::{engine::general_purpose, Engine}; use fehler::throws; -use ini::{Ini, Properties}; use ip_network::IpNetwork; -use serde::{Deserialize, Serialize}; use x25519_dalek::{PublicKey, StaticSecret}; -use super::inifield::IniField; use crate::wireguard::{Interface as WgInterface, Peer as WgPeer}; #[throws] @@ -34,7 +31,6 @@ fn parse_public_key(string: &str) -> PublicKey { /// A raw version of Peer Config that can be used later to reflect configuration files. /// This should be later converted to a `WgPeer`. /// Refers to https://github.com/pirate/wireguard-docs?tab=readme-ov-file#overview -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Peer { pub public_key: String, pub preshared_key: Option, @@ -44,18 +40,15 @@ pub struct Peer { pub name: Option, } -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Interface { pub private_key: String, pub address: Vec, - pub listen_port: Option, + pub listen_port: u32, pub dns: Vec, pub mtu: Option, } -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct Config { - #[serde(rename = "Peer")] pub peers: Vec, pub interface: Interface, // Support for multiple interfaces? 
} @@ -102,7 +95,7 @@ impl Default for Config { interface: Interface { private_key: "OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8=".into(), address: vec!["10.13.13.2/24".into()], - listen_port: Some(51820), + listen_port: 51820, dns: Default::default(), mtu: Default::default(), }, @@ -117,83 +110,3 @@ impl Default for Config { } } } - -fn props_get(props: &Properties, key: &str) -> Result -where - T: TryFrom, -{ - IniField::try_from(props.get(key))?.try_into() -} - -impl TryFrom<&Properties> for Interface { - type Error = anyhow::Error; - - fn try_from(props: &Properties) -> Result { - Ok(Self { - private_key: props_get(props, "PrivateKey")?, - address: props_get(props, "Address")?, - listen_port: props_get(props, "ListenPort")?, - dns: props_get(props, "DNS")?, - mtu: props_get(props, "MTU")?, - }) - } -} - -impl TryFrom<&Properties> for Peer { - type Error = anyhow::Error; - - fn try_from(props: &Properties) -> Result { - Ok(Self { - public_key: props_get(props, "PublicKey")?, - preshared_key: props_get(props, "PresharedKey")?, - allowed_ips: props_get(props, "AllowedIPs")?, - endpoint: props_get(props, "Endpoint")?, - persistent_keepalive: props_get(props, "PersistentKeepalive")?, - name: props_get(props, "Name")?, - }) - } -} - -impl Config { - pub fn from_toml(toml: &str) -> Result { - toml::from_str(toml).map_err(Into::into) - } - - pub fn from_ini(ini: &str) -> Result { - let ini = Ini::load_from_str(ini)?; - let interface = ini - .section(Some("Interface")) - .ok_or(anyhow!("Interface section not found"))?; - let peers = ini.section_all(Some("Peer")); - Ok(Self { - interface: Interface::try_from(interface)?, - peers: peers - .into_iter() - .map(|v| Peer::try_from(v)) - .collect::>>()?, - }) - } - - pub fn from_content_fmt(content: &str, fmt: &str) -> Result { - match fmt { - "toml" => Self::from_toml(content), - "ini" | "conf" => Self::from_ini(content), - _ => Err(anyhow::anyhow!("Unsupported format: {}", fmt)), - } - } -} - -#[cfg(test)] -mod tests { - use 
super::*; - - #[test] - fn tst_config_toml() { - let cfig = Config::default(); - let toml = toml::to_string(&cfig).unwrap(); - println!("{}", &toml); - insta::assert_snapshot!(toml); - let cfig2: Config = toml::from_str(&toml).unwrap(); - assert_eq!(cfig, cfig2); - } -} diff --git a/burrow/src/wireguard/iface.rs b/burrow/src/wireguard/iface.rs index 5b61861..620c96c 100755 --- a/burrow/src/wireguard/iface.rs +++ b/burrow/src/wireguard/iface.rs @@ -1,26 +1,20 @@ -use std::{net::IpAddr, ops::Deref, sync::Arc}; +use std::{net::IpAddr, sync::Arc}; use anyhow::Error; use fehler::throws; use futures::future::join_all; use ip_network_table::IpNetworkTable; -use tokio::sync::{Notify, RwLock}; +use tokio::sync::RwLock; use tracing::{debug, error}; use tun::tokio::TunInterface; use super::{noise::Tunnel, Peer, PeerPcb}; -pub struct IndexedPcbs { +struct IndexedPcbs { pcbs: Vec>, allowed_ips: IpNetworkTable, } -impl Default for IndexedPcbs { - fn default() -> Self { - Self::new() - } -} - impl IndexedPcbs { pub fn new() -> Self { Self { @@ -52,21 +46,9 @@ impl FromIterator for IndexedPcbs { } } -enum IfaceStatus { - Running, - Idle, -} - pub struct Interface { - pub tun: Arc>>, - pub pcbs: Arc, - status: Arc>, - stop_notifier: Arc, -} - -async fn is_running(status: Arc>) -> bool { - let st = status.read().await; - matches!(st.deref(), IfaceStatus::Running) + tun: Option>>, + pcbs: Arc, } impl Interface { @@ -78,77 +60,49 @@ impl Interface { .collect::>()?; let pcbs = Arc::new(pcbs); - Self { - pcbs, - tun: Arc::new(RwLock::new(None)), - status: Arc::new(RwLock::new(IfaceStatus::Idle)), - stop_notifier: Arc::new(Notify::new()), - } + Self { pcbs, tun: None } } - pub async fn set_tun(&self, tun: TunInterface) { - debug!("Setting tun interface"); - self.tun.write().await.replace(tun); - let mut st = self.status.write().await; - *st = IfaceStatus::Running; - } - - pub async fn set_tun_ref(&mut self, tun: Arc>>) { - self.tun = tun; - let mut st = self.status.write().await; - *st = 
IfaceStatus::Running; - } - - pub fn get_tun(&self) -> Arc>> { - self.tun.clone() - } - - pub async fn remove_tun(&self) { - let mut st = self.status.write().await; - self.stop_notifier.notify_waiters(); - *st = IfaceStatus::Idle; + pub fn set_tun(&mut self, tun: Arc>) { + self.tun = Some(tun); } pub async fn run(&self) -> anyhow::Result<()> { let pcbs = self.pcbs.clone(); - let tun = self.tun.clone(); - let status = self.status.clone(); - let stop_notifier = self.stop_notifier.clone(); + let tun = self + .tun + .clone() + .ok_or(anyhow::anyhow!("tun interface does not exist"))?; log::info!("Starting interface"); let outgoing = async move { - while is_running(status.clone()).await { + loop { let mut buf = [0u8; 3000]; let src = { - let t = tun.read().await; - let Some(_tun) = t.as_ref() else { - continue; + let src = match tun.read().await.recv(&mut buf[..]).await { + Ok(len) => &buf[..len], + Err(e) => { + error!("Failed to read from interface: {}", e); + continue + } }; - tokio::select! 
{ - _ = stop_notifier.notified() => continue, - pkg = _tun.recv(&mut buf[..]) => match pkg { - Ok(len) => &buf[..len], - Err(e) => { - error!("Failed to read from interface: {}", e); - continue - } - }, - } + debug!("Read {} bytes from interface", src.len()); + src }; let dst_addr = match Tunnel::dst_address(src) { Some(addr) => addr, None => { debug!("No destination found"); - continue; + continue } }; debug!("Routing packet to {}", dst_addr); let Some(idx) = pcbs.find(dst_addr) else { - continue; + continue }; debug!("Found peer:{}", idx); @@ -160,14 +114,17 @@ impl Interface { } Err(e) => { log::error!("Failed to send packet {}", e); - continue; + continue } }; } }; let mut tsks = vec![]; - let tun = self.tun.clone(); + let tun = self + .tun + .clone() + .ok_or(anyhow::anyhow!("tun interface does not exist"))?; let outgoing = tokio::task::spawn(outgoing); tsks.push(outgoing); debug!("preparing to spawn read tasks"); @@ -181,7 +138,7 @@ impl Interface { let main_tsk = async move { if let Err(e) = pcb.open_if_closed().await { log::error!("failed to open pcb: {}", e); - return; + return } let r2 = pcb.run(tun).await; if let Err(e) = r2 { @@ -192,25 +149,23 @@ impl Interface { }; let pcb = pcbs.pcbs[i].clone(); - let status = self.status.clone(); let update_timers_tsk = async move { let mut buf = [0u8; 65535]; - while is_running(status.clone()).await { + loop { tokio::time::sleep(tokio::time::Duration::from_millis(250)).await; match pcb.update_timers(&mut buf).await { Ok(..) 
=> (), Err(e) => { error!("Failed to update timers: {}", e); - return; + return } } } }; let pcb = pcbs.pcbs[i].clone(); - let status = self.status.clone(); let reset_rate_limiter_tsk = async move { - while is_running(status.clone()).await { + loop { tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; pcb.reset_rate_limiter().await; } diff --git a/burrow/src/wireguard/inifield.rs b/burrow/src/wireguard/inifield.rs deleted file mode 100644 index 946868d..0000000 --- a/burrow/src/wireguard/inifield.rs +++ /dev/null @@ -1,81 +0,0 @@ -use std::str::FromStr; - -use anyhow::{Error, Result}; - -pub struct IniField(String); - -impl FromStr for IniField { - type Err = Error; - - fn from_str(s: &str) -> Result { - Ok(Self(s.to_string())) - } -} - -impl TryFrom for Vec { - type Error = Error; - - fn try_from(field: IniField) -> Result { - Ok(field.0.split(',').map(|s| s.trim().to_string()).collect()) - } -} - -impl TryFrom for u32 { - type Error = Error; - - fn try_from(value: IniField) -> Result { - value.0.parse().map_err(Error::from) - } -} - -impl TryFrom for Option { - type Error = Error; - - fn try_from(value: IniField) -> Result { - if value.0.is_empty() { - Ok(None) - } else { - value.0.parse().map(Some).map_err(Error::from) - } - } -} - -impl TryFrom for String { - type Error = Error; - - fn try_from(value: IniField) -> Result { - Ok(value.0) - } -} - -impl TryFrom for Option { - type Error = Error; - - fn try_from(value: IniField) -> Result { - if value.0.is_empty() { - Ok(None) - } else { - Ok(Some(value.0)) - } - } -} - -impl TryFrom> for IniField -where - T: ToString, -{ - type Error = Error; - - fn try_from(value: Option) -> Result { - Ok(match value { - Some(v) => Self(v.to_string()), - None => Self(String::new()), - }) - } -} - -impl IniField { - fn new(value: &str) -> Self { - Self(value.to_string()) - } -} diff --git a/burrow/src/wireguard/mod.rs b/burrow/src/wireguard/mod.rs index cfb4585..15563fb 100755 --- a/burrow/src/wireguard/mod.rs +++ 
b/burrow/src/wireguard/mod.rs @@ -1,6 +1,5 @@ -pub mod config; +mod config; mod iface; -mod inifield; mod noise; mod pcb; mod peer; diff --git a/burrow/src/wireguard/noise/handshake.rs b/burrow/src/wireguard/noise/handshake.rs index 65136bc..2ec0c6a 100755 --- a/burrow/src/wireguard/noise/handshake.rs +++ b/burrow/src/wireguard/noise/handshake.rs @@ -9,15 +9,20 @@ use std::{ use aead::{Aead, Payload}; use blake2::{ digest::{FixedOutput, KeyInit}, - Blake2s256, Blake2sMac, Digest, + Blake2s256, + Blake2sMac, + Digest, }; use chacha20poly1305::XChaCha20Poly1305; use rand_core::OsRng; use ring::aead::{Aad, LessSafeKey, Nonce, UnboundKey, CHACHA20_POLY1305}; -use subtle::ConstantTimeEq; use super::{ - errors::WireGuardError, session::Session, x25519, HandshakeInit, HandshakeResponse, + errors::WireGuardError, + session::Session, + x25519, + HandshakeInit, + HandshakeResponse, PacketCookieReply, }; @@ -204,7 +209,7 @@ impl Tai64N { /// Parse a timestamp from a 12 byte u8 slice fn parse(buf: &[u8; 12]) -> Result { if buf.len() < 12 { - return Err(WireGuardError::InvalidTai64nTimestamp); + return Err(WireGuardError::InvalidTai64nTimestamp) } let (sec_bytes, nano_bytes) = buf.split_at(std::mem::size_of::()); @@ -529,14 +534,11 @@ impl Handshake { &hash, )?; - if !bool::from( - self.params - .peer_static_public - .as_bytes() - .ct_eq(&peer_static_public_decrypted), - ) { - return Err(WireGuardError::WrongKey); - } + ring::constant_time::verify_slices_are_equal( + self.params.peer_static_public.as_bytes(), + &peer_static_public_decrypted, + ) + .map_err(|_| WireGuardError::WrongKey)?; // initiator.hash = HASH(initiator.hash || msg.encrypted_static) hash = b2s_hash(&hash, packet.encrypted_static); @@ -554,22 +556,19 @@ impl Handshake { let timestamp = Tai64N::parse(×tamp)?; if !timestamp.after(&self.last_handshake_timestamp) { // Possibly a replay - return Err(WireGuardError::WrongTai64nTimestamp); + return Err(WireGuardError::WrongTai64nTimestamp) } 
self.last_handshake_timestamp = timestamp; // initiator.hash = HASH(initiator.hash || msg.encrypted_timestamp) hash = b2s_hash(&hash, packet.encrypted_timestamp); - self.previous = std::mem::replace( - &mut self.state, - HandshakeState::InitReceived { - chaining_key, - hash, - peer_ephemeral_public, - peer_index, - }, - ); + self.previous = std::mem::replace(&mut self.state, HandshakeState::InitReceived { + chaining_key, + hash, + peer_ephemeral_public, + peer_index, + }); self.format_handshake_response(dst) } @@ -670,7 +669,7 @@ impl Handshake { let local_index = self.cookies.index; if packet.receiver_idx != local_index { - return Err(WireGuardError::WrongIndex); + return Err(WireGuardError::WrongIndex) } // msg.encrypted_cookie = XAEAD(HASH(LABEL_COOKIE || responder.static_public), // msg.nonce, cookie, last_received_msg.mac1) @@ -726,7 +725,7 @@ impl Handshake { dst: &'a mut [u8], ) -> Result<&'a mut [u8], WireGuardError> { if dst.len() < super::HANDSHAKE_INIT_SZ { - return Err(WireGuardError::DestinationBufferTooSmall); + return Err(WireGuardError::DestinationBufferTooSmall) } let (message_type, rest) = dst.split_at_mut(4); @@ -809,7 +808,7 @@ impl Handshake { dst: &'a mut [u8], ) -> Result<(&'a mut [u8], Session), WireGuardError> { if dst.len() < super::HANDSHAKE_RESP_SZ { - return Err(WireGuardError::DestinationBufferTooSmall); + return Err(WireGuardError::DestinationBufferTooSmall) } let state = std::mem::replace(&mut self.state, HandshakeState::None); diff --git a/burrow/src/wireguard/noise/mod.rs b/burrow/src/wireguard/noise/mod.rs index 86bcc73..aa06652 100755 --- a/burrow/src/wireguard/noise/mod.rs +++ b/burrow/src/wireguard/noise/mod.rs @@ -133,9 +133,9 @@ pub enum Packet<'a> { impl Tunnel { #[inline(always)] - pub fn parse_incoming_packet(src: &[u8]) -> Result, WireGuardError> { + pub fn parse_incoming_packet(src: &[u8]) -> Result { if src.len() < 4 { - return Err(WireGuardError::InvalidPacket); + return Err(WireGuardError::InvalidPacket) } // Checks 
the type, as well as the reserved zero fields @@ -177,7 +177,7 @@ impl Tunnel { pub fn dst_address(packet: &[u8]) -> Option { if packet.is_empty() { - return None; + return None } match packet[0] >> 4 { @@ -201,7 +201,7 @@ impl Tunnel { pub fn src_address(packet: &[u8]) -> Option { if packet.is_empty() { - return None; + return None } match packet[0] >> 4 { @@ -296,7 +296,7 @@ impl Tunnel { self.timer_tick(TimerName::TimeLastDataPacketSent); } self.tx_bytes += src.len(); - return TunnResult::WriteToNetwork(packet); + return TunnResult::WriteToNetwork(packet) } // If there is no session, queue the packet for future retry @@ -320,7 +320,7 @@ impl Tunnel { ) -> TunnResult<'a> { if datagram.is_empty() { // Indicates a repeated call - return self.send_queued_packet(dst); + return self.send_queued_packet(dst) } let mut cookie = [0u8; COOKIE_REPLY_SZ]; @@ -331,7 +331,7 @@ impl Tunnel { Ok(packet) => packet, Err(TunnResult::WriteToNetwork(cookie)) => { dst[..cookie.len()].copy_from_slice(cookie); - return TunnResult::WriteToNetwork(&mut dst[..cookie.len()]); + return TunnResult::WriteToNetwork(&mut dst[..cookie.len()]) } Err(TunnResult::Err(e)) => return TunnResult::Err(e), _ => unreachable!(), @@ -435,7 +435,7 @@ impl Tunnel { let cur_idx = self.current; if cur_idx == new_idx { // There is nothing to do, already using this session, this is the common case - return; + return } if self.sessions[cur_idx % N_SESSIONS].is_none() || self.timers.session_timers[new_idx % N_SESSIONS] @@ -481,7 +481,7 @@ impl Tunnel { force_resend: bool, ) -> TunnResult<'a> { if self.handshake.is_in_progress() && !force_resend { - return TunnResult::Done; + return TunnResult::Done } if self.handshake.is_expired() { @@ -540,7 +540,7 @@ impl Tunnel { }; if computed_len > packet.len() { - return TunnResult::Err(WireGuardError::InvalidPacket); + return TunnResult::Err(WireGuardError::InvalidPacket) } self.timer_tick(TimerName::TimeLastDataPacketReceived); diff --git 
a/burrow/src/wireguard/noise/rate_limiter.rs b/burrow/src/wireguard/noise/rate_limiter.rs index e4fde02..ff19efd 100755 --- a/burrow/src/wireguard/noise/rate_limiter.rs +++ b/burrow/src/wireguard/noise/rate_limiter.rs @@ -8,13 +8,23 @@ use aead::{generic_array::GenericArray, AeadInPlace, KeyInit}; use chacha20poly1305::{Key, XChaCha20Poly1305}; use parking_lot::Mutex; use rand_core::{OsRng, RngCore}; -use subtle::ConstantTimeEq; +use ring::constant_time::verify_slices_are_equal; use super::{ handshake::{ - b2s_hash, b2s_keyed_mac_16, b2s_keyed_mac_16_2, b2s_mac_24, LABEL_COOKIE, LABEL_MAC1, + b2s_hash, + b2s_keyed_mac_16, + b2s_keyed_mac_16_2, + b2s_mac_24, + LABEL_COOKIE, + LABEL_MAC1, }, - HandshakeInit, HandshakeResponse, Packet, TunnResult, Tunnel, WireGuardError, + HandshakeInit, + HandshakeResponse, + Packet, + TunnResult, + Tunnel, + WireGuardError, }; const COOKIE_REFRESH: u64 = 128; // Use 128 and not 120 so the compiler can optimize out the division @@ -126,7 +136,7 @@ impl RateLimiter { dst: &'a mut [u8], ) -> Result<&'a mut [u8], WireGuardError> { if dst.len() < super::COOKIE_REPLY_SZ { - return Err(WireGuardError::DestinationBufferTooSmall); + return Err(WireGuardError::DestinationBufferTooSmall) } let (message_type, rest) = dst.split_at_mut(4); @@ -175,9 +185,8 @@ impl RateLimiter { let (mac1, mac2) = macs.split_at(16); let computed_mac1 = b2s_keyed_mac_16(&self.mac1_key, msg); - if !bool::from(computed_mac1[..16].ct_eq(mac1)) { - return Err(TunnResult::Err(WireGuardError::InvalidMac)); - } + verify_slices_are_equal(&computed_mac1[..16], mac1) + .map_err(|_| TunnResult::Err(WireGuardError::InvalidMac))?; if self.is_under_load() { let addr = match src_addr { @@ -189,11 +198,11 @@ impl RateLimiter { let cookie = self.current_cookie(addr); let computed_mac2 = b2s_keyed_mac_16_2(&cookie, msg, mac1); - if !bool::from(computed_mac2[..16].ct_eq(mac2)) { + if verify_slices_are_equal(&computed_mac2[..16], mac2).is_err() { let cookie_packet = self 
.format_cookie_reply(sender_idx, cookie, mac1, dst) .map_err(TunnResult::Err)?; - return Err(TunnResult::WriteToNetwork(cookie_packet)); + return Err(TunnResult::WriteToNetwork(cookie_packet)) } } } diff --git a/burrow/src/wireguard/noise/session.rs b/burrow/src/wireguard/noise/session.rs index 14c191b..8988728 100755 --- a/burrow/src/wireguard/noise/session.rs +++ b/burrow/src/wireguard/noise/session.rs @@ -88,11 +88,11 @@ impl ReceivingKeyCounterValidator { fn will_accept(&self, counter: u64) -> Result<(), WireGuardError> { if counter >= self.next { // As long as the counter is growing no replay took place for sure - return Ok(()); + return Ok(()) } if counter + N_BITS < self.next { // Drop if too far back - return Err(WireGuardError::InvalidCounter); + return Err(WireGuardError::InvalidCounter) } if !self.check_bit(counter) { Ok(()) @@ -107,22 +107,22 @@ impl ReceivingKeyCounterValidator { fn mark_did_receive(&mut self, counter: u64) -> Result<(), WireGuardError> { if counter + N_BITS < self.next { // Drop if too far back - return Err(WireGuardError::InvalidCounter); + return Err(WireGuardError::InvalidCounter) } if counter == self.next { // Usually the packets arrive in order, in that case we simply mark the bit and // increment the counter self.set_bit(counter); self.next += 1; - return Ok(()); + return Ok(()) } if counter < self.next { // A packet arrived out of order, check if it is valid, and mark if self.check_bit(counter) { - return Err(WireGuardError::InvalidCounter); + return Err(WireGuardError::InvalidCounter) } self.set_bit(counter); - return Ok(()); + return Ok(()) } // Packets where dropped, or maybe reordered, skip them and mark unused if counter - self.next >= N_BITS { @@ -247,7 +247,7 @@ impl Session { panic!("The destination buffer is too small"); } if packet.receiver_idx != self.receiving_index { - return Err(WireGuardError::WrongIndex); + return Err(WireGuardError::WrongIndex) } // Don't reuse counters, in case this is a replay attack we want 
to quickly // check the counter without running expensive decryption diff --git a/burrow/src/wireguard/noise/timers.rs b/burrow/src/wireguard/noise/timers.rs index f713e6f..1d0cf1f 100755 --- a/burrow/src/wireguard/noise/timers.rs +++ b/burrow/src/wireguard/noise/timers.rs @@ -190,7 +190,7 @@ impl Tunnel { { if self.handshake.is_expired() { - return TunnResult::Err(WireGuardError::ConnectionExpired); + return TunnResult::Err(WireGuardError::ConnectionExpired) } // Clear cookie after COOKIE_EXPIRATION_TIME @@ -206,7 +206,7 @@ impl Tunnel { tracing::error!("CONNECTION_EXPIRED(REJECT_AFTER_TIME * 3)"); self.handshake.set_expired(); self.clear_all(); - return TunnResult::Err(WireGuardError::ConnectionExpired); + return TunnResult::Err(WireGuardError::ConnectionExpired) } if let Some(time_init_sent) = self.handshake.timer() { @@ -219,7 +219,7 @@ impl Tunnel { tracing::error!("CONNECTION_EXPIRED(REKEY_ATTEMPT_TIME)"); self.handshake.set_expired(); self.clear_all(); - return TunnResult::Err(WireGuardError::ConnectionExpired); + return TunnResult::Err(WireGuardError::ConnectionExpired) } if time_init_sent.elapsed() >= REKEY_TIMEOUT { @@ -299,11 +299,11 @@ impl Tunnel { } if handshake_initiation_required { - return self.format_handshake_initiation(dst, true); + return self.format_handshake_initiation(dst, true) } if keepalive_required { - return self.encapsulate(&[], dst); + return self.encapsulate(&[], dst) } TunnResult::Done diff --git a/burrow/src/wireguard/pcb.rs b/burrow/src/wireguard/pcb.rs index 6e5e6c0..db57968 100755 --- a/burrow/src/wireguard/pcb.rs +++ b/burrow/src/wireguard/pcb.rs @@ -54,7 +54,7 @@ impl PeerPcb { Ok(()) } - pub async fn run(&self, tun_interface: Arc>>) -> Result<(), Error> { + pub async fn run(&self, tun_interface: Arc>) -> Result<(), Error> { tracing::debug!("starting read loop for pcb... 
for {:?}", &self); let rid: i32 = random(); let mut buf: [u8; 3000] = [0u8; 3000]; @@ -64,7 +64,7 @@ impl PeerPcb { let guard = self.socket.read().await; let Some(socket) = guard.as_ref() else { self.open_if_closed().await?; - continue; + continue }; let mut res_buf = [0; 1500]; // tracing::debug!("{} : waiting for readability on {:?}", rid, socket); @@ -72,7 +72,7 @@ impl PeerPcb { Ok(l) => l, Err(e) => { log::error!("{}: error reading from socket: {:?}", rid, e); - continue; + continue } }; let mut res_dat = &res_buf[..len]; @@ -88,7 +88,7 @@ impl PeerPcb { TunnResult::Done => break, TunnResult::Err(e) => { tracing::error!(message = "Decapsulate error", error = ?e); - break; + break } TunnResult::WriteToNetwork(packet) => { tracing::debug!("WriteToNetwork: {:?}", packet); @@ -102,29 +102,17 @@ impl PeerPcb { .await?; tracing::debug!("WriteToNetwork done"); res_dat = &[]; - continue; + continue } TunnResult::WriteToTunnelV4(packet, addr) => { tracing::debug!("WriteToTunnelV4: {:?}, {:?}", packet, addr); - tun_interface - .read() - .await - .as_ref() - .ok_or(anyhow::anyhow!("tun interface does not exist"))? - .send(packet) - .await?; - break; + tun_interface.read().await.send(packet).await?; + break } TunnResult::WriteToTunnelV6(packet, addr) => { tracing::debug!("WriteToTunnelV6: {:?}, {:?}", packet, addr); - tun_interface - .read() - .await - .as_ref() - .ok_or(anyhow::anyhow!("tun interface does not exist"))? 
- .send(packet) - .await?; - break; + tun_interface.read().await.send(packet).await?; + break } } } @@ -146,7 +134,7 @@ impl PeerPcb { let handle = self.socket.read().await; let Some(socket) = handle.as_ref() else { tracing::error!("No socket for peer"); - return Ok(()); + return Ok(()) }; tracing::debug!("Our Encapsulated packet: {:?}", packet); socket.send(packet).await?; @@ -169,7 +157,7 @@ impl PeerPcb { let handle = self.socket.read().await; let Some(socket) = handle.as_ref() else { tracing::error!("No socket for peer"); - return Ok(()); + return Ok(()) }; socket.send(packet).await?; tracing::debug!("Sent Packet for timer update"); diff --git a/burrow/src/wireguard/snapshots/burrow__wireguard__config__tests__tst_config_toml.snap b/burrow/src/wireguard/snapshots/burrow__wireguard__config__tests__tst_config_toml.snap deleted file mode 100644 index 3800647..0000000 --- a/burrow/src/wireguard/snapshots/burrow__wireguard__config__tests__tst_config_toml.snap +++ /dev/null @@ -1,16 +0,0 @@ ---- -source: burrow/src/wireguard/config.rs -expression: toml ---- -[[Peer]] -public_key = "8GaFjVO6c4luCHG4ONO+1bFG8tO+Zz5/Gy+Geht1USM=" -preshared_key = "ha7j4BjD49sIzyF9SNlbueK0AMHghlj6+u0G3bzC698=" -allowed_ips = ["8.8.8.8/32", "0.0.0.0/0"] -endpoint = "wg.burrow.rs:51820" - -[interface] -private_key = "OEPVdomeLTxTIBvv3TYsJRge0Hp9NMiY0sIrhT8OWG8=" -address = ["10.13.13.2/24"] -listen_port = 51820 -dns = [] - diff --git a/burrow/tmp/conrd.conf b/burrow/tmp/conrd.conf deleted file mode 100644 index 52572d1..0000000 --- a/burrow/tmp/conrd.conf +++ /dev/null @@ -1,8 +0,0 @@ -[Interface] -PrivateKey = gAaK0KFGOpxY7geGo59XXDufcxeoSNXXNC12mCQmlVs= -Address = 10.1.11.2/32 -DNS = 10.1.11.1 -[Peer] -PublicKey = Ab6V2mgPHiCXaAZfQrNts8ha8RkEzC49VnmMQfe5Yg4= -AllowedIPs = 10.1.11.1/32,10.1.11.2/32,0.0.0.0/0 -Endpoint = 172.251.163.175:51820 \ No newline at end of file diff --git a/contributors.nix b/contributors.nix deleted file mode 100644 index 60501d1..0000000 --- a/contributors.nix 
+++ /dev/null @@ -1,91 +0,0 @@ -{ - groups = { - users = "burrow-users"; - admins = "burrow-admins"; - linear = { - owners = "linear-owners"; - admins = "linear-admins"; - guests = "linear-guests"; - }; - }; - - identities = { - contact = { - displayName = "Burrow"; - canonicalEmail = "contact@burrow.net"; - isAdmin = true; - forgeAuthorized = true; - bootstrapAuthentik = true; - sshPublicKeyPath = ./nixos/keys/contact_at_burrow_net.pub; - roles = [ - "operator" - "forge-admin" - ]; - }; - - conrad = { - displayName = "Conrad Kramer"; - canonicalEmail = "conrad@burrow.net"; - isAdmin = true; - forgeAuthorized = false; - bootstrapAuthentik = true; - roles = [ - "operator" - "founder" - ]; - }; - - jett = { - displayName = "Jett"; - canonicalEmail = "jett@burrow.net"; - isAdmin = true; - forgeAuthorized = false; - forgeUnixUser = true; - bootstrapAuthentik = true; - sshPublicKeyPath = ./nixos/keys/jett_at_burrow_net.pub; - roles = [ - "member" - "operator" - "forge-admin" - ]; - }; - - davnotdev = { - displayName = "David"; - canonicalEmail = "davnotdev@burrow.net"; - isAdmin = true; - forgeAuthorized = false; - bootstrapAuthentik = true; - roles = [ - "member" - "operator" - "forge-admin" - ]; - }; - - agent = { - displayName = "Burrow Agent"; - canonicalEmail = "agent@burrow.net"; - isAdmin = false; - forgeAuthorized = true; - bootstrapAuthentik = false; - sshPublicKeyPath = ./nixos/keys/agent_at_burrow_net.pub; - roles = [ - "automation" - ]; - }; - - ui-test = { - displayName = "Burrow UI Test"; - canonicalEmail = "ui-test@burrow.net"; - isAdmin = false; - forgeAuthorized = false; - bootstrapAuthentik = true; - authentikPasswordSecret = "burrowAuthentikUiTestPassword"; - roles = [ - "testing" - "apple-ui" - ]; - }; - }; -} diff --git a/docs/FORWARDEMAIL.md b/docs/FORWARDEMAIL.md deleted file mode 100644 index 798f3e5..0000000 --- a/docs/FORWARDEMAIL.md +++ /dev/null @@ -1,101 +0,0 @@ -# Forward Email Backups - -Burrow's mail direction is hosted mail on [Forward 
Email](https://forwardemail.net/), with domain-owned backup retention in our own S3-compatible object storage. - -This is the first mail path to operationalize for `burrow.net` and `burrow.rs`. It keeps SMTP/IMAP hosting off the first forge host while still giving Burrow control over backup retention and object ownership. - -## What Forward Email Requires - -Forward Email exposes custom backup storage per domain. The documented API shape is: - -- `PUT /v1/domains/{domain}` with: - - `has_custom_s3=true` - - `s3_endpoint` - - `s3_access_key_id` - - `s3_secret_access_key` - - `s3_region` - - `s3_bucket` -- `POST /v1/domains/{domain}/test-s3-connection` - -Forward Email also documents these operational constraints: - -- the bucket must remain private -- credentials are validated with `HeadBucket` -- failed or public-bucket configurations fall back to Forward Email's default storage and notify domain administrators -- custom S3 keeps every backup version, so lifecycle expiration is our responsibility - -## Burrow Secret Layout - -Present in `intake/` today: - -- `intake/forwardemail_api_token.txt` -- `intake/hetzner-s3-user.txt` -- `intake/hetzner-s3-secret.txt` -- Hetzner public S3 endpoint for Forward Email: `https://hel1.your-objectstorage.com` -- Hetzner object storage region: `hel1` -- Hetzner bucket used for Forward Email backups: `burrow` - -## Verified Storage State - -As of March 15, 2026, Burrow's Forward Email custom S3 configuration is live: - -- endpoint: `https://hel1.your-objectstorage.com` -- region: `hel1` -- bucket: `burrow` -- `burrow.net` has `has_custom_s3=true` -- `burrow.rs` has `has_custom_s3=true` -- Forward Email's `/test-s3-connection` succeeded for both domains -- the `burrow` bucket enforces lifecycle expiration after `90` days - -Forward Email performs bucket validation with bucket-style addressing. 
For Hetzner Object Storage, this means the working endpoint is the regional S3 endpoint (`https://hel1.your-objectstorage.com`), not the account alias (`https://burrow.hel1.your-objectstorage.com`). Using the account alias causes TLS hostname mismatches when the vendor prepends the bucket name. - -## Helper - -Use [`Tools/forwardemail-custom-s3.sh`](../Tools/forwardemail-custom-s3.sh) to configure or retest the domain setting without putting secrets on the process list. - -Use [`Tools/forwardemail-hetzner-storage.py`](../Tools/forwardemail-hetzner-storage.py) to ensure the Hetzner backup bucket exists and to apply lifecycle expiry before enabling custom S3 on the Forward Email side. - -Bucket bootstrap example: - -```sh -Tools/forwardemail-hetzner-storage.py \ - --endpoint https://hel1.your-objectstorage.com \ - --bucket burrow \ - --expire-days 90 -``` - -Example: - -```sh -Tools/forwardemail-custom-s3.sh \ - --domain burrow.net \ - --api-token-file intake/forwardemail_api_token.txt \ - --s3-endpoint https://hel1.your-objectstorage.com \ - --s3-region hel1 \ - --s3-bucket burrow \ - --s3-access-key-file intake/hetzner-s3-user.txt \ - --s3-secret-key-file intake/hetzner-s3-secret.txt -``` - -Retest an existing domain configuration without rewriting it: - -```sh -Tools/forwardemail-custom-s3.sh \ - --domain burrow.net \ - --api-token-file intake/forwardemail_api_token.txt \ - --test-only -``` - -## Retention - -Forward Email preserves every backup object when custom S3 is enabled. Configure lifecycle expiration on the bucket itself. A 30-day or 90-day expiry window is the baseline recommendation from the vendor docs; Burrow should choose explicitly per domain instead of letting the bucket grow without bound. The current Burrow bootstrap helper defaults to `90` days. 
- -## Identity Direction - -Hosted mail and SaaS identity are separate concerns: - -- mail hosting/backups: Forward Email + Burrow-owned S3-compatible storage -- interactive identity: Authentik as the long-term IdP -- future SaaS SSO target: Linear via SAML once the workspace and plan are ready - -This means the forge host does not need to become the first mail server just to give Burrow mailboxes or retention control. diff --git a/docs/GETTING_STARTED.md b/docs/GETTING_STARTED.md index 346f7e7..764c219 100644 --- a/docs/GETTING_STARTED.md +++ b/docs/GETTING_STARTED.md @@ -98,14 +98,10 @@ code burrow You can run burrow on the command line with cargo: ``` -sudo -E cargo run -- start +cargo run ``` -Creating the tunnel requires elevated privileges. Regular checks and tests can run without `sudo`: - -``` -cargo test --workspace --all-features -``` +Cargo will ask for your password because burrow needs permission in order to create a tunnel. diff --git a/docs/GTK_APP.md b/docs/GTK_APP.md index 582b0a2..ef73d2b 100644 --- a/docs/GTK_APP.md +++ b/docs/GTK_APP.md @@ -15,7 +15,7 @@ Note that the flatpak version can compile but will not run properly! 1. Install build dependencies ``` - sudo apt install -y clang meson cmake pkg-config libssl-dev libgtk-4-dev libadwaita-1-dev gettext desktop-file-utils + sudo apt install -y clang meson cmake pkg-config libgtk-4-dev libadwaita-1-dev gettext desktop-file-utils ``` 2. Install flatpak builder (Optional) @@ -38,7 +38,7 @@ Note that the flatpak version can compile but will not run properly! 1. Install build dependencies ``` - sudo dnf install -y clang ninja-build cmake meson openssl-devel gtk4-devel glib2-devel libadwaita-devel desktop-file-utils libappstream-glib + sudo dnf install -y clang ninja-build cmake meson gtk4-devel glib2-devel libadwaita-devel desktop-file-utils libappstream-glib ``` 2. Install flatpak builder (Optional) @@ -61,7 +61,7 @@ Note that the flatpak version can compile but will not run properly! 1. 
Install build dependencies ``` - sudo xbps-install -Sy gcc clang meson cmake pkg-config openssl-devel gtk4-devel gettext desktop-file-utils gtk4-update-icon-cache appstream-glib + sudo xbps-install -Sy gcc clang meson cmake pkg-config gtk4-devel gettext desktop-file-utils gtk4-update-icon-cache appstream-glib ``` 2. Install flatpak builder (Optional) @@ -88,12 +88,6 @@ flatpak install --user \ ## Building -With Nix, enter the focused GTK shell before running the Meson build: - -```bash -nix develop .#gtk -``` -
General @@ -145,16 +139,6 @@ nix develop .#gtk ## Running -The GTK app mirrors the Apple home surface: a Burrow header, Networks carousel, -Accounts section, Tunnel action, and the same add flows for WireGuard, Tor, and -Tailnet. It talks to the daemon over the same gRPC API used by Apple clients for -network storage, tunnel state, Tailnet discovery, authority probing, browser -sign-in, and Tailnet payloads. - -On Linux the GTK app first looks for a daemon on the configured gRPC socket. If -none is reachable, it starts an embedded user-scoped daemon with a socket under -`XDG_RUNTIME_DIR` and a database under `XDG_DATA_HOME` before refreshing the UI. -
General diff --git a/docs/PROTOCOL_ROADMAP.md b/docs/PROTOCOL_ROADMAP.md deleted file mode 100644 index 37c7228..0000000 --- a/docs/PROTOCOL_ROADMAP.md +++ /dev/null @@ -1,31 +0,0 @@ -# Protocol Roadmap - -Burrow currently has two tunnel paths in-tree: - -- a WireGuard data plane -- a Tor-backed userspace TCP path - -What it does not have yet is a transport-neutral control plane that can honestly claim full MASQUE `CONNECT-IP` or full Tailscale-style negotiation parity. This repository now contains the beginnings of that layer: - -- control-plane data structures in `burrow/src/control/mod.rs` -- local auth bootstrap and persistent node/session storage in `burrow/src/auth/server/` -- governance documents under `evolution/` for the bigger protocol work - -## `CONNECT-IP` - -Full RFC 9484 support requires more than packet forwarding. It needs HTTP/3 session management, Capsule handling, HTTP Datagram context identifiers, address assignment, route advertisement, and request-scope enforcement. Burrow does not implement those end to end yet. - -## Tailscale-Style Negotiation - -Burrow now has register/map request and response types plus persistent node records, but it does not yet implement the full Tailscale capability surface, peer delta protocol, DERP coordination, or Noise-based control transport. - -## Current Direction - -The intended sequence is: - -1. Stabilize the control-plane data model and bootstrap auth. -2. Introduce transport-neutral route and address abstractions. -3. Add MASQUE framing and HTTP/3 transport support. -4. Expand policy, relay, and interoperability testing. - -This keeps Burrow honest about what is running today while creating a clean path for the rest. diff --git a/docs/WIREGUARD_LINEAGE.md b/docs/WIREGUARD_LINEAGE.md deleted file mode 100644 index 63e8839..0000000 --- a/docs/WIREGUARD_LINEAGE.md +++ /dev/null @@ -1,30 +0,0 @@ -# WireGuard Rust Lineage - -Burrow's in-tree WireGuard engine is not a greenfield implementation. 
It was lifted from the Rust WireGuard lineage around Cloudflare's BoringTun, then cut down and reshaped to fit Burrow's own daemon and tunnel abstractions. - -## What Was Lifted - -- The repository history includes `1b39eca` (`boringtun wip`) and `28af9003` (`merge boringtun into burrow`). -- The current `burrow/src/wireguard/noise/*` files still carry the original Cloudflare copyright and SPDX headers. -- Core protocol machinery such as the Noise handshake, session state, rate limiter, and timer logic came from that imported body of work. - -## What Changed in Burrow - -Burrow does not embed BoringTun unchanged. - -- The original device layer was replaced with Burrow-specific interface and peer control blocks in `burrow/src/wireguard/iface.rs` and `burrow/src/wireguard/pcb.rs`. -- Configuration handling was rewritten around Burrow's own INI parser and config model in `burrow/src/wireguard/config.rs`. -- The daemon now resolves the active runtime from the database-backed network list rather than from a single static WireGuard payload. -- Burrow added its own runtime switching path so WireGuard can share one daemon lifecycle with the rest of the managed runtime system. - -## What Was Improved - -The lifted code has been tightened further in-repo. - -- Deprecated constant-time comparisons were replaced with `subtle`. -- Network ordering and runtime selection are now deterministic and test-covered. -- The Burrow runtime can swap between WireGuard configurations without restarting the daemon process itself. - -## Why This Matters - -This project should be explicit about lineage. Burrow benefits from proven Rust WireGuard work, but it owns the integration surface, runtime behavior, and future maintenance burden. That is why the code should be documented as lifted, modified, and improved rather than described as wholly original. 
diff --git a/evolution/README.md b/evolution/README.md deleted file mode 100644 index 794b1fe..0000000 --- a/evolution/README.md +++ /dev/null @@ -1,74 +0,0 @@ -# Burrow Evolution - -Burrow Evolution Proposals (BEPs) are the repository's durable design record for protocol work, control-plane changes, forge infrastructure, and operational policy. - -## Goals - -1. Capture intent before implementation outruns the architecture. -2. Give contributors and agents enough context to work safely without re-discovering prior decisions. -3. Tie ambitious work to concrete validation, rollout, and rollback criteria. - -## When a BEP is required - -Open a BEP for: - -- new transports or protocol families -- control-plane and identity changes -- deployment, forge, runner, or secrets changes -- data model migrations -- user-visible behavior that changes security or routing semantics - -Small bug fixes and isolated refactors do not need a BEP unless they materially change one of the areas above. - -## Lifecycle - -1. Pitch - Capture the problem and why it matters now. -2. Draft - Copy `evolution/proposals/0000-template.md` to `evolution/proposals/BEP-XXXX-short-slug.md`. -3. Review - Collect feedback, tighten the design, and document unresolved concerns. -4. Decision - Mark the proposal `Accepted`, `Rejected`, or `Returned for Revision`. -5. Implementation - Link code changes, tests, and rollout evidence. -6. Supersession - Keep historical proposals in-tree and point forward to the replacing BEP. - -## Status Values - -- `Pitch` -- `Draft` -- `In Review` -- `Accepted` -- `Implemented` -- `Rejected` -- `Returned for Revision` -- `Superseded` -- `Archived` - -## Layout - -```text -evolution/ - README.md - proposals/ - 0000-template.md - BEP-0001-... -``` - -Use ASCII Markdown. Keep metadata at the top of each proposal so tooling and future agents can parse it quickly. 
- -## BEP Helper - -Use the `bep` helper under `Scripts/` to browse or list proposals: - -- `Scripts/bep` opens a quick browser for `evolution/`. -- `Scripts/bep list --status Draft` lists proposals by status. -- `Scripts/bep open BEP-0005` opens a proposal in `$EDITOR`. - -Validate proposal metadata with: - -```bash -python3 Scripts/check-bep-metadata.py -``` diff --git a/evolution/proposals/0000-template.md b/evolution/proposals/0000-template.md deleted file mode 100644 index 66954c6..0000000 --- a/evolution/proposals/0000-template.md +++ /dev/null @@ -1,57 +0,0 @@ -# `BEP-XXXX` - Title Case Summary - -```text -Status: Draft | In Review | Accepted | Implemented | Rejected | Returned for Revision | Superseded | Archived -Proposal: BEP-XXXX -Authors: -Coordinator: -Reviewers: -Constitution Sections: -Implementation PRs: (optional while drafting) -Decision Date: -``` - -## Summary - -One or two paragraphs that state the desired outcome and why it matters. - -## Motivation - -- What problem exists today? -- Why should Burrow solve it now? -- Which issues, incidents, or constraints support the change? - -## Detailed Design - -- Architecture and boundaries -- Data model and migration plan -- Protocol or API changes -- Observability, testing, and failure handling - -## Security and Operational Considerations - -- Access and secret handling -- Abuse, downgrade, or supply-chain risks -- Rollback and kill-switch plans - -## Contributor Playbook - -Give the concrete steps, commands, checks, and evidence a contributor should produce while implementing or rolling out the change. - -## Alternatives Considered - -List alternatives and why they were rejected. - -## Impact on Other Work - -- follow-up tasks -- dependencies -- compatibility constraints - -## Decision - -Record the final call, who made it, and any conditions. - -## References - -Link relevant issues, specs, transcripts, and external research. 
diff --git a/evolution/proposals/BEP-0001-sovereign-forge-and-governance.md b/evolution/proposals/BEP-0001-sovereign-forge-and-governance.md deleted file mode 100644 index f48a7a9..0000000 --- a/evolution/proposals/BEP-0001-sovereign-forge-and-governance.md +++ /dev/null @@ -1,61 +0,0 @@ -# `BEP-0001` - Sovereign Forge and Governance Bootstrap - -```text -Status: Draft -Proposal: BEP-0001 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: II, III, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should own its forge, deployment logic, and operational context under `burrow.net`. This proposal establishes the repository-local governance and forge bootstrap required to move build, release, and infrastructure control out of GitHub-centric assumptions and into a self-hosted operating model. - -## Motivation - -- The repository currently keeps CI definitions under `.github/workflows/` but has no first-class self-hosted forge layout. -- Infrastructure changes and protocol work are already entangled; without a design record, the project risks landing irreversible operations without enough context. -- A self-hosted forge is a prerequisite for durable autonomy over source, runners, and release pipelines. - -## Detailed Design - -- Add a project constitution and BEP process under `evolution/`. -- Introduce a Nix flake and NixOS host/module layout for `burrow-forge`. -- Add Forgejo-native workflows under `.forgejo/workflows/` for repository-local CI. -- Bootstrap the initial forge identity around `contact@burrow.net` and an agent-owned SSH workflow. - -## Security and Operational Considerations - -- Initial bootstrap may read credentials from local intake, but production must converge on encrypted secret handling. -- The first forge host replacement must preserve rollback information before deleting any existing VM. 
-- DNS for `burrow.net` is currently pending activation; the forge rollout must not assume public reachability until nameserver cutover completes. - -## Contributor Playbook - -- Keep destructive host operations behind explicit verification of the current Hetzner state. -- Build and test repository-local workflows before using them for deployment. -- Record the active server id, image, IPs, and SSH path before replacement. - -## Alternatives Considered - -- Continue relying on GitHub Actions while separately hosting services. Rejected because it leaves source authority and CI policy split across systems. -- Stand up Forgejo without a repository-local operating model. Rejected because the repo would still be missing deployment truth. - -## Impact on Other Work - -- Blocks long-term migration of workflows away from GitHub. -- Provides the governance anchor for protocol and control-plane proposals. - -## Decision - -Pending. - -## References - -- `CONSTITUTION.md` -- `.github/workflows/` -- `.forgejo/workflows/` diff --git a/evolution/proposals/BEP-0002-control-plane-bootstrap-and-local-auth.md b/evolution/proposals/BEP-0002-control-plane-bootstrap-and-local-auth.md deleted file mode 100644 index 2558d09..0000000 --- a/evolution/proposals/BEP-0002-control-plane-bootstrap-and-local-auth.md +++ /dev/null @@ -1,60 +0,0 @@ -# `BEP-0002` - Control-Plane Bootstrap and Local Auth - -```text -Status: Draft -Proposal: BEP-0002 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: I, II, III, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow needs a repository-owned control-plane model instead of ad hoc network payload storage plus third-party-only auth. This proposal introduces a local username/password bootstrap for `contact@burrow.net`, plus a register/map data model shaped to support a Tailscale-style control server without claiming full parity yet. 
- -## Motivation - -- Current auth support is limited and does not provide a plain local bootstrap path for the project's own operator identity. -- The existing database stores network payloads, but not a durable model for users, nodes, sessions, or control-plane negotiation state. -- Future work on route policy, device coordination, and richer negotiation needs a real data model now. - -## Detailed Design - -- Add control-plane types for users, nodes, register requests, and map responses. -- Extend the auth server schema with local credentials, sessions, provider logins, and control nodes. -- Expose JSON endpoints for local login, node registration, and map retrieval. -- Seed the initial operator account from intake-backed bootstrap credentials. - -## Security and Operational Considerations - -- Passwords are stored with Argon2id hashes only. -- Session tokens are bearer credentials and must be treated as sensitive. -- The bootstrap credential path is a short-term path; follow-up work should move it into encrypted secret management before public deployment. - -## Contributor Playbook - -- Verify bootstrap account creation in an isolated test database. -- Exercise login, register, and map end to end with integration tests. -- Do not advertise protocol parity beyond the implemented request/response contract. - -## Alternatives Considered - -- Wait for full external identity-provider integration first. Rejected because the forge needs an operator account now. -- Keep control-plane state implicit in daemon-local configuration. Rejected because it cannot express multi-device coordination. - -## Impact on Other Work - -- Unblocks forge bootstrap and future device control-plane work. -- Creates the storage model needed for richer policy and transport proposals. - -## Decision - -Pending. 
- -## References - -- `burrow/src/auth/server/` -- `burrow/src/control/` diff --git a/evolution/proposals/BEP-0003-connect-ip-and-negotiation-roadmap.md b/evolution/proposals/BEP-0003-connect-ip-and-negotiation-roadmap.md deleted file mode 100644 index 99ddedf..0000000 --- a/evolution/proposals/BEP-0003-connect-ip-and-negotiation-roadmap.md +++ /dev/null @@ -1,61 +0,0 @@ -# `BEP-0003` - CONNECT-IP and Negotiation Roadmap - -```text -Status: Draft -Proposal: BEP-0003 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: I, II, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should grow from a WireGuard-first tunnel runner into a transport stack that can support HTTP/3 MASQUE `CONNECT-IP` and a richer node negotiation model. This proposal stages that work so Burrow can adopt the right abstractions instead of stapling QUIC-era semantics onto a WireGuard-only daemon. - -## Motivation - -- `CONNECT-IP` introduces HTTP/3 sessions, context identifiers, address assignment, and route advertisements that do not fit the current daemon model. -- A Tailscale-style control plane requires explicit node, endpoint, and session state rather than raw network blobs. -- The project needs a roadmap that distinguishes data-model work, control-plane work, and actual transport implementation. - -## Detailed Design - -- Stage 1: land control-plane types and persistent auth/session/node storage. -- Stage 2: add transport-agnostic route, address-assignment, and policy abstractions in Burrow. -- Stage 3: implement MASQUE `CONNECT-IP` framing and HTTP Datagram handling. -- Stage 4: connect the transport layer to real relay, policy, and observability paths. - -## Security and Operational Considerations - -- `CONNECT-IP` changes the trust boundary from WireGuard peers to HTTP/3 peers and relays; authentication, replay handling, and scope restriction must be explicit. 
-- Route advertisements and delegated prefixes must be validated before touching the data plane. -- Control-plane capability claims must not imply support that the transport layer does not yet implement. - -## Contributor Playbook - -- Keep protocol codecs independently testable before integrating them into live transports. -- Add interoperability tests for every new capsule or datagram type. -- Separate request parsing, policy validation, and packet forwarding so regressions stay localized. - -## Alternatives Considered - -- Implement MASQUE directly in the daemon without control-plane refactoring. Rejected because the current daemon has no transport-neutral contract for routes or prefixes. -- Treat Tailscale negotiation as a one-off compatibility shim. Rejected because Burrow needs first-class control-plane concepts either way. - -## Impact on Other Work - -- Depends on BEP-0002. -- Informs future relay, policy, and node coordination work. - -## Decision - -Pending. - -## References - -- RFC 9484 -- `burrow/src/daemon/` -- `burrow/src/control/` diff --git a/evolution/proposals/BEP-0004-hosted-mail-and-saas-identity.md b/evolution/proposals/BEP-0004-hosted-mail-and-saas-identity.md deleted file mode 100644 index d633f37..0000000 --- a/evolution/proposals/BEP-0004-hosted-mail-and-saas-identity.md +++ /dev/null @@ -1,68 +0,0 @@ -# `BEP-0004` - Hosted Mail Backups and SaaS Identity - -```text -Status: Draft -Proposal: BEP-0004 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: II, III, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should start with hosted mail on Forward Email instead of self-hosting SMTP and IMAP on the first forge machine. Backup retention should still be controlled by Burrow through custom S3-compatible storage backed by Burrow-owned object storage. 
In parallel, Burrow should treat SaaS identity as a separate track and converge on Authentik as the long-term IdP, with Linear SAML SSO as a planned downstream integration rather than an immediate bootstrap dependency. - -## Motivation - -- The first forge host already carries source control, CI, and deployment bootstrap risk. Adding a self-hosted mail stack increases operational scope before the forge is stable. -- Forward Email already exposes SMTP and IMAP while allowing per-domain custom S3 backup storage, which preserves Burrow's data ownership over mailbox backups. -- The repository needs a durable decision record that separates hosted mail operations from future SaaS SSO work. - -## Detailed Design - -- Use Forward Email as the operational mail provider for `burrow.net` and `burrow.rs`. -- Configure custom S3-compatible storage per domain using Burrow-controlled object storage credentials. -- Keep one backup bucket per domain and enforce lifecycle expiration at the bucket layer. -- Add repository-owned tooling and documentation for applying and testing the Forward Email custom S3 configuration. -- Treat Authentik as the future identity authority for SaaS applications, but keep Linear SAML as a later rollout once the workspace and vendor prerequisites are available. Linear's current docs place SAML and SCIM behind higher-tier workspace security settings, so Burrow should treat plan availability as an explicit precondition. - -## Security and Operational Considerations - -- Forward Email API tokens and S3 credentials must stay in secret files and must not be passed directly on the shell command line. -- Buckets must remain private. Public bucket detection by the vendor should be treated as a hard failure, not a warning. -- Backup growth is unbounded without lifecycle rules. Retention policy is part of the rollout, not optional cleanup. 
-- Hosted mail reduces MTA attack surface on the forge host, but it adds third-party dependency risk; keeping backups in Burrow-owned storage limits that blast radius. - -## Contributor Playbook - -- Put the Forward Email API token in `intake/forwardemail_api_token.txt`. -- Use `Tools/forwardemail-custom-s3.sh` to configure `burrow.net` and `burrow.rs`. -- Run the helper again with `--test-only` after any credential rotation. -- Record the chosen endpoint, region, bucket names, and lifecycle policy alongside rollout evidence. -- Do not claim Linear SAML is live until the Authentik app, Linear workspace settings, workspace plan prerequisites, and end-to-end login flow are verified. - -## Alternatives Considered - -- Self-host Stalwart on the forge host immediately. Rejected for the first rollout because it expands host scope before source control and CI are stable. -- Rely on Forward Email default backup storage only. Rejected because it gives Burrow less control over retention and data location. -- Delay all SaaS identity planning until after forge cutover. Rejected because Linear and other SaaS integrations will otherwise accrete without an agreed authority. - -## Impact on Other Work - -- Narrows the first forge host scope. -- Creates a clean mail path for `contact@burrow.net` without requiring self-hosted SMTP and IMAP. -- Leaves Authentik and Linear SAML as explicit follow-up work instead of hidden assumptions. - -## Decision - -Pending. 
- -## References - -- `docs/FORWARDEMAIL.md` -- `Tools/forwardemail-custom-s3.sh` -- Forward Email FAQ: custom S3-compatible storage for backups -- Linear docs: SAML SSO diff --git a/evolution/proposals/BEP-0005-daemon-ipc-and-apple-boundary.md b/evolution/proposals/BEP-0005-daemon-ipc-and-apple-boundary.md deleted file mode 100644 index a34a609..0000000 --- a/evolution/proposals/BEP-0005-daemon-ipc-and-apple-boundary.md +++ /dev/null @@ -1,81 +0,0 @@ -# `BEP-0005` - Daemon IPC and Apple Boundary - -```text -Status: Draft -Proposal: BEP-0005 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: II, III, IV, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should formalize one Apple/runtime boundary: Apple clients speak only to the daemon over gRPC on the app-group Unix socket, and the daemon owns all external control-plane, helper-process, and runtime coordination work. This prevents UI code from accreting side HTTP paths or ad hoc control-plane integrations that bypass the system Burrow is supposed to own. - -## Motivation - -- The current Tailnet work already showed the failure mode: Swift UI code started reaching around the daemon boundary to talk to helper HTTP endpoints directly. -- Apple-specific process ownership is easy to blur between the app, the network extension, and helper daemons unless the contract is explicit. -- If Burrow wants a durable multi-runtime architecture, the daemon must remain the only orchestration boundary between clients and control/data-plane behavior. - -## Detailed Design - -- Apple UI and Apple support libraries may call only daemon gRPC methods over the declared Burrow Unix socket. -- Direct Swift calls to external control-plane HTTP APIs, localhost helper HTTP servers, or runtime-specific subprocesses are forbidden. 
-- The daemon is responsible for: - - discovery of Tailnet authorities and related metadata - - control-plane session setup and tracking - - login/session lifecycle brokering - - runtime start/stop/reconcile - - translating helper or bridge processes into stable daemon RPCs -- `burrow/src/control/` owns transport-neutral control-plane semantics such as discovery, authority normalization, and request/response shaping. -- Apple UI owns presentation only: - - forms - - local state - - presenting returned auth URLs or statuses - - surfacing daemon availability and errors -- Any new Apple-facing runtime capability requires a daemon RPC first. - -## Security and Operational Considerations - -- Keeping control-plane I/O out of Swift UI reduces accidental secret, token, and callback sprawl across app code. -- The daemon boundary makes testing and kill-switch behavior tractable because runtime integration is localized. -- Apple daemon lifecycle ownership must be explicit: either the app ensures the daemon is running before RPC or the extension owns it and the UI surfaces daemon-unavailable state clearly. -- Non-Apple presentation clients should follow the same daemon-first lifecycle pattern: connect to a managed daemon when present, or start a user-scoped embedded daemon before issuing RPCs, without adding platform-local control-plane paths. - -## Contributor Playbook - -- Before adding a new Apple-side workflow, identify the daemon RPC that should own it. -- If the RPC does not exist, add the protocol shape in `proto/burrow.proto`, implement it in the daemon, and only then wire Swift UI. -- Verify that no Swift UI or support code calls external control-plane HTTP endpoints directly. 
-- For Tailnet and similar flows, test: - - daemon unavailable behavior - - successful RPC path - - error propagation through the UI -- Keep Linux GTK and Apple clients visually and functionally aligned around the same daemon-backed home surface: Networks, Accounts, Tunnel, and add flows should remain corresponding views over the daemon API. - -## Alternatives Considered - -- Let Apple UI call control-plane endpoints directly for convenience. Rejected because it creates parallel orchestration paths and breaks the daemon contract. -- Allow one-off exceptions for login helpers. Rejected because those exceptions become the architecture. - -## Impact on Other Work - -- Governs the Tailnet refactor and future Apple runtime work. -- Governs Linux GTK daemon startup parity where the same daemon API is reused from a user-scoped presentation process. -- Interacts with BEP-0002 control-plane bootstrap and BEP-0003 transport refactoring. - -## Decision - -Pending. - -## References - -- `Apple/UI/` -- `Apple/Core/` -- `Apple/NetworkExtension/` -- `burrow/src/daemon/` -- `burrow/src/control/` diff --git a/evolution/proposals/BEP-0006-tailnet-authority-first-control-plane.md b/evolution/proposals/BEP-0006-tailnet-authority-first-control-plane.md deleted file mode 100644 index 36458ef..0000000 --- a/evolution/proposals/BEP-0006-tailnet-authority-first-control-plane.md +++ /dev/null @@ -1,74 +0,0 @@ -# `BEP-0006` - Tailnet Authority-First Control Plane - -```text -Status: Draft -Proposal: BEP-0006 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: I, II, IV, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should treat Tailnet as one protocol family. Tailscale-managed and self-hosted Headscale-style deployments differ by authority, policy, and auth details, not by a distinct user-facing protocol. Burrow’s config and UI should therefore be authority-first rather than provider-first. 
- -## Motivation - -- Splitting Tailscale and Headscale into separate user-facing providers causes fake architectural divergence. -- Discovery already naturally returns an authority and optional issuer; that is the stable contract users actually need. -- Future managed or enterprise deployments should fit the same model without requiring another protocol picker. - -## Detailed Design - -- Tailnet configuration is centered on: - - account - - identity - - authority/login server URL - - optional tailnet name - - optional hostname - - auth method/material -- User-facing surfaces should not force a protocol choice between Tailscale and Headscale. -- Provider inference may remain internal metadata for compatibility and diagnostics: - - default managed Tailscale authority - - custom self-hosted authority - - Burrow-owned authority when explicitly applicable -- Discovery returns authority and related metadata; editing the authority is the mechanism that moves a configuration from managed default to custom control server. -- The daemon and control layer own provider inference; the UI should primarily present “Tailnet” plus the selected authority. -- Platform clients consume the same daemon gRPC surface for Tailnet discovery, authority probing, browser sign-in, and saved network payloads. macOS/iOS SwiftUI and Linux GTK may differ in presentation and local credential stores, but neither should introduce a second control-plane path. - -## Security and Operational Considerations - -- Authority-first config reduces UI complexity and makes misconfiguration easier to reason about. -- Provider-specific assumptions must not leak into packet or control-plane semantics unless the authority actually requires them. -- Auth material must remain authority-scoped and identity-scoped in daemon storage. - -## Contributor Playbook - -- Remove provider pickers from Tailnet UI unless a concrete protocol difference requires one. 
-- Store the authority explicitly in payloads and infer provider internally only when needed. -- Keep Linux GTK and Apple clients at functional parity by routing Tailnet add/discover/probe/login through `TailnetControl` and `Networks` RPCs instead of platform-local HTTP or legacy JSON daemon commands. -- Prefer tests that validate authority normalization and discovery behavior over UI-provider branching. - -## Alternatives Considered - -- Keep separate user-facing providers for Tailscale and Headscale. Rejected because it models deployment shape as protocol shape. -- Collapse all control planes into one opaque Burrow provider. Rejected because the authority still matters operationally and diagnostically. - -## Impact on Other Work - -- Refines BEP-0002’s Tailscale-shaped control-plane work. -- Constrains the Tailnet Apple and Linux GTK refactors plus future daemon control-plane storage. - -## Decision - -Pending. - -## References - -- `burrow/src/control/` -- `Apple/UI/Networks/` -- `burrow-gtk/src/` -- `proto/burrow.proto` diff --git a/evolution/proposals/BEP-0007-identity-registry-and-operator-bootstrap.md b/evolution/proposals/BEP-0007-identity-registry-and-operator-bootstrap.md deleted file mode 100644 index 1fde0fb..0000000 --- a/evolution/proposals/BEP-0007-identity-registry-and-operator-bootstrap.md +++ /dev/null @@ -1,73 +0,0 @@ -# `BEP-0007` - Identity Registry and Operator Bootstrap - -```text -Status: Draft -Proposal: BEP-0007 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: II, III, IV, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should maintain one canonical registry for project identities, aliases, bootstrap users, SSH keys, and admin-group mappings. Forgejo, Authentik, and related bootstrap configuration should derive from that registry instead of hardcoding overlapping identity facts in multiple modules. 
- -## Motivation - -- Burrow currently hardcodes operator and admin/bootstrap user facts directly in host configuration. -- Multi-account and self-hosted identity are becoming core architecture, not incidental infra details. -- A single registry reduces drift across Forgejo, Authentik, Headscale, SSH authorization, and future control-plane bootstrap. - -## Detailed Design - -- Add a root-level identity registry (`contributors.nix`) as the canonical source of truth for: - - usernames - - display names - - canonical emails - - external source emails or aliases - - admin scope - - bootstrap eligibility - - forge authorized SSH keys - - named roles -- Consume that registry from host configuration for: - - Forgejo authorized keys - - Forgejo bootstrap admin defaults - - Authentik bootstrap users - - Burrow user/admin group names -- Future work may derive contributor docs, OIDC bootstrap, and additional runtime configuration from the same registry. - -## Security and Operational Considerations - -- Identity drift is a security bug when it affects admin groups, bootstrap accounts, or SSH authorization. -- The registry stores metadata only; secrets remain in agenix or other declared secret paths. -- Changes to the registry should receive explicit review because they affect access and governance. - -## Contributor Playbook - -- Edit `contributors.nix` first when changing operator, admin, alias, or bootstrap identity state. -- Derive runtime configuration from the registry instead of duplicating the same facts elsewhere. -- Keep secret references separate from identity metadata. - -## Alternatives Considered - -- Continue hardcoding users in module options. Rejected because drift is inevitable once Forgejo, Authentik, and Headscale all depend on the same identities. -- Create separate per-service user lists. Rejected because it duplicates governance facts and weakens review. 
- -## Impact on Other Work - -- Supports forge auth, Authentik group sync, and future multi-account Burrow control-plane work. -- Creates the basis for stronger contributor and operator provenance later. - -## Decision - -Pending. - -## References - -- `contributors.nix` -- `nixos/hosts/burrow-forge/default.nix` -- `nixos/modules/burrow-authentik.nix` -- `nixos/modules/burrow-forge.nix` diff --git a/evolution/proposals/BEP-0008-authentik-backed-team-chat-and-workspace-sso.md b/evolution/proposals/BEP-0008-authentik-backed-team-chat-and-workspace-sso.md deleted file mode 100644 index 0ce03a6..0000000 --- a/evolution/proposals/BEP-0008-authentik-backed-team-chat-and-workspace-sso.md +++ /dev/null @@ -1,169 +0,0 @@ -# `BEP-0008` - Authentik-Backed Team Chat and Workspace Identity - -```text -Status: Draft -Proposal: BEP-0008 -Authors: gpt-5.4 -Coordinator: gpt-5.4 -Reviewers: Pending -Constitution Sections: II, III, V -Implementation PRs: Pending -Decision Date: Pending -``` - -## Summary - -Burrow should add a self-hosted team chat surface at `chat.burrow.net` and -continue the project-wide move toward Authentik as the identity authority for -external work systems. The immediate targets are a self-hosted Zulip -deployment rooted in Authentik SAML, a Linear SAML configuration when the -workspace plan supports it, and a 1Password Unlock-with-SSO deployment rooted -in the same Authentik-backed OIDC authority. - -This keeps Burrow's day-to-day coordination surfaces aligned with the same -admin groups, canonical users, and secret-handling model already used for -Forgejo, Headscale, and Tailscale. It also avoids fragmenting login state -across vendor-native Google auth flows when Burrow already operates an IdP. - -## Motivation - -- Forge, Tailnet, operator identity, and Tailscale custom OIDC are already - rooted in Authentik. Team chat, work tracking, and password-manager access - should not become separate authority islands. 
-- Zulip provides a self-hosted chat system under Burrow's control, which fits - the constitution better than adding another hosted chat dependency. -- Linear remains a SaaS dependency, but its workspace access should still be - derived from Burrow-managed identities and domains when the vendor plan - exposes SAML configuration. -- 1Password Business is another external work surface where Burrow-controlled - identities are preferable to vendor-native Google-only auth. Its current - vendor flow is OIDC-based Unlock with SSO rather than SAML, so the proposal - needs to preserve protocol accuracy instead of flattening everything into - one SAML bucket. -- Burrow already has a canonical public identity registry and a secret-backed - external-email alias map. Reusing that structure is lower-risk than - inventing per-app user bootstrap logic. - -## Detailed Design - -- Add a Burrow-managed Zulip workload on the forge host at `chat.burrow.net`. - The deployment should be repo-owned and rebuildable from Nix, even if the - runtime uses vendor-supported container images internally. -- Prefer host-managed NixOS services for Zulip's stateful dependencies - (PostgreSQL, Redis, RabbitMQ, memcached, backups) so Burrow owns the - operational surface directly rather than composing a container-side service - mesh. -- Zulip should authenticate through Authentik SAML rather than local passwords - as the primary path. Initial bootstrap may still keep an operational escape - hatch while the deployment is being validated. -- Add Authentik-managed SAML applications for: - - Zulip at `chat.burrow.net` - - Linear using Burrow's claimed domains and Authentik metadata -- Add an Authentik-managed SCIM backchannel for Linear so Burrow can push - role groups declaratively instead of hand-maintaining workspace roles. -- Add an Authentik-managed OIDC application for 1Password Business under the - Burrow team sign-in address. 
-- Treat Zulip and Linear as downstream applications of the same identity - authority, and treat 1Password as part of that same authority even though - its vendor protocol is OIDC rather than SAML. The source of truth remains: - - public identities and admin intent in `contributors.nix` - - private alias mappings and external accounts in agenix-encrypted secrets -- Keep app-specific configuration in dedicated reconciliation code or module - options instead of hand-edited UI state. -- Prefer service-specific reconciliation over ad hoc manual setup so rebuilds - and host replacement converge automatically. -- When Burrow wants an external-user launcher surface in Authentik, configure - the brand's `default_application` explicitly instead of relying on - `/if/user/`, which otherwise remains internal-user-only. -- Derive Linear SCIM role groups from Burrow's canonical identity metadata. - If Burrow-wide admin intent says a user is an operator/admin, the repo-owned - configuration should map that intent onto the Linear push group without a - second manual roster. -- Model 1Password according to the vendor's actual integration contract: - - OIDC Authorization Code Flow with PKCE - - public client rather than a confidential client - - no Burrow-side dependence on a stored client secret unless the vendor flow - changes - -## Security and Operational Considerations - -- Do not store external personal email mappings in public registry files. - Public tree data may include Burrow usernames and canonical `@burrow.net` - addresses, but external aliases must stay in encrypted secrets. -- Zulip internal service credentials, Django secret material, and any mail - credentials must have explicit storage and rotation paths. -- Linear SAML must not become Burrow's only admin recovery path. At least one - owner login path outside the enforced SAML flow should remain available until - rollout is proven. -- Linear SCIM group push should be role-scoped and explicit. 
Burrow should - avoid blanket ownership mapping unless that intent is recorded in the repo. -- 1Password Owners cannot be forced onto Unlock with SSO during initial setup. - Burrow should preserve the owner recovery path and treat OIDC rollout as a - scoped migration for non-owner users first. -- If Zulip is deployed without production-grade outbound email at first, that - limitation must be documented and treated as an operational constraint, not a - hidden assumption. -- Rollback should be straightforward: - - disable or stop the Zulip module - - remove the Authentik SAML apps - - remove the Authentik OIDC app used for 1Password if necessary - - leave the underlying Burrow identities unchanged - -## Contributor Playbook - -- Define the app and identity intent in the repository before modifying the - forge host. -- Add or update Nix modules so `burrow-forge` can rebuild Zulip and the - corresponding Authentik SAML configuration from the tree. -- Verify: - - `chat.burrow.net` serves a working Zulip login surface - - Authentik exposes working metadata for Zulip and Linear -- Authentik exposes a working OIDC issuer for 1Password - - users in Burrow admin groups receive the expected access on first login - - external Burrow users landing on `auth.burrow.net` reach the intended - app launcher target instead of the internal-only Authentik user interface -- Record concrete evidence for: - - host deployment generation - - Authentik reconciliation success - - Zulip login success - - Linear SAML configuration state - - 1Password Unlock with SSO configuration state - -## Alternatives Considered - -- Use Zulip Cloud instead of self-hosting. Rejected because the ask is to host - chat under `chat.burrow.net`, and Burrow already operates a forge host with a - self-managed identity plane. -- Keep Linear on Google-native login. Rejected because it leaves Burrow work - access outside the project's operator and group model. -- Treat 1Password as a SAML app for consistency. 
Rejected because the live - vendor flow is OIDC and Burrow should not pretend otherwise in repo-owned - infrastructure. -- Add per-app manual Authentik configuration without repository automation. - Rejected because it violates Burrow's infrastructure-in-repo commitment. - -## Impact on Other Work - -- Extends Burrow's Authentik role from control-plane identity into team-work - surfaces. -- Introduces a persistent chat workload on the forge host, with resource and - monitoring implications. -- Creates a likely follow-up for SCIM or richer group synchronization if Linear - or Zulip role mapping needs to become fully declarative later. -- Adds a second OIDC relying party beyond Forgejo, Headscale, and Tailscale, - which raises the importance of keeping Burrow's Authentik scope mappings and - redirect handling consistent across applications. - -## Decision - -Pending. - -## References - -- `CONSTITUTION.md` -- `contributors.nix` -- `evolution/proposals/BEP-0004-hosted-mail-and-saas-identity.md` -- Authentik docs: SAML provider and metadata endpoints -- Zulip docs: SAML authentication and docker deployment -- Linear docs: SAML and access control -- 1Password docs: Unlock with SSO using OpenID Connect diff --git a/flake.lock b/flake.lock deleted file mode 100644 index 0067dab..0000000 --- a/flake.lock +++ /dev/null @@ -1,192 +0,0 @@ -{ - "nodes": { - "agenix": { - "inputs": { - "darwin": "darwin", - "home-manager": "home-manager", - "nixpkgs": [ - "nixpkgs" - ], - "systems": "systems" - }, - "locked": { - "lastModified": 1770165109, - "narHash": "sha256-9VnK6Oqai65puVJ4WYtCTvlJeXxMzAp/69HhQuTdl/I=", - "owner": "ryantm", - "repo": "agenix", - "rev": "b027ee29d959fda4b60b57566d64c98a202e0feb", - "type": "github" - }, - "original": { - "owner": "ryantm", - "repo": "agenix", - "type": "github" - } - }, - "darwin": { - "inputs": { - "nixpkgs": [ - "agenix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1744478979, - "narHash": 
"sha256-dyN+teG9G82G+m+PX/aSAagkC+vUv0SgUw3XkPhQodQ=", - "owner": "lnl7", - "repo": "nix-darwin", - "rev": "43975d782b418ebf4969e9ccba82466728c2851b", - "type": "github" - }, - "original": { - "owner": "lnl7", - "ref": "master", - "repo": "nix-darwin", - "type": "github" - } - }, - "disko": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1773889306, - "narHash": "sha256-PAqwnsBSI9SVC2QugvQ3xeYCB0otOwCacB1ueQj2tgw=", - "type": "tarball", - "url": "https://codeload.github.com/nix-community/disko/tar.gz/master" - }, - "original": { - "type": "tarball", - "url": "https://codeload.github.com/nix-community/disko/tar.gz/master" - } - }, - "flake-utils": { - "inputs": { - "systems": "systems_2" - }, - "locked": { - "lastModified": 1731533236, - "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", - "type": "tarball", - "url": "https://codeload.github.com/numtide/flake-utils/tar.gz/main" - }, - "original": { - "type": "tarball", - "url": "https://codeload.github.com/numtide/flake-utils/tar.gz/main" - } - }, - "hcloud-upload-image-src": { - "flake": false, - "locked": { - "lastModified": 1766413232, - "narHash": "sha256-1u9tpzciYjB/EgBI81pg9w0kez7hHZON7+AHvfKW7k0=", - "type": "tarball", - "url": "https://codeload.github.com/apricote/hcloud-upload-image/tar.gz/v1.3.0" - }, - "original": { - "type": "tarball", - "url": "https://codeload.github.com/apricote/hcloud-upload-image/tar.gz/v1.3.0" - } - }, - "home-manager": { - "inputs": { - "nixpkgs": [ - "agenix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1745494811, - "narHash": "sha256-YZCh2o9Ua1n9uCvrvi5pRxtuVNml8X2a03qIFfRKpFs=", - "owner": "nix-community", - "repo": "home-manager", - "rev": "abfad3d2958c9e6300a883bd443512c55dfeb1be", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "home-manager", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1773389992, - "narHash": 
"sha256-wvfdLLWJ2I9oEpDd9PfMA8osfIZicoQ5MT1jIwNs9Tk=", - "type": "tarball", - "url": "https://codeload.github.com/NixOS/nixpkgs/tar.gz/nixos-unstable" - }, - "original": { - "type": "tarball", - "url": "https://codeload.github.com/NixOS/nixpkgs/tar.gz/nixos-unstable" - } - }, - "nsc-autoscaler": { - "inputs": { - "flake-utils": [ - "flake-utils" - ], - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1775221037, - "narHash": "sha256-tv6Y3cqn76PEyZpSMMItVW96KKIboovBWTOv5Lt7PXg=", - "ref": "refs/heads/main", - "rev": "2c485752fde28ec3be2f228b571d1906f4bcf917", - "revCount": 10, - "type": "git", - "url": "https://compatible.systems/conrad/nsc-autoscaler.git" - }, - "original": { - "type": "git", - "url": "https://compatible.systems/conrad/nsc-autoscaler.git" - } - }, - "root": { - "inputs": { - "agenix": "agenix", - "disko": "disko", - "flake-utils": "flake-utils", - "hcloud-upload-image-src": "hcloud-upload-image-src", - "nixpkgs": "nixpkgs", - "nsc-autoscaler": "nsc-autoscaler" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "systems_2": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/flake.nix b/flake.nix deleted file mode 100644 index e842fba..0000000 --- a/flake.nix +++ /dev/null @@ -1,234 +0,0 @@ -{ - description = "Burrow development shell and forge host configuration"; - - inputs = { - nixpkgs.url = 
"tarball+https://codeload.github.com/NixOS/nixpkgs/tar.gz/nixos-unstable"; - flake-utils.url = "tarball+https://codeload.github.com/numtide/flake-utils/tar.gz/main"; - agenix = { - url = "github:ryantm/agenix"; - inputs.nixpkgs.follows = "nixpkgs"; - }; - disko = { - url = "tarball+https://codeload.github.com/nix-community/disko/tar.gz/master"; - inputs.nixpkgs.follows = "nixpkgs"; - }; - nsc-autoscaler = { - url = "git+https://compatible.systems/conrad/nsc-autoscaler.git"; - inputs.nixpkgs.follows = "nixpkgs"; - inputs.flake-utils.follows = "flake-utils"; - }; - hcloud-upload-image-src = { - url = "tarball+https://codeload.github.com/apricote/hcloud-upload-image/tar.gz/v1.3.0"; - flake = false; - }; - }; - - outputs = { self, nixpkgs, flake-utils, agenix, disko, nsc-autoscaler, hcloud-upload-image-src }: - let - supportedSystems = [ - "x86_64-linux" - "aarch64-linux" - "x86_64-darwin" - "aarch64-darwin" - ]; - in - (flake-utils.lib.eachSystem supportedSystems (system: - let - pkgs = import nixpkgs { - inherit system; - }; - lib = pkgs.lib; - commonPackages = with pkgs; [ - cargo - rustc - rustfmt - clippy - protobuf - pkg-config - sqlite - git - openssh - curl - jq - nodejs_20 - python3 - rsync - ]; - nscPkg = - if pkgs.stdenv.isLinux || pkgs.stdenv.isDarwin then - let - version = "0.0.452"; - osName = - if pkgs.stdenv.isLinux then - "linux" - else if pkgs.stdenv.isDarwin then - "darwin" - else - throw "nsc: unsupported host OS ${pkgs.stdenv.hostPlatform.system}"; - archInfo = - if pkgs.stdenv.hostPlatform.isx86_64 then - { - arch = "amd64"; - hash = - if pkgs.stdenv.isLinux then - "sha256-FBqOJ0UQWTv2r4HWMHrR/aqFzDa0ej/mS8dSoaCe6fY=" - else - "sha256-3fRKWO0SCCa5PEym5yCB7dtyEx3xSxXSHfJYz8B+/4M="; - } - else if pkgs.stdenv.hostPlatform.isAarch64 then - { - arch = "arm64"; - hash = - if pkgs.stdenv.isLinux then - "sha256-A6twO8Ievbu7Gi5Hqon4ug5rCGOm/uHhlCya3px6+io=" - else - "sha256-n363xLaGhy+a6lw2F+WicQYGXnGYnqRW8aTQCSppwcw="; - } - else - throw "nsc: unsupported 
host platform ${pkgs.stdenv.hostPlatform.system}"; - src = pkgs.fetchurl { - url = "https://github.com/namespacelabs/foundation/releases/download/v${version}/nsc_${version}_${osName}_${archInfo.arch}.tar.gz"; - sha256 = archInfo.hash; - }; - in - pkgs.stdenvNoCC.mkDerivation { - pname = "nsc"; - inherit version src; - meta.mainProgram = "nsc"; - dontConfigure = true; - dontBuild = true; - unpackPhase = '' - tar -xzf "$src" - ''; - installPhase = '' - install -d "$out/bin" - install -m 0555 nsc "$out/bin/nsc" - install -m 0555 docker-credential-nsc "$out/bin/docker-credential-nsc" - install -m 0555 bazel-credential-nsc "$out/bin/bazel-credential-nsc" - ''; - } - else - null; - hcloudUploadImagePkg = pkgs.buildGoModule { - pname = "hcloud-upload-image"; - version = "1.3.0"; - src = hcloud-upload-image-src; - vendorHash = "sha256-IdOAUBPg0CEuHd2rdc7jOlw0XtnAhr3PVPJbnFs2+x4="; - subPackages = [ "." ]; - env.GOWORK = "off"; - ldflags = [ - "-s" - "-w" - ]; - }; - forgejoNscSrc = lib.cleanSourceWith { - src = ./services/forgejo-nsc; - filter = path: type: - let - p = toString path; - name = builtins.baseNameOf path; - hasDir = dir: lib.hasInfix "/${dir}/" p || lib.hasSuffix "/${dir}" p; - in - !(hasDir ".git" || hasDir "vendor" || hasDir "node_modules" || name == "result"); - }; - forgejoNscDispatcher = pkgs.buildGoModule { - pname = "forgejo-nsc-dispatcher"; - version = "0.1.0"; - src = forgejoNscSrc; - subPackages = [ "./cmd/forgejo-nsc-dispatcher" ]; - vendorHash = "sha256-Kpr+5Q7Dy4JiLuJVZbFeJAzLR7PLPYxhtJqfxMEytcs="; - }; - forgejoNscAutoscaler = pkgs.buildGoModule { - pname = "forgejo-nsc-autoscaler"; - version = "0.1.0"; - src = forgejoNscSrc; - subPackages = [ "./cmd/forgejo-nsc-autoscaler" ]; - vendorHash = "sha256-Kpr+5Q7Dy4JiLuJVZbFeJAzLR7PLPYxhtJqfxMEytcs="; - }; - burrowSrc = lib.cleanSourceWith { - src = ./.; - filter = path: type: - let - p = toString path; - name = builtins.baseNameOf path; - hasDir = dir: lib.hasInfix "/${dir}/" p || lib.hasSuffix 
"/${dir}" p; - in - !(hasDir ".git" || hasDir "target" || hasDir "node_modules" || name == "result"); - }; - burrowPkg = pkgs.rustPlatform.buildRustPackage { - pname = "burrow"; - version = "0.1.0"; - src = burrowSrc; - cargoLock = { - lockFile = ./Cargo.lock; - outputHashes = { - "tracing-oslog-0.1.2" = "sha256-DjJDiPCTn43zJmmOfuRnyti8iQf9qoXICMKIx4bAG3I="; - }; - }; - cargoBuildFlags = [ - "-p" - "burrow" - "--bin" - "burrow" - ]; - nativeBuildInputs = [ pkgs.protobuf ]; - meta.mainProgram = "burrow"; - }; - in - { - devShells.default = pkgs.mkShell { - packages = - commonPackages - ++ [ - hcloudUploadImagePkg - forgejoNscDispatcher - forgejoNscAutoscaler - ] - ++ lib.optionals (nscPkg != null) [ nscPkg ]; - }; - - devShells.ci = pkgs.mkShell { - packages = - commonPackages - ++ [ - hcloudUploadImagePkg - ] - ++ lib.optionals (nscPkg != null) [ nscPkg ]; - }; - - formatter = pkgs.nixpkgs-fmt; - - packages = - { - agenix = agenix.packages.${system}.agenix; - burrow = burrowPkg; - hcloud-upload-image = hcloudUploadImagePkg; - forgejo-nsc-dispatcher = forgejoNscDispatcher; - forgejo-nsc-autoscaler = forgejoNscAutoscaler; - } - // lib.optionalAttrs (nscPkg != null) { nsc = nscPkg; }; - })) - // { - nixosModules.burrow-forge = import ./nixos/modules/burrow-forge.nix; - nixosModules.burrow-forge-runner = import ./nixos/modules/burrow-forge-runner.nix; - nixosModules.burrow-forgejo-nsc = nsc-autoscaler.nixosModules.default; - nixosModules.burrow-authentik = import ./nixos/modules/burrow-authentik.nix; - nixosModules.burrow-headscale = import ./nixos/modules/burrow-headscale.nix; - nixosModules.burrow-zulip = import ./nixos/modules/burrow-zulip.nix; - nixosConfigurations.burrow-forge = nixpkgs.lib.nixosSystem { - system = "x86_64-linux"; - specialArgs = { - inherit self; - }; - modules = [ - agenix.nixosModules.default - disko.nixosModules.disko - ./nixos/hosts/burrow-forge/default.nix - ]; - }; - - images = { - burrow-forge-raw = 
self.nixosConfigurations.burrow-forge.config.system.build.diskoImages; - }; - }; -} diff --git a/nixos/README.md b/nixos/README.md deleted file mode 100644 index 23907f3..0000000 --- a/nixos/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# Burrow Forge Runbook - -This directory contains the Burrow forge host definition and the Hetzner bootstrap shape for `burrow-forge`. - -Mail hosting is intentionally not part of this NixOS host in the current plan. Burrow's first mail path is Forward Email with Burrow-owned custom S3 backups; see [`docs/FORWARDEMAIL.md`](../docs/FORWARDEMAIL.md). - -## Files - -- `hosts/burrow-forge/default.nix`: host entrypoint -- `modules/burrow-forge.nix`: Forgejo, Caddy, PostgreSQL, and admin bootstrap module -- `modules/burrow-forge-runner.nix`: Forgejo Actions runner and agent identity bootstrap -- upstream `compatible.systems/conrad/nsc-autoscaler`: Namespace-backed ephemeral Forgejo runner module consumed via the Burrow flake input -- `modules/burrow-authentik.nix`: minimal Authentik IdP for Burrow control planes -- `modules/burrow-headscale.nix`: Headscale control plane rooted in Authentik OIDC -- `../secrets.nix`: agenix recipient map for tracked Burrow forge secrets -- `hetzner-cloud-config.yaml`: desired Hetzner host shape -- `keys/contact_at_burrow_net.pub`: initial operator SSH public key -- `keys/agent_at_burrow_net.pub`: automation SSH public key -- `../Scripts/hetzner-forge.sh`: Hetzner inventory and replace workflow -- `../Scripts/nsc-build-and-upload-image.sh`: temporary Namespace builder -> raw image -> Hetzner snapshot -- `../Scripts/bootstrap-forge-intake.sh`: copy the Forgejo bootstrap password and agent SSH key into `/var/lib/burrow/intake/` -- `../Scripts/check-forge-host.sh`: verify Forgejo, Caddy, the local runner, optional NSC services, and optional Tailnet services after boot -- `../Scripts/cloudflare-upsert-a-record.sh`: upsert DNS-only Cloudflare `A` records for Burrow host cutovers -- `../Scripts/forge-deploy.sh`: 
remote `nixos-rebuild` entrypoint for the forge host -- `../Scripts/provision-forgejo-nsc.sh`: render Burrow Namespace dispatcher/autoscaler runtime inputs and ensure the default Forgejo scope exists -- `../Scripts/seal-forgejo-nsc-secrets.sh`: encrypt forgejo-nsc runtime inputs into the agenix secrets consumed by `burrow-forge` - -## Intended Flow - -1. Build and upload the raw NixOS image with `Scripts/hetzner-forge.sh build-image` or `Scripts/nsc-build-and-upload-image.sh`. -2. Recreate `burrow-forge` from the latest labeled snapshot with `Scripts/hetzner-forge.sh recreate-from-image --yes`. -3. Run `Scripts/bootstrap-forge-intake.sh` to place the Forgejo bootstrap password file and automation SSH key under `/var/lib/burrow/intake/`. -4. Let `burrow-forgejo-bootstrap.service` create or rotate the initial Forgejo admin account. -5. Let `burrow-forgejo-runner-bootstrap.service` register the self-hosted Forgejo runner and seed Git identity as `agent `. -6. Run `Scripts/provision-forgejo-nsc.sh` locally to refresh `intake/forgejo_nsc_token.txt`, `intake/forgejo_nsc_dispatcher.yaml`, and `intake/forgejo_nsc_autoscaler.yaml`. -7. Run `Scripts/seal-forgejo-nsc-secrets.sh` to encrypt those runtime inputs into the agenix secrets used by `burrow-forge`. -8. Ensure `/var/lib/agenix/agenix.key` exists on the host, encrypt `secrets/infra/authentik.env.age`, `secrets/infra/authentik-google-client-id.age`, `secrets/infra/authentik-google-client-secret.age`, `secrets/infra/forgejo-oidc-client-secret.age`, `secrets/infra/headscale-oidc-client-secret.age`, `secrets/infra/forgejo-nsc-token.age`, `secrets/infra/forgejo-nsc-dispatcher-config.age`, and `secrets/infra/forgejo-nsc-autoscaler-config.age`, and let agenix materialize them under `/run/agenix/`. -9. Use `Scripts/cloudflare-upsert-a-record.sh` to point `git.burrow.net`, `burrow.net`, `auth.burrow.net`, `ts.burrow.net`, and `nsc-autoscaler.burrow.net` at the host with Cloudflare proxying disabled for ACME. -10. 
Use `Scripts/forge-deploy.sh --allow-dirty` for subsequent remote `nixos-rebuild` runs from the live workspace. -11. Configure Forward Email custom S3 backups for `burrow.net` and `burrow.rs` out-of-band with `Tools/forwardemail-custom-s3.sh`. - -## Current Constraints - -- `burrow-forge` is live on NixOS in `hel1` at `89.167.47.21`. -- `services.forgejo-nsc` now expects agenix-backed runtime inputs at `/run/agenix/burrowForgejoNscToken`, `/run/agenix/burrowForgejoNscDispatcherConfig`, and `/run/agenix/burrowForgejoNscAutoscalerConfig`. -- Authentik and Headscale secrets now live in tracked agenix blobs under `secrets/infra/` and decrypt to `/run/agenix/` on the forge host. -- Public Burrow forge cutover completed on March 15, 2026: - - `burrow.net`, `git.burrow.net`, and `nsc-autoscaler.burrow.net` now publish public `A` records to `89.167.47.21` - - HTTP redirects to HTTPS on all three names - - `https://burrow.net` returns the root forge landing response - - `https://git.burrow.net` returns the live Forgejo front door - - `https://nsc-autoscaler.burrow.net` terminates TLS on Caddy and returns the expected application-level `404` for `/` -- The Cloudflare token currently in `intake/cloudflare-token.txt` is an account-scoped token: `POST /accounts//tokens/verify` succeeds, while `POST /user/tokens/verify` returns `Invalid API Token`. -- `burrow.rs` still resolves publicly to a Vercel `DEPLOYMENT_NOT_FOUND` response. -- Both domains publish Forward Email MX/TXT records. -- Forward Email custom S3 is live on both domains against the Hetzner `burrow` bucket and the public regional endpoint `https://hel1.your-objectstorage.com`. -- The current Hetzner account contains both: - - the older Ubuntu bootstrap server in `hil` - - the live `burrow-forge` NixOS server in `hel1` -- The remaining forge work is follow-on product/integration work, not host bring-up, mail backup wiring, or public DNS cutover. 
diff --git a/nixos/hetzner-cloud-config.yaml b/nixos/hetzner-cloud-config.yaml deleted file mode 100644 index 7334b3a..0000000 --- a/nixos/hetzner-cloud-config.yaml +++ /dev/null @@ -1,10 +0,0 @@ -name: burrow-forge -server_type: ccx23 -location: hel1 -image: ubuntu-24.04 -ssh_keys: - - contact@burrow.net - - agent@burrow.net -labels: - project: burrow - role: forge diff --git a/nixos/hosts/burrow-forge/default.nix b/nixos/hosts/burrow-forge/default.nix deleted file mode 100644 index c4fc92e..0000000 --- a/nixos/hosts/burrow-forge/default.nix +++ /dev/null @@ -1,275 +0,0 @@ -{ config, lib, pkgs, self, ... }: - -let - contributors = import ../../../contributors.nix; - identities = contributors.identities; - linearGroups = contributors.groups.linear; - stripNewline = value: lib.replaceStrings [ "\n" ] [ "" ] value; - authentikPasswordSecretPath = identity: - if identity ? authentikPasswordSecret - then config.age.secrets.${identity.authentikPasswordSecret}.path - else null; - bootstrapUsers = lib.mapAttrsToList - ( - username: identity: { - inherit username; - name = identity.displayName; - email = identity.canonicalEmail; - isAdmin = identity.isAdmin or false; - groups = lib.optionals (identity.isAdmin or false) [ linearGroups.owners ]; - passwordFile = authentikPasswordSecretPath identity; - } - ) - (lib.filterAttrs (_: identity: identity.bootstrapAuthentik or false) identities); - headscaleBootstrapUsers = lib.mapAttrsToList - ( - username: identity: { - name = username; - displayName = identity.displayName; - email = identity.canonicalEmail; - } - ) - (lib.filterAttrs (_: identity: identity.bootstrapAuthentik or false) identities); - forgeUnixUsernames = - builtins.attrNames (lib.filterAttrs (_: identity: identity.forgeUnixUser or false) identities); - forgeUnixUsers = lib.genAttrs forgeUnixUsernames (username: - let - identity = identities.${username}; - sshKeys = lib.optional (identity ? 
sshPublicKeyPath) (stripNewline (builtins.readFile identity.sshPublicKeyPath)); - in - { - isNormalUser = true; - createHome = true; - home = "/home/${username}"; - shell = pkgs.bashInteractive; - extraGroups = lib.optional (identity.isAdmin or false) "wheel"; - openssh.authorizedKeys.keys = sshKeys; - }); - forgeUnixAdminUsernames = - builtins.attrNames (lib.filterAttrs (_: identity: (identity.forgeUnixUser or false) && (identity.isAdmin or false)) identities); - forgeAuthorizedKeys = map - (username: builtins.readFile identities.${username}.sshPublicKeyPath) - (builtins.attrNames (lib.filterAttrs (_: identity: identity.forgeAuthorized or false) identities)); -in - -{ - imports = [ - ./hardware-configuration.nix - ./disko-config.nix - self.nixosModules.burrow-forge - self.nixosModules.burrow-forge-runner - self.nixosModules.burrow-forgejo-nsc - self.nixosModules.burrow-authentik - self.nixosModules.burrow-headscale - self.nixosModules.burrow-zulip - ]; - - system.stateVersion = "24.11"; - - time.timeZone = "America/Los_Angeles"; - - nix.settings.experimental-features = [ - "nix-command" - "flakes" - ]; - - users.users = forgeUnixUsers; - - security.sudo.extraRules = lib.map (username: { - users = [ username ]; - commands = [ - { - command = "ALL"; - options = [ "NOPASSWD" ]; - } - ]; - }) forgeUnixAdminUsernames; - - environment.systemPackages = lib.optionals config.services.forgejo-nsc.enable [ - self.packages.${pkgs.stdenv.hostPlatform.system}.nsc - ]; - - age.identityPaths = [ "/var/lib/agenix/agenix.key" ]; - age.secrets.burrowAuthentikEnv = { - file = ../../../secrets/infra/authentik.env.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowHeadscaleOidcClientSecret = { - file = ../../../secrets/infra/headscale-oidc-client-secret.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowForgejoOidcClientSecret = { - file = ../../../secrets/infra/forgejo-oidc-client-secret.age; - owner = "forgejo"; - group 
= "forgejo"; - mode = "0440"; - }; - age.secrets.burrowTailscaleOidcClientSecret = { - file = ../../../secrets/infra/tailscale-oidc-client-secret.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowLinearScimToken = { - file = ../../../secrets/infra/linear-scim-token.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowAuthentikGoogleClientId = { - file = ../../../secrets/infra/authentik-google-client-id.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowAuthentikGoogleClientSecret = { - file = ../../../secrets/infra/authentik-google-client-secret.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowAuthentikGoogleAccountMap = { - file = ../../../secrets/infra/authentik-google-account-map.json.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowAuthentikUiTestPassword = { - file = ../../../secrets/infra/authentik-ui-test-password.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - age.secrets.burrowForgejoNscToken = { - file = ../../../secrets/infra/forgejo-nsc-token.age; - owner = "forgejo-nsc"; - group = "forgejo-nsc"; - mode = "0400"; - }; - age.secrets.burrowForgejoNscDispatcherConfig = { - file = ../../../secrets/infra/forgejo-nsc-dispatcher-config.age; - owner = "forgejo-nsc"; - group = "forgejo-nsc"; - mode = "0400"; - }; - age.secrets.burrowForgejoNscAutoscalerConfig = { - file = ../../../secrets/infra/forgejo-nsc-autoscaler-config.age; - owner = "forgejo-nsc"; - group = "forgejo-nsc"; - mode = "0400"; - }; - - age.secrets.burrowZulipPostgresPassword = { - file = ../../../secrets/infra/zulip-postgres-password.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - - age.secrets.burrowZulipRabbitmqPassword = { - file = ../../../secrets/infra/zulip-rabbitmq-password.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - - age.secrets.burrowZulipRedisPassword = { - file = 
../../../secrets/infra/zulip-redis-password.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - - age.secrets.burrowZulipSecretKey = { - file = ../../../secrets/infra/zulip-secret-key.age; - owner = "root"; - group = "root"; - mode = "0400"; - }; - - networking.extraHosts = '' - 127.0.0.1 burrow.net git.burrow.net auth.burrow.net ts.burrow.net chat.burrow.net nsc-autoscaler.burrow.net - ::1 burrow.net git.burrow.net auth.burrow.net ts.burrow.net chat.burrow.net nsc-autoscaler.burrow.net - ''; - - services.burrow.forge = { - enable = true; - contactEmail = identities.contact.canonicalEmail; - adminUsername = "contact"; - adminEmail = identities.contact.canonicalEmail; - adminPasswordFile = "/var/lib/burrow/intake/forgejo_pass_contact_at_burrow_net.txt"; - oidcAdminGroup = contributors.groups.admins; - oidcRestrictedGroup = contributors.groups.users; - oidcClientSecretFile = config.age.secrets.burrowForgejoOidcClientSecret.path; - authorizedKeys = forgeAuthorizedKeys; - }; - - services.burrow.forgeRunner = { - enable = true; - sshPrivateKeyFile = "/var/lib/burrow/intake/agent_at_burrow_net_ed25519"; - labels = [ - "self-hosted" - "linux" - "x86_64" - "burrow-forge" - ]; - }; - - services.forgejo-nsc = { - enable = true; - nscTokenFile = config.age.secrets.burrowForgejoNscToken.path; - dispatcher = { - configFile = config.age.secrets.burrowForgejoNscDispatcherConfig.path; - }; - autoscaler = { - enable = true; - configFile = config.age.secrets.burrowForgejoNscAutoscalerConfig.path; - }; - }; - - services.burrow.authentik = { - enable = true; - envFile = config.age.secrets.burrowAuthentikEnv.path; - forgejoClientSecretFile = config.age.secrets.burrowForgejoOidcClientSecret.path; - headscaleClientSecretFile = config.age.secrets.burrowHeadscaleOidcClientSecret.path; - tailscaleClientSecretFile = config.age.secrets.burrowTailscaleOidcClientSecret.path; - defaultExternalApplicationSlug = "tailscale"; - googleClientIDFile = 
config.age.secrets.burrowAuthentikGoogleClientId.path; - googleClientSecretFile = config.age.secrets.burrowAuthentikGoogleClientSecret.path; - googleAccountMapFile = config.age.secrets.burrowAuthentikGoogleAccountMap.path; - googleLoginMode = "redirect"; - userGroupName = contributors.groups.users; - adminGroupName = contributors.groups.admins; - tailscaleAccessGroupName = contributors.groups.users; - bootstrapUsers = bootstrapUsers; - linearAcsUrl = "https://api.linear.app/auth/sso/d0ca13dc-ac41-4824-8aab-e0ca352fc3de/acs"; - linearAudience = "https://auth.linear.app/sso/d0ca13dc-ac41-4824-8aab-e0ca352fc3de"; - linearDefaultRelayState = "https://linear.app/auth/sso/d0ca13dc-ac41-4824-8aab-e0ca352fc3de"; - linearScimUrl = "https://api.linear.app/auth/scim/d0ca13dc-ac41-4824-8aab-e0ca352fc3de"; - linearScimTokenFile = config.age.secrets.burrowLinearScimToken.path; - linearScimUserIdentifier = "email"; - linearOwnerGroupName = linearGroups.owners; - linearAdminGroupName = linearGroups.admins; - linearGuestGroupName = linearGroups.guests; - zulipAccessGroupName = contributors.groups.users; - }; - - services.burrow.headscale = { - enable = true; - oidcClientSecretFile = config.age.secrets.burrowHeadscaleOidcClientSecret.path; - bootstrapUsers = headscaleBootstrapUsers; - }; - - services.burrow.zulip = { - enable = true; - administratorEmail = identities.contact.canonicalEmail; - postgresPasswordFile = config.age.secrets.burrowZulipPostgresPassword.path; - rabbitmqPasswordFile = config.age.secrets.burrowZulipRabbitmqPassword.path; - redisPasswordFile = config.age.secrets.burrowZulipRedisPassword.path; - secretKeyFile = config.age.secrets.burrowZulipSecretKey.path; - }; -} diff --git a/nixos/hosts/burrow-forge/disko-config.nix b/nixos/hosts/burrow-forge/disko-config.nix deleted file mode 100644 index d001422..0000000 --- a/nixos/hosts/burrow-forge/disko-config.nix +++ /dev/null @@ -1,36 +0,0 @@ -{ lib, ... 
}: - -{ - disko.devices = { - disk.main = { - type = "disk"; - device = lib.mkDefault "/dev/sda"; - imageName = "burrow-forge"; - imageSize = "80G"; - content = { - type = "gpt"; - partitions = { - ESP = { - size = "512M"; - type = "EF00"; - content = { - type = "filesystem"; - format = "vfat"; - mountpoint = "/boot"; - mountOptions = [ "umask=0077" ]; - }; - }; - - root = { - size = "100%"; - content = { - type = "filesystem"; - format = "ext4"; - mountpoint = "/"; - }; - }; - }; - }; - }; - }; -} diff --git a/nixos/hosts/burrow-forge/hardware-configuration.nix b/nixos/hosts/burrow-forge/hardware-configuration.nix deleted file mode 100644 index 27490e4..0000000 --- a/nixos/hosts/burrow-forge/hardware-configuration.nix +++ /dev/null @@ -1,11 +0,0 @@ -{ ... }: - -{ - # Derived from Hetzner Cloud rescue-mode hardware inspection. - boot.initrd.availableKernelModules = [ - "ahci" - "sd_mod" - "virtio_pci" - "virtio_scsi" - ]; -} diff --git a/nixos/keys/agent_at_burrow_net.pub b/nixos/keys/agent_at_burrow_net.pub deleted file mode 100644 index de447b8..0000000 --- a/nixos/keys/agent_at_burrow_net.pub +++ /dev/null @@ -1 +0,0 @@ -ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEN0+tRJy7Y2DW0uGYHb86N2t02WyU5lDNX6FaxBF/G8 agent@burrow.net diff --git a/nixos/keys/contact_at_burrow_net.pub b/nixos/keys/contact_at_burrow_net.pub deleted file mode 100644 index 0daa6a3..0000000 --- a/nixos/keys/contact_at_burrow_net.pub +++ /dev/null @@ -1 +0,0 @@ -ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIO42guJ5QvNMw3k6YKWlQnjcTsc+X4XI9F2GBtl8aHOa diff --git a/nixos/keys/jett_at_burrow_net.pub b/nixos/keys/jett_at_burrow_net.pub deleted file mode 100644 index 36c85ee..0000000 --- a/nixos/keys/jett_at_burrow_net.pub +++ /dev/null @@ -1 +0,0 @@ -ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMe960j6TC869F6RvElpICxlBauIT3E0uLyy0m7n70ZC diff --git a/nixos/modules/burrow-authentik.nix b/nixos/modules/burrow-authentik.nix deleted file mode 100644 index 977b641..0000000 --- a/nixos/modules/burrow-authentik.nix +++ /dev/null 
@@ -1,1072 +0,0 @@ -{ config, lib, pkgs, ... }: - -let - cfg = config.services.burrow.authentik; - runtimeDir = "/run/burrow-authentik"; - envFile = "${runtimeDir}/authentik.env"; - blueprintDir = "${runtimeDir}/blueprints"; - blueprintFile = "${blueprintDir}/burrow-authentik.yaml"; - postgresVolume = "burrow-authentik-postgresql:/var/lib/postgresql/data"; - dataVolume = "burrow-authentik-data:/data"; - directorySyncScript = ../../Scripts/authentik-sync-burrow-directory.sh; - forgejoOidcSyncScript = ../../Scripts/authentik-sync-forgejo-oidc.sh; - tailscaleOidcSyncScript = ../../Scripts/authentik-sync-tailscale-oidc.sh; - onePasswordOidcSyncScript = ../../Scripts/authentik-sync-1password-oidc.sh; - zulipSamlSyncScript = ../../Scripts/authentik-sync-zulip-saml.sh; - linearSamlSyncScript = ../../Scripts/authentik-sync-linear-saml.sh; - linearScimSyncScript = ../../Scripts/authentik-sync-linear-scim.sh; - googleSourceSyncScript = ../../Scripts/authentik-sync-google-source.sh; - tailnetAuthFlowSyncScript = ../../Scripts/authentik-sync-tailnet-auth-flow.sh; - authentikBlueprint = pkgs.writeText "burrow-authentik-blueprint.yaml" '' - version: 1 - metadata: - name: Burrow Authentik - labels: - blueprints.goauthentik.io/description: Minimal Burrow Authentik applications - entries: - - model: authentik_providers_oauth2.scopemapping - id: burrow-oidc-email - identifiers: - name: Burrow OIDC Email - attrs: - name: Burrow OIDC Email - scope_name: email - description: Verified email mapping for Burrow - expression: | - return { - "email": request.user.email, - "email_verified": True, - } - - - model: authentik_providers_oauth2.scopemapping - id: burrow-oidc-groups - identifiers: - name: Burrow OIDC Groups - attrs: - name: Burrow OIDC Groups - scope_name: groups - description: Group membership mapping for Burrow - expression: | - return { - "groups": [group.name for group in request.user.ak_groups.all()], - } - - - model: authentik_providers_oauth2.oauth2provider - id: 
burrow-oidc-provider-ts - identifiers: - name: Burrow Tailnet - attrs: - authorization_flow: !Find [authentik_flows.flow, [slug, default-provider-authorization-implicit-consent]] - invalidation_flow: !Find [authentik_flows.flow, [slug, default-provider-invalidation-flow]] - issuer_mode: per_provider - slug: ${cfg.headscaleProviderSlug} - client_type: confidential - client_id: ${cfg.headscaleDomain} - client_secret: !Env [AUTHENTIK_BURROW_TS_CLIENT_SECRET, ""] - include_claims_in_id_token: true - redirect_uris: - - matching_mode: strict - url: https://${cfg.headscaleDomain}/oidc/callback - property_mappings: - - !Find [authentik_providers_oauth2.scopemapping, [managed, goauthentik.io/providers/oauth2/scope-openid]] - - !KeyOf burrow-oidc-email - - !KeyOf burrow-oidc-groups - - !Find [authentik_providers_oauth2.scopemapping, [managed, goauthentik.io/providers/oauth2/scope-profile]] - signing_key: !Find [authentik_crypto.certificatekeypair, [name, authentik Self-signed Certificate]] - - - model: authentik_core.application - identifiers: - slug: ${cfg.headscaleProviderSlug} - attrs: - name: Burrow Tailnet - slug: ${cfg.headscaleProviderSlug} - provider: !KeyOf burrow-oidc-provider-ts - meta_launch_url: https://${cfg.headscaleDomain}/ - ''; -in -{ - options.services.burrow.authentik = { - enable = lib.mkEnableOption "the Burrow Authentik identity provider"; - - domain = lib.mkOption { - type = lib.types.str; - default = "auth.burrow.net"; - description = "Public Authentik domain."; - }; - - port = lib.mkOption { - type = lib.types.port; - default = 9002; - description = "Local Authentik HTTP listen port."; - }; - - image = lib.mkOption { - type = lib.types.str; - default = "ghcr.io/goauthentik/server:2026.2.1"; - description = "Authentik container image reference."; - }; - - envFile = lib.mkOption { - type = lib.types.str; - default = "/var/lib/burrow/intake/authentik.env"; - description = "Host-local Authentik bootstrap environment file."; - }; - - headscaleDomain = 
lib.mkOption { - type = lib.types.str; - default = "ts.burrow.net"; - description = "Headscale public domain used for the bundled OIDC client."; - }; - - headscaleProviderSlug = lib.mkOption { - type = lib.types.str; - default = "ts"; - description = "Authentik provider slug for Headscale."; - }; - - forgejoDomain = lib.mkOption { - type = lib.types.str; - default = "git.burrow.net"; - description = "Forgejo public domain used for the bundled OIDC client."; - }; - - forgejoProviderSlug = lib.mkOption { - type = lib.types.str; - default = "git"; - description = "Authentik application slug for Forgejo."; - }; - - tailscaleProviderSlug = lib.mkOption { - type = lib.types.str; - default = "tailscale"; - description = "Authentik application slug for Tailscale custom OIDC sign-in."; - }; - - tailscaleClientId = lib.mkOption { - type = lib.types.str; - default = "tailscale.burrow.net"; - description = "Client ID Authentik should present to Tailscale."; - }; - - tailscaleClientSecretFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Host-local file containing the Authentik Tailscale OIDC client secret."; - }; - - tailscaleAccessGroupName = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Authentik group that should be allowed to launch the Tailscale application."; - }; - - defaultExternalApplicationSlug = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Authentik application slug that external users should land on instead of /if/user/."; - }; - - onePasswordDomain = lib.mkOption { - type = lib.types.str; - default = "burrow-team.1password.com"; - description = "1Password team sign-in domain used for Burrow Unlock with SSO."; - }; - - onePasswordProviderSlug = lib.mkOption { - type = lib.types.str; - default = "onepassword"; - description = "Authentik application slug for 1Password Unlock with SSO."; - }; - - onePasswordClientId = lib.mkOption { 
- type = lib.types.str; - default = "1password.burrow.net"; - description = "Public OIDC client ID Authentik should present to 1Password."; - }; - - onePasswordRedirectUris = lib.mkOption { - type = lib.types.listOf lib.types.str; - default = [ - "https://burrow-team.1password.com/sso/oidc/redirect/" - "onepassword://sso/oidc/redirect" - ]; - description = "Allowed 1Password OIDC redirect URIs."; - }; - - linearProviderSlug = lib.mkOption { - type = lib.types.str; - default = "linear"; - description = "Authentik application slug for Linear SAML."; - }; - - zulipDomain = lib.mkOption { - type = lib.types.str; - default = "chat.burrow.net"; - description = "Public Zulip domain exposed through Authentik SAML."; - }; - - zulipProviderSlug = lib.mkOption { - type = lib.types.str; - default = "zulip"; - description = "Authentik application slug for Zulip SAML."; - }; - - zulipAcsUrl = lib.mkOption { - type = lib.types.str; - default = "https://${config.services.burrow.authentik.zulipDomain}/complete/saml/"; - description = "Zulip SAML ACS URL."; - }; - - zulipAudience = lib.mkOption { - type = lib.types.str; - default = "https://${config.services.burrow.authentik.zulipDomain}"; - description = "Zulip SAML audience/entity identifier."; - }; - - zulipLaunchUrl = lib.mkOption { - type = lib.types.str; - default = "https://${config.services.burrow.authentik.zulipDomain}/"; - description = "Zulip URL exposed in Authentik."; - }; - - zulipAccessGroupName = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Authentik group allowed to launch Zulip from Burrow SSO surfaces."; - }; - - linearAcsUrl = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Linear SAML ACS URL."; - }; - - linearAudience = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Linear SAML audience/entity identifier."; - }; - - linearLaunchUrl = lib.mkOption { - type = lib.types.str; - 
default = "https://linear.app/burrownet"; - description = "Linear workspace URL exposed in Authentik."; - }; - - linearDefaultRelayState = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Optional Linear relay state or login URL for IdP-initiated launches."; - }; - - linearScimUrl = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Linear SCIM base connector URL."; - }; - - linearScimTokenFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Host-local file containing the Linear SCIM bearer token."; - }; - - linearScimUserIdentifier = lib.mkOption { - type = lib.types.str; - default = "email"; - description = "Linear SCIM unique identifier field for users."; - }; - - linearOwnerGroupName = lib.mkOption { - type = lib.types.str; - default = "linear-owners"; - description = "Authentik group name that should map to Linear owners."; - }; - - linearAdminGroupName = lib.mkOption { - type = lib.types.str; - default = "linear-admins"; - description = "Authentik group name that should map to Linear admins."; - }; - - linearGuestGroupName = lib.mkOption { - type = lib.types.str; - default = "linear-guests"; - description = "Authentik group name that should map to Linear guests."; - }; - - forgejoClientId = lib.mkOption { - type = lib.types.str; - default = "git.burrow.net"; - description = "Client ID Authentik should present to Forgejo."; - }; - - forgejoClientSecretFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Host-local file containing the Authentik Forgejo OIDC client secret."; - }; - - headscaleClientSecretFile = lib.mkOption { - type = lib.types.str; - default = "/var/lib/burrow/intake/authentik_headscale_client_secret.txt"; - description = "Host-local file containing the Authentik Headscale OIDC client secret."; - }; - - googleClientIDFile = lib.mkOption { - type = lib.types.nullOr 
lib.types.str; - default = null; - description = "Host-local file containing the Google OAuth client ID for the Authentik source."; - }; - - googleClientSecretFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Host-local file containing the Google OAuth client secret for the Authentik source."; - }; - - googleAccountMapFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Optional host-local JSON file mapping external Google accounts onto Burrow Authentik users."; - }; - - googleSourceSlug = lib.mkOption { - type = lib.types.str; - default = "google"; - description = "Authentik OAuth source slug used for Google login."; - }; - - googleLoginMode = lib.mkOption { - type = lib.types.enum [ - "promoted" - "redirect" - ]; - default = "redirect"; - description = "Identification-stage behavior for the Google Authentik source."; - }; - - headscaleAuthenticationFlowSlug = lib.mkOption { - type = lib.types.str; - default = "burrow-tailnet-authentication"; - description = "Authentik authentication flow slug used for Burrow Tailnet sign-in."; - }; - - headscaleAuthenticationFlowName = lib.mkOption { - type = lib.types.str; - default = "Burrow Tailnet Authentication"; - description = "Authentik authentication flow name used for Burrow Tailnet sign-in."; - }; - - headscaleIdentificationStageName = lib.mkOption { - type = lib.types.str; - default = "burrow-tailnet-identification-stage"; - description = "Authentik identification stage used for Burrow Tailnet sign-in."; - }; - - headscalePasswordStageName = lib.mkOption { - type = lib.types.str; - default = "burrow-tailnet-password-stage"; - description = "Authentik password stage used for Burrow Tailnet sign-in."; - }; - - headscaleUserLoginStageName = lib.mkOption { - type = lib.types.str; - default = "burrow-tailnet-user-login-stage"; - description = "Authentik user-login stage used for Burrow Tailnet sign-in."; - }; - - userGroupName = 
lib.mkOption { - type = lib.types.str; - default = "burrow-users"; - description = "Authentik group granted baseline Burrow access."; - }; - - adminGroupName = lib.mkOption { - type = lib.types.str; - default = "burrow-admins"; - description = "Authentik group granted Burrow administrator access."; - }; - - bootstrapUsers = lib.mkOption { - type = with lib.types; listOf (submodule { - options = { - username = lib.mkOption { - type = str; - description = "Authentik username."; - }; - name = lib.mkOption { - type = str; - description = "Display name for the user."; - }; - email = lib.mkOption { - type = str; - description = "Canonical email stored in Authentik."; - }; - sourceEmail = lib.mkOption { - type = nullOr str; - default = null; - description = "External Google account email that should map onto this Authentik user."; - }; - groups = lib.mkOption { - type = listOf str; - default = [ ]; - description = "Additional Authentik groups for this user."; - }; - isAdmin = lib.mkOption { - type = bool; - default = false; - description = "Whether this user should be in the Burrow admin group."; - }; - passwordFile = lib.mkOption { - type = nullOr str; - default = null; - description = "Optional host-local file containing a bootstrap password for this user."; - }; - }; - }); - default = [ ]; - description = "Declarative Burrow users to create in Authentik."; - }; - }; - - config = lib.mkIf cfg.enable { - virtualisation.podman.enable = true; - - systemd.tmpfiles.rules = [ - "d ${runtimeDir} 0750 root root -" - "d ${blueprintDir} 0750 root root -" - ]; - - systemd.services.burrow-authentik-runtime = { - description = "Render the Burrow Authentik runtime environment"; - before = [ - "podman-burrow-authentik-postgresql.service" - "podman-burrow-authentik-server.service" - "podman-burrow-authentik-worker.service" - ]; - wantedBy = [ - "podman-burrow-authentik-postgresql.service" - "podman-burrow-authentik-server.service" - "podman-burrow-authentik-worker.service" - ]; - after 
= lib.optionals config.services.burrow.headscale.enable [ - "burrow-headscale-client-secret.service" - ]; - wants = lib.optionals config.services.burrow.headscale.enable [ - "burrow-headscale-client-secret.service" - ]; - path = [ pkgs.coreutils ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - RemainAfterExit = true; - }; - script = '' - set -euo pipefail - - if [ ! -s ${lib.escapeShellArg cfg.envFile} ]; then - echo "Authentik env file missing: ${cfg.envFile}" >&2 - exit 1 - fi - - if [ ! -s ${lib.escapeShellArg cfg.headscaleClientSecretFile} ]; then - echo "Headscale client secret missing: ${cfg.headscaleClientSecretFile}" >&2 - exit 1 - fi - - ${lib.optionalString (cfg.forgejoClientSecretFile != null) '' - if [ ! -s ${lib.escapeShellArg cfg.forgejoClientSecretFile} ]; then - echo "Forgejo client secret missing: ${cfg.forgejoClientSecretFile}" >&2 - exit 1 - fi - ''} - - ${lib.optionalString (cfg.tailscaleClientSecretFile != null) '' - if [ ! -s ${lib.escapeShellArg cfg.tailscaleClientSecretFile} ]; then - echo "Tailscale client secret missing: ${cfg.tailscaleClientSecretFile}" >&2 - exit 1 - fi - ''} - - install -d -m 0750 -o root -g root ${runtimeDir} ${blueprintDir} - install -m 0644 -o root -g root ${authentikBlueprint} ${blueprintFile} - - source ${lib.escapeShellArg cfg.envFile} - - read_secret() { - tr -d '\r\n' < "$1" - } - - cat > ${envFile} </dev/null; then - exit 0 - fi - sleep 2 - done - - echo "Authentik did not become ready on ${cfg.domain}" >&2 - exit 1 - ''; - }; - - systemd.services.burrow-authentik-google-source = lib.mkIf ( - cfg.googleClientIDFile != null && cfg.googleClientSecretFile != null - ) { - description = "Reconcile the Burrow Authentik Google OAuth source"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - googleSourceSyncScript - 
cfg.envFile - cfg.googleClientIDFile - cfg.googleClientSecretFile - ] ++ lib.optional (cfg.googleAccountMapFile != null) cfg.googleAccountMapFile; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_GOOGLE_SOURCE_SLUG=${lib.escapeShellArg cfg.googleSourceSlug} - export AUTHENTIK_GOOGLE_LOGIN_MODE=${lib.escapeShellArg cfg.googleLoginMode} - export AUTHENTIK_GOOGLE_USER_MATCHING_MODE=email_link - export AUTHENTIK_GOOGLE_CLIENT_ID="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.googleClientIDFile})" - export AUTHENTIK_GOOGLE_CLIENT_SECRET="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.googleClientSecretFile})" - if [ -n ${lib.escapeShellArg (cfg.googleAccountMapFile or "")} ]; then - export AUTHENTIK_GOOGLE_ACCOUNT_MAP_JSON="$(tr -d '\n' < ${lib.escapeShellArg (cfg.googleAccountMapFile or "/dev/null")})" - else - export AUTHENTIK_GOOGLE_ACCOUNT_MAP_JSON='${builtins.toJSON (map (user: { - source_email = user.sourceEmail; - username = user.username; - email = user.email; - name = user.name; - }) (lib.filter (user: user.sourceEmail != null) cfg.bootstrapUsers))}' - fi - - ${pkgs.bash}/bin/bash ${googleSourceSyncScript} - ''; - }; - - systemd.services.burrow-authentik-directory = lib.mkIf (cfg.bootstrapUsers != [ ]) { - description = "Reconcile Burrow Authentik users and groups"; - after = - [ - "burrow-authentik-ready.service" - "network-online.target" - ] - ++ lib.optionals (cfg.forgejoClientSecretFile != null) [ "burrow-authentik-forgejo-oidc.service" ]; - wants = - [ - "burrow-authentik-ready.service" - "network-online.target" - ] - ++ lib.optionals (cfg.forgejoClientSecretFile != null) [ "burrow-authentik-forgejo-oidc.service" ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - directorySyncScript - cfg.envFile 
- ] ++ lib.concatMap (user: lib.optional (user.passwordFile != null) user.passwordFile) cfg.bootstrapUsers; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_BURROW_USERS_GROUP=${lib.escapeShellArg cfg.userGroupName} - export AUTHENTIK_BURROW_ADMINS_GROUP=${lib.escapeShellArg cfg.adminGroupName} - export AUTHENTIK_FORGEJO_APPLICATION_SLUG=${lib.escapeShellArg cfg.forgejoProviderSlug} - export AUTHENTIK_BURROW_DIRECTORY_JSON='${builtins.toJSON (map (user: { - inherit (user) username name email isAdmin passwordFile; - groups = user.groups; - }) cfg.bootstrapUsers)}' - - ${pkgs.bash}/bin/bash ${directorySyncScript} - ''; - }; - - systemd.services.burrow-authentik-tailnet-auth-flow = { - description = "Reconcile the Burrow Tailnet authentication flow"; - after = - [ - "burrow-authentik-ready.service" - "network-online.target" - ] - ++ lib.optionals ( - cfg.googleClientIDFile != null && cfg.googleClientSecretFile != null - ) [ "burrow-authentik-google-source.service" ]; - wants = - [ - "burrow-authentik-ready.service" - "network-online.target" - ] - ++ lib.optionals ( - cfg.googleClientIDFile != null && cfg.googleClientSecretFile != null - ) [ "burrow-authentik-google-source.service" ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - tailnetAuthFlowSyncScript - cfg.envFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_TAILNET_PROVIDER_SLUG=${lib.escapeShellArg cfg.headscaleProviderSlug} - export 
AUTHENTIK_TAILNET_PROVIDER_SLUGS_JSON='["${cfg.headscaleProviderSlug}","${cfg.tailscaleProviderSlug}"]' - export AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_NAME=${lib.escapeShellArg cfg.headscaleAuthenticationFlowName} - export AUTHENTIK_TAILNET_AUTHENTICATION_FLOW_SLUG=${lib.escapeShellArg cfg.headscaleAuthenticationFlowSlug} - export AUTHENTIK_TAILNET_IDENTIFICATION_STAGE_NAME=${lib.escapeShellArg cfg.headscaleIdentificationStageName} - export AUTHENTIK_TAILNET_PASSWORD_STAGE_NAME=${lib.escapeShellArg cfg.headscalePasswordStageName} - export AUTHENTIK_TAILNET_USER_LOGIN_STAGE_NAME=${lib.escapeShellArg cfg.headscaleUserLoginStageName} - export AUTHENTIK_TAILNET_GOOGLE_SOURCE_SLUG=${lib.escapeShellArg cfg.googleSourceSlug} - - ${pkgs.bash}/bin/bash ${tailnetAuthFlowSyncScript} - ''; - }; - - systemd.services.burrow-authentik-forgejo-oidc = lib.mkIf (cfg.forgejoClientSecretFile != null) { - description = "Reconcile the Burrow Authentik Forgejo OIDC application"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - forgejoOidcSyncScript - cfg.envFile - cfg.forgejoClientSecretFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_FORGEJO_APPLICATION_SLUG=${lib.escapeShellArg cfg.forgejoProviderSlug} - export AUTHENTIK_FORGEJO_APPLICATION_NAME=burrow.net - export AUTHENTIK_FORGEJO_PROVIDER_NAME=burrow.net - export AUTHENTIK_FORGEJO_CLIENT_ID=${lib.escapeShellArg cfg.forgejoClientId} - export AUTHENTIK_FORGEJO_CLIENT_SECRET="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.forgejoClientSecretFile})" - export 
AUTHENTIK_FORGEJO_LAUNCH_URL=https://${cfg.forgejoDomain}/ - export AUTHENTIK_FORGEJO_REDIRECT_URIS_JSON='["https://${cfg.forgejoDomain}/user/oauth2/burrow.net/callback","https://${cfg.forgejoDomain}/user/oauth2/authentik/callback","https://${cfg.forgejoDomain}/user/oauth2/GitHub/callback"]' - - ${pkgs.bash}/bin/bash ${forgejoOidcSyncScript} - ''; - }; - - systemd.services.burrow-authentik-tailscale-oidc = lib.mkIf (cfg.tailscaleClientSecretFile != null) { - description = "Reconcile the Burrow Authentik Tailscale OIDC application"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - tailscaleOidcSyncScript - cfg.envFile - cfg.tailscaleClientSecretFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_TAILSCALE_APPLICATION_SLUG=${lib.escapeShellArg cfg.tailscaleProviderSlug} - export AUTHENTIK_TAILSCALE_APPLICATION_NAME=Tailscale - export AUTHENTIK_TAILSCALE_PROVIDER_NAME=Tailscale - export AUTHENTIK_TAILSCALE_TEMPLATE_SLUG=${lib.escapeShellArg cfg.headscaleProviderSlug} - export AUTHENTIK_TAILSCALE_CLIENT_ID=${lib.escapeShellArg cfg.tailscaleClientId} - export AUTHENTIK_TAILSCALE_CLIENT_SECRET="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.tailscaleClientSecretFile})" - export AUTHENTIK_TAILSCALE_LAUNCH_URL=https://login.tailscale.com/start/oidc - export AUTHENTIK_TAILSCALE_REDIRECT_URIS_JSON='["https://login.tailscale.com/a/oauth_response"]' - ${lib.optionalString (cfg.tailscaleAccessGroupName != null) '' - export AUTHENTIK_TAILSCALE_ACCESS_GROUP=${lib.escapeShellArg cfg.tailscaleAccessGroupName} - ''} - ${lib.optionalString 
(cfg.defaultExternalApplicationSlug != null) '' - export AUTHENTIK_DEFAULT_EXTERNAL_APPLICATION_SLUG=${lib.escapeShellArg cfg.defaultExternalApplicationSlug} - ''} - - ${pkgs.bash}/bin/bash ${tailscaleOidcSyncScript} - ''; - }; - - systemd.services.burrow-authentik-1password-oidc = { - description = "Reconcile the Burrow Authentik 1Password OIDC application"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - onePasswordOidcSyncScript - cfg.envFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_ONEPASSWORD_APPLICATION_SLUG=${lib.escapeShellArg cfg.onePasswordProviderSlug} - export AUTHENTIK_ONEPASSWORD_APPLICATION_NAME=1Password - export AUTHENTIK_ONEPASSWORD_PROVIDER_NAME=1Password - export AUTHENTIK_ONEPASSWORD_TEMPLATE_SLUG=${lib.escapeShellArg cfg.headscaleProviderSlug} - export AUTHENTIK_ONEPASSWORD_CLIENT_ID=${lib.escapeShellArg cfg.onePasswordClientId} - export AUTHENTIK_ONEPASSWORD_LAUNCH_URL=https://${cfg.onePasswordDomain}/ - export AUTHENTIK_ONEPASSWORD_REDIRECT_URIS_JSON='${builtins.toJSON cfg.onePasswordRedirectUris}' - - ${pkgs.bash}/bin/bash ${onePasswordOidcSyncScript} - ''; - }; - - systemd.services.burrow-authentik-zulip-saml = { - description = "Reconcile the Burrow Authentik Zulip SAML application"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - zulipSamlSyncScript - cfg.envFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - 
serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_ZULIP_APPLICATION_SLUG=${lib.escapeShellArg cfg.zulipProviderSlug} - export AUTHENTIK_ZULIP_APPLICATION_NAME=Zulip - export AUTHENTIK_ZULIP_PROVIDER_NAME=Zulip - export AUTHENTIK_ZULIP_ACS_URL=${lib.escapeShellArg cfg.zulipAcsUrl} - export AUTHENTIK_ZULIP_AUDIENCE=${lib.escapeShellArg cfg.zulipAudience} - export AUTHENTIK_ZULIP_LAUNCH_URL=${lib.escapeShellArg cfg.zulipLaunchUrl} - ${lib.optionalString (cfg.zulipAccessGroupName != null) '' - export AUTHENTIK_ZULIP_ACCESS_GROUP=${lib.escapeShellArg cfg.zulipAccessGroupName} - ''} - export AUTHENTIK_ZULIP_ADMIN_GROUP=${lib.escapeShellArg cfg.adminGroupName} - - ${pkgs.bash}/bin/bash ${zulipSamlSyncScript} - ''; - }; - - systemd.services.burrow-authentik-linear-saml = lib.mkIf ( - cfg.linearAcsUrl != null && cfg.linearAudience != null - ) { - description = "Reconcile the Burrow Authentik Linear SAML application"; - after = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - linearSamlSyncScript - cfg.envFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_LINEAR_APPLICATION_SLUG=${lib.escapeShellArg cfg.linearProviderSlug} - export AUTHENTIK_LINEAR_APPLICATION_NAME=Linear - export AUTHENTIK_LINEAR_PROVIDER_NAME=Linear - export AUTHENTIK_LINEAR_ACS_URL=${lib.escapeShellArg cfg.linearAcsUrl} - export AUTHENTIK_LINEAR_AUDIENCE=${lib.escapeShellArg cfg.linearAudience} - 
export AUTHENTIK_LINEAR_LAUNCH_URL=${lib.escapeShellArg cfg.linearLaunchUrl} - ${lib.optionalString (cfg.linearDefaultRelayState != null) '' - export AUTHENTIK_LINEAR_DEFAULT_RELAY_STATE=${lib.escapeShellArg cfg.linearDefaultRelayState} - ''} - - ${pkgs.bash}/bin/bash ${linearSamlSyncScript} - ''; - }; - - systemd.services.burrow-authentik-linear-scim = lib.mkIf ( - cfg.linearScimUrl != null && cfg.linearScimTokenFile != null - ) { - description = "Reconcile the Burrow Authentik Linear SCIM provider"; - after = [ - "burrow-authentik-ready.service" - "burrow-authentik-directory.service" - "burrow-authentik-linear-saml.service" - "network-online.target" - ]; - wants = [ - "burrow-authentik-ready.service" - "burrow-authentik-directory.service" - "burrow-authentik-linear-saml.service" - "network-online.target" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - linearScimSyncScript - cfg.envFile - cfg.linearScimTokenFile - ]; - path = [ - pkgs.bash - pkgs.coreutils - pkgs.curl - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - set -a - source ${lib.escapeShellArg cfg.envFile} - set +a - - export AUTHENTIK_URL=https://${cfg.domain} - export AUTHENTIK_LINEAR_APPLICATION_SLUG=${lib.escapeShellArg cfg.linearProviderSlug} - export AUTHENTIK_LINEAR_SCIM_PROVIDER_NAME="Linear SCIM" - export AUTHENTIK_LINEAR_SCIM_URL=${lib.escapeShellArg cfg.linearScimUrl} - export AUTHENTIK_LINEAR_SCIM_TOKEN_FILE=${lib.escapeShellArg cfg.linearScimTokenFile} - export AUTHENTIK_LINEAR_SCIM_USER_IDENTIFIER=${lib.escapeShellArg cfg.linearScimUserIdentifier} - export AUTHENTIK_LINEAR_OWNER_GROUP=${lib.escapeShellArg cfg.linearOwnerGroupName} - export AUTHENTIK_LINEAR_ADMIN_GROUP=${lib.escapeShellArg cfg.linearAdminGroupName} - export AUTHENTIK_LINEAR_GUEST_GROUP=${lib.escapeShellArg cfg.linearGuestGroupName} - - ${pkgs.bash}/bin/bash ${linearScimSyncScript} - ''; - }; - - 
services.caddy.virtualHosts."${cfg.domain}".extraConfig = '' - encode gzip zstd - reverse_proxy 127.0.0.1:${toString cfg.port} - ''; - }; -} diff --git a/nixos/modules/burrow-forge-runner.nix b/nixos/modules/burrow-forge-runner.nix deleted file mode 100644 index 034fb38..0000000 --- a/nixos/modules/burrow-forge-runner.nix +++ /dev/null @@ -1,231 +0,0 @@ -{ config, lib, pkgs, ... }: - -let - cfg = config.services.burrow.forgeRunner; - runnerPkg = pkgs.forgejo-runner; - stateDir = cfg.stateDir; - runnerFile = "${stateDir}/.runner"; - registrationFingerprintFile = "${stateDir}/.runner-registration-fingerprint"; - configFile = "${stateDir}/runner.yaml"; - labelsCsv = lib.concatStringsSep "," (map (label: "${label}:host") cfg.labels); - registrationFingerprint = builtins.hashString "sha256" "${cfg.instanceUrl}\n${cfg.name}\n${labelsCsv}"; - sshPrivateKeyFile = cfg.sshPrivateKeyFile or ""; -in -{ - options.services.burrow.forgeRunner = { - enable = lib.mkEnableOption "the Burrow Forgejo Actions runner"; - - instanceUrl = lib.mkOption { - type = lib.types.str; - default = "http://127.0.0.1:3000"; - description = "Forgejo base URL used by the local runner for registration and job polling."; - }; - - labels = lib.mkOption { - type = with lib.types; listOf str; - default = [ "burrow-forge" ]; - description = "Runner labels exposed to Forgejo Actions."; - }; - - name = lib.mkOption { - type = lib.types.str; - default = "burrow-forge-agent"; - description = "Runner name shown in Forgejo."; - }; - - capacity = lib.mkOption { - type = lib.types.int; - default = 1; - description = "Maximum concurrent jobs on this runner."; - }; - - stateDir = lib.mkOption { - type = lib.types.str; - default = "/var/lib/forgejo-runner-agent"; - description = "Persistent runner state directory."; - }; - - user = lib.mkOption { - type = lib.types.str; - default = "forgejo-runner-agent"; - description = "System user that runs the Forgejo runner."; - }; - - group = lib.mkOption { - type = 
lib.types.str; - default = "forgejo-runner-agent"; - description = "System group that runs the Forgejo runner."; - }; - - forgejoConfigFile = lib.mkOption { - type = lib.types.str; - default = "/var/lib/forgejo/custom/conf/app.ini"; - description = "Forgejo app.ini path used to generate runner tokens."; - }; - - gitUserName = lib.mkOption { - type = lib.types.str; - default = "agent"; - description = "Git commit author name for automation on the forge host."; - }; - - gitUserEmail = lib.mkOption { - type = lib.types.str; - default = "agent@burrow.net"; - description = "Git commit author email for automation on the forge host."; - }; - - sshPrivateKeyFile = lib.mkOption { - type = with lib.types; nullOr str; - default = null; - description = "Optional host-local path to the agent SSH private key copied into the runner home."; - }; - }; - - config = lib.mkIf cfg.enable { - users.groups.${cfg.group} = { }; - - users.users.${cfg.user} = { - isSystemUser = true; - group = cfg.group; - description = "Burrow Forgejo Actions runner"; - home = cfg.stateDir; - createHome = true; - shell = pkgs.bashInteractive; - }; - - environment.systemPackages = with pkgs; [ - runnerPkg - bash - coreutils - findutils - git - git-lfs - openssh - python3 - rsync - ]; - - systemd.tmpfiles.rules = [ - "d ${stateDir} 0750 ${cfg.user} ${cfg.group} - -" - ]; - - systemd.services.burrow-forgejo-runner-bootstrap = { - description = "Bootstrap Burrow Forgejo runner registration"; - after = [ "forgejo.service" "network-online.target" "systemd-tmpfiles-setup.service" ]; - wants = [ "forgejo.service" "network-online.target" "systemd-tmpfiles-setup.service" ]; - before = [ "burrow-forgejo-runner.service" ]; - wantedBy = [ "multi-user.target" ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - umask 077 - - install -d -m 0750 -o ${cfg.user} -g ${cfg.group} ${stateDir} - cat > ${configFile} <> ${configFile} - done - cat >> ${configFile} 
<<'EOF' -cache: - enabled: false -EOF - chown ${cfg.user}:${cfg.group} ${configFile} - chmod 0640 ${configFile} - - expected_fingerprint=${lib.escapeShellArg registrationFingerprint} - if [ -s ${runnerFile} ]; then - current_fingerprint="" - if [ -s ${registrationFingerprintFile} ]; then - current_fingerprint="$(tr -d '\r\n' < ${registrationFingerprintFile})" - fi - if [ "${"$"}current_fingerprint" != "${"$"}expected_fingerprint" ]; then - rm -f ${runnerFile} ${registrationFingerprintFile} - fi - fi - - install -d -m 0700 -o ${cfg.user} -g ${cfg.group} ${stateDir}/.ssh - ${pkgs.util-linux}/bin/runuser -u ${cfg.user} -- \ - ${pkgs.git}/bin/git config --global user.name ${lib.escapeShellArg cfg.gitUserName} - ${pkgs.util-linux}/bin/runuser -u ${cfg.user} -- \ - ${pkgs.git}/bin/git config --global user.email ${lib.escapeShellArg cfg.gitUserEmail} - - if [ -n ${lib.escapeShellArg sshPrivateKeyFile} ] && [ -s ${lib.escapeShellArg sshPrivateKeyFile} ]; then - install -m 0600 -o ${cfg.user} -g ${cfg.group} \ - ${lib.escapeShellArg sshPrivateKeyFile} \ - ${stateDir}/.ssh/id_ed25519 - cat > ${stateDir}/.ssh/config <&2 - exit 1 - fi - - ${pkgs.util-linux}/bin/runuser -u ${cfg.user} -- \ - ${runnerPkg}/bin/forgejo-runner register \ - --no-interactive \ - --instance ${lib.escapeShellArg cfg.instanceUrl} \ - --token "${"$"}token" \ - --name ${lib.escapeShellArg cfg.name} \ - --labels ${lib.escapeShellArg labelsCsv} \ - --config ${configFile} - - printf '%s\n' "${"$"}expected_fingerprint" > ${registrationFingerprintFile} - chown ${cfg.user}:${cfg.group} ${registrationFingerprintFile} - chmod 0640 ${registrationFingerprintFile} - fi - ''; - }; - - systemd.services.burrow-forgejo-runner = { - description = "Burrow Forgejo Actions runner"; - after = [ "burrow-forgejo-runner-bootstrap.service" ]; - wants = [ "burrow-forgejo-runner-bootstrap.service" ]; - wantedBy = [ "multi-user.target" ]; - serviceConfig = { - Type = "simple"; - User = cfg.user; - Group = cfg.group; - 
WorkingDirectory = stateDir; - Environment = [ "BURROW_RUNNER_REGISTRATION_FINGERPRINT=${registrationFingerprint}" ]; - Restart = "on-failure"; - RestartSec = 2; - ExecStart = pkgs.writeShellScript "burrow-forgejo-runner" '' - set -euo pipefail - export PATH="/run/wrappers/bin:/run/current-system/sw/bin:${"$"}{PATH:-}" - tmp="$(${pkgs.coreutils}/bin/mktemp)" - set +e - ${runnerPkg}/bin/forgejo-runner daemon --config ${configFile} 2>&1 | ${pkgs.coreutils}/bin/tee "${"$"}tmp" - rc="${"$"}{PIPESTATUS[0]}" - set -e - if ${pkgs.gnugrep}/bin/grep -qi "unregistered runner" "${"$"}tmp"; then - rm -f ${runnerFile} - fi - rm -f "${"$"}tmp" - exit "${"$"}rc" - ''; - }; - }; - }; -} diff --git a/nixos/modules/burrow-forge.nix b/nixos/modules/burrow-forge.nix deleted file mode 100644 index d733135..0000000 --- a/nixos/modules/burrow-forge.nix +++ /dev/null @@ -1,449 +0,0 @@ -{ config, lib, pkgs, ... }: - -let - cfg = config.services.burrow.forge; - forgejoCfg = config.services.forgejo; - forgejoExe = lib.getExe forgejoCfg.package; - forgejoWorkPath = forgejoCfg.stateDir; - forgejoCustomPath = "${forgejoWorkPath}/custom"; - forgejoConfigFile = "${forgejoCustomPath}/conf/app.ini"; - forgejoAdminArgs = "--config ${lib.escapeShellArg forgejoConfigFile} --work-path ${lib.escapeShellArg forgejoWorkPath} --custom-path ${lib.escapeShellArg forgejoCustomPath}"; - homeRepoPath = "/${cfg.homeOwner}/${cfg.homeRepo}"; - homeRepoUrl = "https://${cfg.gitDomain}${homeRepoPath}"; -in -{ - options.services.burrow.forge = { - enable = lib.mkEnableOption "the Burrow Forge host"; - - gitDomain = lib.mkOption { - type = lib.types.str; - default = "git.burrow.net"; - description = "Public Forgejo domain."; - }; - - siteDomain = lib.mkOption { - type = lib.types.str; - default = "burrow.net"; - description = "Root site domain."; - }; - - homeOwner = lib.mkOption { - type = lib.types.str; - default = "hackclub"; - description = "Canonical Forgejo org/user for the Burrow home repository."; - }; - - 
homeRepo = lib.mkOption { - type = lib.types.str; - default = "burrow"; - description = "Canonical Forgejo repository name for the Burrow home repository."; - }; - - contactEmail = lib.mkOption { - type = lib.types.str; - default = "contact@burrow.net"; - description = "Operator contact email."; - }; - - nscAutoscalerDomain = lib.mkOption { - type = lib.types.str; - default = "nsc-autoscaler.burrow.net"; - description = "Public webhook domain for the Forgejo Namespace autoscaler."; - }; - - adminUsername = lib.mkOption { - type = lib.types.str; - default = "contact"; - description = "Initial Forgejo admin username."; - }; - - adminEmail = lib.mkOption { - type = lib.types.str; - default = "contact@burrow.net"; - description = "Initial Forgejo admin email."; - }; - - adminPasswordFile = lib.mkOption { - type = lib.types.str; - description = "Host-local path to the plaintext bootstrap password file for the initial Forgejo admin."; - }; - - oidcDisplayName = lib.mkOption { - type = lib.types.str; - default = "burrow.net"; - description = "Login button label for the Forgejo OIDC provider."; - }; - - oidcClientId = lib.mkOption { - type = lib.types.str; - default = "git.burrow.net"; - description = "OIDC client ID that Forgejo should use against Authentik."; - }; - - oidcClientSecretFile = lib.mkOption { - type = lib.types.nullOr lib.types.str; - default = null; - description = "Host-local path to the Forgejo OIDC client secret."; - }; - - oidcDiscoveryUrl = lib.mkOption { - type = lib.types.str; - default = "https://auth.burrow.net/application/o/git/.well-known/openid-configuration"; - description = "OpenID Connect discovery URL for the Forgejo login source."; - }; - - oidcScopes = lib.mkOption { - type = with lib.types; listOf str; - default = [ - "openid" - "profile" - "email" - "groups" - ]; - description = "OIDC scopes requested from Authentik."; - }; - - oidcGroupClaimName = lib.mkOption { - type = lib.types.str; - default = "groups"; - description = "OIDC claim 
name that carries group membership."; - }; - - oidcAdminGroup = lib.mkOption { - type = lib.types.str; - default = "burrow-admins"; - description = "OIDC group that should grant Forgejo admin access."; - }; - - oidcRestrictedGroup = lib.mkOption { - type = lib.types.str; - default = "burrow-users"; - description = "OIDC group that is required to log into Forgejo."; - }; - - oidcAutoRegistration = lib.mkOption { - type = lib.types.bool; - default = true; - description = "Whether Forgejo should automatically create users for new OIDC sign-ins."; - }; - - oidcAccountLinking = lib.mkOption { - type = lib.types.enum [ "disabled" "login" "auto" ]; - default = "auto"; - description = "How Forgejo should link existing local accounts for OIDC sign-ins."; - }; - - oidcUsernameSource = lib.mkOption { - type = lib.types.enum [ "userid" "nickname" "email" ]; - default = "email"; - description = "Which OIDC claim Forgejo should use to derive usernames for auto-registration."; - }; - - authorizedKeys = lib.mkOption { - type = with lib.types; listOf str; - default = [ ]; - description = "SSH keys allowed for root login and operational bootstrap."; - }; - }; - - config = lib.mkIf cfg.enable { - networking.hostName = "burrow-forge"; - networking.useDHCP = lib.mkDefault true; - - services.qemuGuest.enable = true; - - boot.loader.grub = { - enable = true; - efiSupport = true; - efiInstallAsRemovable = true; - device = "nodev"; - }; - - fileSystems."/boot".neededForBoot = true; - - services.postgresql = { - enable = true; - package = pkgs.postgresql_16; - }; - - services.openssh = { - enable = true; - settings = { - PasswordAuthentication = false; - KbdInteractiveAuthentication = false; - PermitRootLogin = "prohibit-password"; - }; - }; - - users.users.root.openssh.authorizedKeys.keys = cfg.authorizedKeys; - - networking.firewall.allowedTCPPorts = [ - 22 - 80 - 443 - 2222 - ]; - - services.forgejo = { - enable = true; - database = { - type = "postgres"; - createDatabase = true; - }; - 
lfs.enable = true; - settings = { - server = { - DOMAIN = cfg.gitDomain; - ROOT_URL = "https://${cfg.gitDomain}/"; - HTTP_PORT = 3000; - SSH_DOMAIN = cfg.gitDomain; - SSH_PORT = 2222; - START_SSH_SERVER = true; - }; - - service = { - DISABLE_REGISTRATION = true; - ENABLE_INTERNAL_SIGNIN = false; - ENABLE_BASIC_AUTHENTICATION = false; - SHOW_REGISTRATION_BUTTON = false; - REQUIRE_SIGNIN_VIEW = false; - DEFAULT_ALLOW_CREATE_ORGANIZATION = false; - ENABLE_NOTIFY_MAIL = false; - NO_REPLY_ADDRESS = cfg.adminEmail; - }; - - session = { - COOKIE_SECURE = true; - SAME_SITE = "strict"; - }; - - openid = { - ENABLE_OPENID_SIGNIN = false; - ENABLE_OPENID_SIGNUP = false; - }; - - oauth2_client = { - OPENID_CONNECT_SCOPES = lib.concatStringsSep " " (lib.subtractLists [ "openid" ] cfg.oidcScopes); - ENABLE_AUTO_REGISTRATION = cfg.oidcAutoRegistration; - ACCOUNT_LINKING = cfg.oidcAccountLinking; - USERNAME = cfg.oidcUsernameSource; - }; - - actions = { - ENABLED = true; - }; - - repository = { - DEFAULT_BRANCH = "main"; - ENABLE_PUSH_CREATE_USER = false; - }; - - ui = { - DEFAULT_THEME = "forgejo-auto"; - }; - }; - }; - - services.caddy = { - enable = true; - email = cfg.contactEmail; - virtualHosts = - { - "${cfg.gitDomain}".extraConfig = '' - encode gzip zstd - @root path / - redir @root ${homeRepoPath} 308 - reverse_proxy 127.0.0.1:${toString config.services.forgejo.settings.server.HTTP_PORT} - ''; - "${cfg.siteDomain}".extraConfig = '' - encode gzip zstd - @oidcConfig path /.well-known/openid-configuration - redir @oidcConfig https://${config.services.burrow.authentik.domain}/application/o/${config.services.burrow.authentik.tailscaleProviderSlug}/.well-known/openid-configuration 308 - @tailnetConfig path /.well-known/burrow-tailnet - header @tailnetConfig Content-Type application/json - respond @tailnetConfig 
"{\"domain\":\"${cfg.siteDomain}\",\"provider\":\"headscale\",\"authority\":\"https://${config.services.burrow.headscale.domain}\",\"oidc_issuer\":\"https://${config.services.burrow.authentik.domain}/application/o/${config.services.burrow.authentik.headscaleProviderSlug}/\"}" 200 - @webfinger path /.well-known/webfinger - header @webfinger Content-Type application/jrd+json - respond @webfinger "{\"subject\":\"{query.resource}\",\"links\":[{\"rel\":\"http://openid.net/specs/connect/1.0/issuer\",\"href\":\"https://${config.services.burrow.authentik.domain}/application/o/${config.services.burrow.authentik.tailscaleProviderSlug}/\"},{\"rel\":\"https://burrow.net/rel/tailnet-control-server\",\"href\":\"https://${config.services.burrow.headscale.domain}\"}]}" 200 - @root path / - redir @root ${homeRepoUrl} 308 - respond 404 - ''; - } - // lib.optionalAttrs ( - config.services.forgejo-nsc.enable && config.services.forgejo-nsc.autoscaler.enable - ) { - "${cfg.nscAutoscalerDomain}".extraConfig = '' - encode gzip zstd - reverse_proxy 127.0.0.1:8090 - ''; - }; - }; - - systemd.services.burrow-forgejo-bootstrap = { - description = "Seed the initial Burrow Forgejo admin account"; - after = [ "forgejo.service" ]; - requires = [ "forgejo.service" ]; - wantedBy = [ "multi-user.target" ]; - path = [ - forgejoCfg.package - pkgs.coreutils - pkgs.gnugrep - ]; - serviceConfig = { - Type = "oneshot"; - User = forgejoCfg.user; - Group = forgejoCfg.group; - WorkingDirectory = forgejoCfg.stateDir; - }; - script = '' - set -euo pipefail - - if [ ! -s ${lib.escapeShellArg cfg.adminPasswordFile} ]; then - echo "bootstrap password file is missing; skipping admin bootstrap" >&2 - exit 0 - fi - - password="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.adminPasswordFile})" - if [ -z "$password" ]; then - echo "bootstrap password file is empty; skipping admin bootstrap" >&2 - exit 0 - fi - - log_file="$(mktemp)" - trap 'rm -f "$log_file"' EXIT - - if ! 
${forgejoExe} admin user create \ - ${forgejoAdminArgs} \ - --admin \ - --username ${lib.escapeShellArg cfg.adminUsername} \ - --email ${lib.escapeShellArg cfg.adminEmail} \ - --password "$password" \ - --must-change-password=false >"$log_file" 2>&1; then - if grep -qi "already exists" "$log_file"; then - ${forgejoExe} admin user change-password \ - ${forgejoAdminArgs} \ - --username ${lib.escapeShellArg cfg.adminUsername} \ - --password "$password" \ - --must-change-password=false - else - cat "$log_file" >&2 - exit 1 - fi - fi - ''; - }; - - systemd.services.burrow-forgejo-oidc-bootstrap = lib.mkIf (cfg.oidcClientSecretFile != null) { - description = "Seed the Burrow Forgejo OIDC login source"; - after = [ - "forgejo.service" - "postgresql.service" - ] ++ lib.optionals config.services.burrow.authentik.enable [ - "burrow-authentik-ready.service" - ]; - wants = lib.optionals config.services.burrow.authentik.enable [ - "burrow-authentik-ready.service" - ]; - requires = [ - "forgejo.service" - "postgresql.service" - ]; - wantedBy = [ "multi-user.target" ]; - restartTriggers = [ - cfg.oidcClientSecretFile - ]; - path = [ - pkgs.coreutils - pkgs.gnugrep - pkgs.jq - pkgs.postgresql - ]; - serviceConfig = { - Type = "oneshot"; - User = forgejoCfg.user; - Group = forgejoCfg.group; - WorkingDirectory = forgejoCfg.stateDir; - }; - script = '' - set -euo pipefail - - if [ ! 
-s ${lib.escapeShellArg cfg.oidcClientSecretFile} ]; then - echo "Forgejo OIDC client secret missing: ${cfg.oidcClientSecretFile}" >&2 - exit 1 - fi - - ready=0 - for attempt in $(seq 1 60); do - if ${pkgs.postgresql}/bin/psql -h /run/postgresql -U forgejo forgejo -tAc \ - "SELECT 1 FROM pg_tables WHERE schemaname='public' AND tablename='login_source';" \ - | grep -q 1; then - ready=1 - break - fi - sleep 1 - done - - if [ "$ready" -ne 1 ]; then - echo "Forgejo login_source table did not become ready" >&2 - exit 1 - fi - - oidc_secret="$(${pkgs.coreutils}/bin/tr -d '\r\n' < ${lib.escapeShellArg cfg.oidcClientSecretFile})" - if [ -z "$oidc_secret" ]; then - echo "Forgejo OIDC client secret is empty" >&2 - exit 1 - fi - - cfg_json="$(${pkgs.jq}/bin/jq -nc \ - --arg client_id ${lib.escapeShellArg cfg.oidcClientId} \ - --arg client_secret "$oidc_secret" \ - --arg discovery_url ${lib.escapeShellArg cfg.oidcDiscoveryUrl} \ - --argjson scopes '${builtins.toJSON cfg.oidcScopes}' \ - --arg group_claim_name ${lib.escapeShellArg cfg.oidcGroupClaimName} \ - --arg admin_group ${lib.escapeShellArg cfg.oidcAdminGroup} \ - --arg restricted_group ${lib.escapeShellArg cfg.oidcRestrictedGroup} \ - '{ - Provider: "openidConnect", - ClientID: $client_id, - ClientSecret: $client_secret, - OpenIDConnectAutoDiscoveryURL: $discovery_url, - CustomURLMapping: null, - IconURL: "", - Scopes: $scopes, - AttributeSSHPublicKey: "", - RequiredClaimName: "", - RequiredClaimValue: "", - GroupClaimName: $group_claim_name, - AdminGroup: $admin_group, - GroupTeamMap: "", - GroupTeamMapRemoval: false, - RestrictedGroup: $restricted_group - }')" - - ${pkgs.postgresql}/bin/psql -v ON_ERROR_STOP=1 \ - -h /run/postgresql -U forgejo forgejo \ - -v oidc_name=${lib.escapeShellArg cfg.oidcDisplayName} \ - -v cfg_json="$cfg_json" <<'SQL' - INSERT INTO login_source ( - type, name, is_active, is_sync_enabled, cfg, created_unix, updated_unix - ) VALUES ( - 6, - :'oidc_name', - TRUE, - FALSE, - :'cfg_json', - 
EXTRACT(EPOCH FROM NOW())::BIGINT, - EXTRACT(EPOCH FROM NOW())::BIGINT - ) - ON CONFLICT (name) DO UPDATE SET - type = EXCLUDED.type, - is_active = TRUE, - is_sync_enabled = FALSE, - cfg = EXCLUDED.cfg, - updated_unix = EXCLUDED.updated_unix; - SQL - ''; - }; - }; -} diff --git a/nixos/modules/burrow-headscale-policy.hujson b/nixos/modules/burrow-headscale-policy.hujson deleted file mode 100644 index 8f0bcd2..0000000 --- a/nixos/modules/burrow-headscale-policy.hujson +++ /dev/null @@ -1,11 +0,0 @@ -{ - // Bootstrap with a simple allow-all policy; Burrow-specific lane segmentation - // can be layered on once the control plane is live. - "acls": [ - { - "action": "accept", - "src": ["*"], - "dst": ["*:*"], - }, - ], -} diff --git a/nixos/modules/burrow-headscale.nix b/nixos/modules/burrow-headscale.nix deleted file mode 100644 index ad5ec68..0000000 --- a/nixos/modules/burrow-headscale.nix +++ /dev/null @@ -1,227 +0,0 @@ -{ config, lib, pkgs, ... }: - -let - cfg = config.services.burrow.headscale; - policyFile = ./burrow-headscale-policy.hujson; -in -{ - options.services.burrow.headscale = { - enable = lib.mkEnableOption "the Burrow Headscale control plane"; - - domain = lib.mkOption { - type = lib.types.str; - default = "ts.burrow.net"; - description = "Public Headscale control-plane domain."; - }; - - tailDomain = lib.mkOption { - type = lib.types.str; - default = "tail.burrow.net"; - description = "MagicDNS suffix served by Headscale."; - }; - - port = lib.mkOption { - type = lib.types.port; - default = 8413; - description = "Local Headscale listen port."; - }; - - oidcIssuer = lib.mkOption { - type = lib.types.str; - default = "https://${config.services.burrow.authentik.domain}/application/o/${config.services.burrow.authentik.headscaleProviderSlug}/"; - description = "OIDC issuer URL used by Headscale."; - }; - - oidcClientSecretFile = lib.mkOption { - type = lib.types.str; - default = config.services.burrow.authentik.headscaleClientSecretFile; - description = 
"Host-local file containing the OIDC client secret used by Headscale."; - }; - - bootstrapUsers = lib.mkOption { - type = with lib.types; listOf (submodule { - options = { - name = lib.mkOption { - type = str; - description = "Headscale username."; - }; - displayName = lib.mkOption { - type = str; - description = "Friendly display name."; - }; - email = lib.mkOption { - type = str; - description = "User email address."; - }; - }; - }); - default = [ - { - name = "contact"; - displayName = "Burrow"; - email = "contact@burrow.net"; - } - { - name = "conrad"; - displayName = "Conrad"; - email = "conrad@burrow.net"; - } - { - name = "agent"; - displayName = "Agent"; - email = "agent@burrow.net"; - } - { - name = "infra"; - displayName = "Infrastructure"; - email = "infra@burrow.net"; - } - ]; - description = "Users to create or reconcile inside Headscale."; - }; - }; - - config = lib.mkIf cfg.enable { - environment.systemPackages = [ pkgs.headscale ]; - - systemd.services.burrow-headscale-client-secret = { - description = "Ensure the Burrow Headscale OIDC client secret exists"; - before = - [ "headscale.service" ] - ++ lib.optionals config.services.burrow.authentik.enable [ "burrow-authentik-runtime.service" ]; - wantedBy = - [ "headscale.service" ] - ++ lib.optionals config.services.burrow.authentik.enable [ "burrow-authentik-runtime.service" ]; - path = [ - pkgs.coreutils - pkgs.openssl - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - RemainAfterExit = true; - }; - script = '' - set -euo pipefail - - install -d -m 0755 /var/lib/burrow/intake - - if [ ! 
-s ${lib.escapeShellArg cfg.oidcClientSecretFile} ]; then - umask 077 - ${pkgs.openssl}/bin/openssl rand -base64 48 > ${lib.escapeShellArg cfg.oidcClientSecretFile} - chown root:root ${lib.escapeShellArg cfg.oidcClientSecretFile} - chmod 0400 ${lib.escapeShellArg cfg.oidcClientSecretFile} - fi - ''; - }; - - services.headscale = { - enable = true; - address = "127.0.0.1"; - port = cfg.port; - settings = { - server_url = "https://${cfg.domain}"; - dns = { - magic_dns = true; - base_domain = cfg.tailDomain; - nameservers.global = [ - "1.1.1.1" - "1.0.0.1" - "2606:4700:4700::1111" - "2606:4700:4700::1001" - ]; - search_domains = [ cfg.tailDomain ]; - }; - database.sqlite.write_ahead_log = true; - log.level = "info"; - policy = { - mode = "file"; - path = policyFile; - }; - oidc = { - only_start_if_oidc_is_available = true; - issuer = cfg.oidcIssuer; - client_id = cfg.domain; - client_secret_path = "\${CREDENTIALS_DIRECTORY}/oidc_client_secret"; - scope = [ - "openid" - "profile" - "email" - ]; - pkce = { - enabled = true; - method = "S256"; - }; - }; - }; - }; - - systemd.services.headscale = { - after = - [ "burrow-headscale-client-secret.service" ] - ++ lib.optionals config.services.burrow.authentik.enable [ "burrow-authentik-ready.service" ]; - wants = - [ "burrow-headscale-client-secret.service" ] - ++ lib.optionals config.services.burrow.authentik.enable [ "burrow-authentik-ready.service" ]; - requires = - [ "burrow-headscale-client-secret.service" ] - ++ lib.optionals config.services.burrow.authentik.enable [ "burrow-authentik-ready.service" ]; - serviceConfig.LoadCredential = [ - "oidc_client_secret:${cfg.oidcClientSecretFile}" - ]; - }; - - systemd.services.headscale-bootstrap = { - description = "Bootstrap Burrow Headscale users"; - after = [ "headscale.service" ]; - requires = [ "headscale.service" ]; - wantedBy = [ "multi-user.target" ]; - path = [ - pkgs.coreutils - pkgs.headscale - pkgs.jq - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - 
Group = "root"; - }; - script = '' - set -euo pipefail - - list_users() { - local users_json - users_json="$(${pkgs.headscale}/bin/headscale users list -o json)" - printf '%s\n' "$users_json" | ${pkgs.jq}/bin/jq -c 'if type == "array" then . else [] end' - } - - ensure_user() { - local name="$1" - local display_name="$2" - local email="$3" - if list_users | ${pkgs.jq}/bin/jq -e --arg name "$name" 'map(select(.name == $name)) | length > 0' >/dev/null; then - return 0 - fi - ${pkgs.headscale}/bin/headscale users create "$name" --display-name "$display_name" --email "$email" >/dev/null - } - - for _ in $(seq 1 60); do - if list_users >/dev/null 2>&1; then - break - fi - sleep 1 - done - - ${lib.concatMapStringsSep "\n" (user: '' - ensure_user ${lib.escapeShellArg user.name} ${lib.escapeShellArg user.displayName} ${lib.escapeShellArg user.email} - '') cfg.bootstrapUsers} - ''; - }; - - services.caddy.virtualHosts."${cfg.domain}".extraConfig = '' - encode gzip zstd - reverse_proxy 127.0.0.1:${toString cfg.port} - ''; - }; -} diff --git a/nixos/modules/burrow-zulip.nix b/nixos/modules/burrow-zulip.nix deleted file mode 100644 index 9670694..0000000 --- a/nixos/modules/burrow-zulip.nix +++ /dev/null @@ -1,587 +0,0 @@ -{ config, lib, pkgs, ... 
}: - -let - cfg = config.services.burrow.zulip; - realmSignupDomain = - let - parts = lib.splitString "@" cfg.administratorEmail; - in - if builtins.length parts == 2 then builtins.elemAt parts 1 else cfg.domain; - yamlFormat = pkgs.formats.yaml { }; - composeFile = yamlFormat.generate "burrow-zulip-compose.yaml" { - services = { - zulip = { - image = "ghcr.io/zulip/zulip-server:11.6-1"; - restart = "unless-stopped"; - network_mode = "host"; - secrets = [ - "zulip__postgres_password" - "zulip__rabbitmq_password" - "zulip__redis_password" - "zulip__secret_key" - "zulip__email_password" - ]; - environment = { - SETTING_REMOTE_POSTGRES_HOST = "127.0.0.1"; - SETTING_MEMCACHED_LOCATION = "127.0.0.1:11211"; - SETTING_RABBITMQ_HOST = "127.0.0.1"; - SETTING_REDIS_HOST = "127.0.0.1"; - }; - volumes = [ "${cfg.dataDir}/data:/data:rw" ]; - ulimits.nofile = { - soft = 1000000; - hard = 1048576; - }; - }; - }; - }; -in -{ - options.services.burrow.zulip = { - enable = lib.mkEnableOption "the Burrow Zulip deployment"; - - domain = lib.mkOption { - type = lib.types.str; - default = "chat.burrow.net"; - description = "Public Zulip domain."; - }; - - port = lib.mkOption { - type = lib.types.port; - default = 18090; - description = "Local loopback port Caddy should proxy to."; - }; - - dataDir = lib.mkOption { - type = lib.types.str; - default = "/var/lib/burrow/zulip"; - description = "Host directory storing Zulip compose state and generated runtime files."; - }; - - administratorEmail = lib.mkOption { - type = lib.types.str; - default = "contact@burrow.net"; - description = "Operational Zulip administrator email."; - }; - - realmName = lib.mkOption { - type = lib.types.str; - default = "Burrow"; - description = "Initial Zulip organization name for single-tenant bootstrap."; - }; - - realmOwnerName = lib.mkOption { - type = lib.types.str; - default = "Burrow"; - description = "Display name used for the initial Zulip organization owner."; - }; - - authentikDomain = lib.mkOption { - 
type = lib.types.str; - default = config.services.burrow.authentik.domain; - description = "Authentik domain Zulip should trust as its SAML IdP."; - }; - - authentikProviderSlug = lib.mkOption { - type = lib.types.str; - default = config.services.burrow.authentik.zulipProviderSlug; - description = "Authentik SAML application slug used for Zulip."; - }; - - postgresPasswordFile = lib.mkOption { - type = lib.types.str; - description = "File containing the Zulip PostgreSQL password."; - }; - - rabbitmqPasswordFile = lib.mkOption { - type = lib.types.str; - description = "File containing the Zulip RabbitMQ password."; - }; - - redisPasswordFile = lib.mkOption { - type = lib.types.str; - description = "File containing the Zulip Redis password."; - }; - - secretKeyFile = lib.mkOption { - type = lib.types.str; - description = "File containing the Zulip Django secret key."; - }; - }; - - config = lib.mkIf cfg.enable { - environment.systemPackages = [ - pkgs.podman - pkgs.podman-compose - ]; - - services.postgresql = { - ensureDatabases = [ "zulip" ]; - ensureUsers = [ - { - name = "zulip"; - ensureDBOwnership = true; - } - ]; - settings = { - listen_addresses = lib.mkDefault "127.0.0.1"; - password_encryption = lib.mkDefault "scram-sha-256"; - }; - authentication = lib.mkAfter '' - host zulip zulip 127.0.0.1/32 scram-sha-256 - ''; - }; - - services.postgresqlBackup = { - enable = true; - backupAll = false; - databases = [ "zulip" ]; - }; - - services.memcached = { - enable = true; - listen = "127.0.0.1"; - port = 11211; - extraOptions = [ "-U 0" ]; - }; - - services.redis.servers.zulip = { - enable = true; - bind = "127.0.0.1"; - port = 6379; - requirePassFile = cfg.redisPasswordFile; - }; - - services.rabbitmq = { - enable = true; - listenAddress = "127.0.0.1"; - port = 5672; - }; - - services.caddy.virtualHosts."${cfg.domain}".extraConfig = '' - encode gzip zstd - reverse_proxy 127.0.0.1:${toString cfg.port} - ''; - - systemd.tmpfiles.rules = [ - "d ${cfg.dataDir} 0755 
root root - -" - "d ${cfg.dataDir}/data 0755 root root - -" - "d ${cfg.dataDir}/data/logs 0755 root root - -" - "d ${cfg.dataDir}/data/logs/emails 0755 root root - -" - "d ${cfg.dataDir}/data/secrets 0700 root root - -" - "d ${cfg.dataDir}/secrets 0700 root root - -" - "d ${cfg.dataDir}/logs 0755 root root - -" - ]; - - systemd.services.burrow-zulip-postgres-bootstrap = { - description = "Bootstrap PostgreSQL role for Burrow Zulip"; - after = [ "postgresql.service" ]; - wants = [ "postgresql.service" ]; - requiredBy = [ "burrow-zulip.service" ]; - before = [ "burrow-zulip.service" ]; - path = [ - config.services.postgresql.package - pkgs.bash - pkgs.coreutils - pkgs.python3 - pkgs.util-linux - ]; - serviceConfig = { - Type = "oneshot"; - User = "root"; - Group = "root"; - }; - script = '' - set -euo pipefail - - db_password="$(tr -d '\r\n' < ${lib.escapeShellArg cfg.postgresPasswordFile})" - db_password_sql="$(printf '%s' "$db_password" | python3 -c "import sys; print(sys.stdin.read().replace(chr(39), chr(39) * 2), end=\"\")")" - setup_sql="$(mktemp)" - trap 'rm -f "$setup_sql"' EXIT - - cat > "$setup_sql" < ${lib.escapeShellArg "${cfg.dataDir}/secrets/email-password"} - chmod 0600 ${lib.escapeShellArg "${cfg.dataDir}/secrets/email-password"} - - metadata_xml="$(${pkgs.curl}/bin/curl -fsSL https://${cfg.authentikDomain}/application/saml/${cfg.authentikProviderSlug}/metadata/)" - saml_cert="$(printf '%s' "$metadata_xml" | ${pkgs.python3}/bin/python3 -c ' -import xml.etree.ElementTree as ET, sys -xml = sys.stdin.read() -root = ET.fromstring(xml) -ns = {"ds": "http://www.w3.org/2000/09/xmldsig#"} -node = root.find(".//ds:X509Certificate", ns) -if node is None or not (node.text or "").strip(): - raise SystemExit("missing X509 certificate in Authentik metadata") -print((node.text or "").strip()) -')" - - cat > ${lib.escapeShellArg "${cfg.dataDir}/compose.override.yaml"} < "$zulip_data_dir/secrets/bootstrap-owner-password" - fi - chown 1000:1000 
"$zulip_data_dir/secrets/bootstrap-owner-password" - chmod 0600 "$zulip_data_dir/secrets/bootstrap-owner-password" - } - - wait_for_zulip_supervisor() { - local attempts=0 - while ! podman exec burrow-zulip_zulip_1 supervisorctl status >/dev/null 2>&1; do - attempts=$((attempts + 1)) - if [ "$attempts" -ge 90 ]; then - echo "error: Zulip supervisor did not become ready" >&2 - exit 1 - fi - sleep 2 - done - } - - patch_uwsgi_scheme_handling() { - wait_for_zulip_supervisor - podman exec burrow-zulip_zulip_1 bash -lc "cat > /etc/nginx/zulip-include/trusted-proto <<'EOF' -map \$remote_addr \$trusted_x_forwarded_proto { - default \$scheme; - 127.0.0.1 \$http_x_forwarded_proto; - ::1 \$http_x_forwarded_proto; - 172.31.1.1 \$http_x_forwarded_proto; -} -map \$remote_addr \$trusted_x_forwarded_for { - default \"\"; - 127.0.0.1 \$http_x_forwarded_for; - ::1 \$http_x_forwarded_for; - 172.31.1.1 \$http_x_forwarded_for; -} -map \$remote_addr \$x_proxy_misconfiguration { - default \"\"; -} -EOF -cat > /etc/nginx/uwsgi_params <<'EOF' -uwsgi_param QUERY_STRING \$query_string; -uwsgi_param REQUEST_METHOD \$request_method; -uwsgi_param CONTENT_TYPE \$content_type; -uwsgi_param CONTENT_LENGTH \$content_length; -uwsgi_param REQUEST_URI \$request_uri; -uwsgi_param PATH_INFO \$document_uri; -uwsgi_param DOCUMENT_ROOT \$document_root; -uwsgi_param SERVER_PROTOCOL \$server_protocol; -uwsgi_param REQUEST_SCHEME \$trusted_x_forwarded_proto; -uwsgi_param HTTPS on; -uwsgi_param REMOTE_ADDR \$remote_addr; -uwsgi_param REMOTE_PORT \$remote_port; -uwsgi_param SERVER_ADDR \$server_addr; -uwsgi_param SERVER_PORT \$server_port; -uwsgi_param SERVER_NAME \$server_name; -uwsgi_param HTTP_X_REAL_IP \$remote_addr; -uwsgi_param HTTP_X_FORWARDED_PROTO \$trusted_x_forwarded_proto; -uwsgi_param HTTP_X_FORWARDED_SSL \"\"; -uwsgi_param HTTP_X_PROXY_MISCONFIGURATION \$x_proxy_misconfiguration; - -# This value is the default, and is provided for explicitness; it must -# be longer than the configured 55s 
harakiri timeout in uwsgi -uwsgi_read_timeout 60s; - -uwsgi_pass django; -EOF -supervisorctl restart nginx zulip-django >/dev/null" - } - - bootstrap_realm_if_needed() { - wait_for_zulip_supervisor - local realm_exists - - realm_exists="$( - podman exec burrow-zulip_zulip_1 bash -lc \ - "su zulip -c '/home/zulip/deployments/current/manage.py list_realms'" \ - | awk '$NF == "https://${cfg.domain}" { print "yes" }' - )" - - if [ -n "$realm_exists" ]; then - return 0 - fi - - local realm_name=${lib.escapeShellArg cfg.realmName} - local admin_email=${lib.escapeShellArg cfg.administratorEmail} - local owner_name=${lib.escapeShellArg cfg.realmOwnerName} - local create_realm_cmd - - printf -v create_realm_cmd '%q ' \ - /home/zulip/deployments/current/manage.py \ - create_realm \ - --string-id= \ - --password-file /data/secrets/bootstrap-owner-password \ - --automated \ - "$realm_name" \ - "$admin_email" \ - "$owner_name" - - podman exec burrow-zulip_zulip_1 su zulip -c "$create_realm_cmd" - } - - reconcile_realm_policy() { - wait_for_zulip_supervisor - local realm_id - realm_id="$( - podman exec burrow-zulip_zulip_1 bash -lc \ - "su zulip -c '/home/zulip/deployments/current/manage.py list_realms'" \ - | awk '$NF == "https://${cfg.domain}" { print $1 }' - )" - - podman exec burrow-zulip_zulip_1 su zulip -c \ - "/home/zulip/deployments/current/manage.py realm_domain --op add -r $realm_id ${realmSignupDomain} --allow-subdomains --automated" \ - >/dev/null 2>&1 || true - - podman exec burrow-zulip_zulip_1 su zulip -c \ - "/home/zulip/deployments/current/manage.py shell -c 'from zerver.models import Realm; realm = Realm.objects.get(id=$realm_id); realm.invite_required = False; realm.save(update_fields=[\"invite_required\"])'" - } - - if [ ! 
-e .initialized ]; then - compose pull - compose run --rm -T zulip app:init - touch .initialized - fi - - ensure_zulip_data_layout - compose up -d zulip - bootstrap_realm_if_needed - reconcile_realm_policy - patch_uwsgi_scheme_handling - ''; - }; - }; -} diff --git a/proto/burrow.proto b/proto/burrow.proto deleted file mode 100644 index ed1f89e..0000000 --- a/proto/burrow.proto +++ /dev/null @@ -1,140 +0,0 @@ -syntax = "proto3"; -package burrow; - -import "google/protobuf/timestamp.proto"; - -service Tunnel { - rpc TunnelConfiguration (Empty) returns (stream TunnelConfigurationResponse); - rpc TunnelPackets (stream TunnelPacket) returns (stream TunnelPacket); - rpc TunnelStart (Empty) returns (Empty); - rpc TunnelStop (Empty) returns (Empty); - rpc TunnelStatus (Empty) returns (stream TunnelStatusResponse); -} - -service Networks { - rpc NetworkAdd (Network) returns (Empty); - rpc NetworkList (Empty) returns (stream NetworkListResponse); - rpc NetworkReorder (NetworkReorderRequest) returns (Empty); - rpc NetworkDelete (NetworkDeleteRequest) returns (Empty); -} - -service TailnetControl { - rpc Discover (TailnetDiscoverRequest) returns (TailnetDiscoverResponse); - rpc Probe (TailnetProbeRequest) returns (TailnetProbeResponse); - rpc LoginStart (TailnetLoginStartRequest) returns (TailnetLoginStatusResponse); - rpc LoginStatus (TailnetLoginStatusRequest) returns (TailnetLoginStatusResponse); - rpc LoginCancel (TailnetLoginCancelRequest) returns (Empty); -} - -message NetworkReorderRequest { - int32 id = 1; - int32 index = 2; -} - -message WireGuardPeer { - string endpoint = 1; - repeated string subnet = 2; -} - -message WireGuardNetwork { - string address = 1; - string dns = 2; - repeated WireGuardPeer peer = 3; -} - -message NetworkDeleteRequest { - int32 id = 1; -} - -message Network { - int32 id = 1; - NetworkType type = 2; - bytes payload = 3; -} - -enum NetworkType { - WireGuard = 0; - Tailnet = 1; -} - -message NetworkListResponse { - repeated Network network = 
1; -} - -message Empty { - -} - -message TailnetDiscoverRequest { - string email = 1; -} - -message TailnetDiscoverResponse { - string domain = 1; - string authority = 2; - string oidc_issuer = 3; - bool managed = 4; -} - -message TailnetProbeRequest { - string authority = 1; -} - -message TailnetProbeResponse { - string authority = 1; - int32 status_code = 2; - string summary = 3; - string detail = 4; - bool reachable = 5; -} - -message TailnetLoginStartRequest { - string account_name = 1; - string identity_name = 2; - string hostname = 3; - string authority = 4; -} - -message TailnetLoginStatusRequest { - string session_id = 1; -} - -message TailnetLoginCancelRequest { - string session_id = 1; -} - -message TailnetLoginStatusResponse { - string session_id = 1; - string backend_state = 2; - string auth_url = 3; - bool running = 4; - bool needs_login = 5; - string tailnet_name = 6; - string magic_dns_suffix = 7; - string self_dns_name = 8; - repeated string tailnet_ips = 9; - repeated string health = 10; -} - -enum State { - Stopped = 0; - Running = 1; -} - -message TunnelStatusResponse { - State state = 1; - optional google.protobuf.Timestamp start = 2; -} - -message TunnelConfigurationResponse { - repeated string addresses = 1; - int32 mtu = 2; - repeated string routes = 3; - repeated string dns_servers = 4; - repeated string search_domains = 5; - bool include_default_route = 6; -} - -message TunnelPacket { - bytes payload = 1; -} diff --git a/proto/google/protobuf/duration.proto b/proto/google/protobuf/duration.proto deleted file mode 100644 index 41f40c2..0000000 --- a/proto/google/protobuf/duration.proto +++ /dev/null @@ -1,115 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. 
-// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -syntax = "proto3"; - -package google.protobuf; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/protobuf/types/known/durationpb"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "DurationProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; - -// A Duration represents a signed, fixed-length span of time represented -// as a count of seconds and fractions of seconds at nanosecond -// resolution. It is independent of any calendar and concepts like "day" -// or "month". It is related to Timestamp in that the difference between -// two Timestamp values is a Duration and it can be added or subtracted -// from a Timestamp. Range is approximately +-10,000 years. -// -// # Examples -// -// Example 1: Compute Duration from two Timestamps in pseudo code. -// -// Timestamp start = ...; -// Timestamp end = ...; -// Duration duration = ...; -// -// duration.seconds = end.seconds - start.seconds; -// duration.nanos = end.nanos - start.nanos; -// -// if (duration.seconds < 0 && duration.nanos > 0) { -// duration.seconds += 1; -// duration.nanos -= 1000000000; -// } else if (duration.seconds > 0 && duration.nanos < 0) { -// duration.seconds -= 1; -// duration.nanos += 1000000000; -// } -// -// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. -// -// Timestamp start = ...; -// Duration duration = ...; -// Timestamp end = ...; -// -// end.seconds = start.seconds + duration.seconds; -// end.nanos = start.nanos + duration.nanos; -// -// if (end.nanos < 0) { -// end.seconds -= 1; -// end.nanos += 1000000000; -// } else if (end.nanos >= 1000000000) { -// end.seconds += 1; -// end.nanos -= 1000000000; -// } -// -// Example 3: Compute Duration from datetime.timedelta in Python. 
-// -// td = datetime.timedelta(days=3, minutes=10) -// duration = Duration() -// duration.FromTimedelta(td) -// -// # JSON Mapping -// -// In JSON format, the Duration type is encoded as a string rather than an -// object, where the string ends in the suffix "s" (indicating seconds) and -// is preceded by the number of seconds, with nanoseconds expressed as -// fractional seconds. For example, 3 seconds with 0 nanoseconds should be -// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -// microsecond should be expressed in JSON format as "3.000001s". -// -message Duration { - // Signed seconds of the span of time. Must be from -315,576,000,000 - // to +315,576,000,000 inclusive. Note: these bounds are computed from: - // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - int64 seconds = 1; - - // Signed fractions of a second at nanosecond resolution of the span - // of time. Durations less than one second are represented with a 0 - // `seconds` field and a positive or negative `nanos` field. For durations - // of one second or more, a non-zero value for the `nanos` field must be - // of the same sign as the `seconds` field. Must be from -999,999,999 - // to +999,999,999 inclusive. - int32 nanos = 2; -} diff --git a/proto/google/protobuf/timestamp.proto b/proto/google/protobuf/timestamp.proto deleted file mode 100644 index fd0bc07..0000000 --- a/proto/google/protobuf/timestamp.proto +++ /dev/null @@ -1,144 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. 
-// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -syntax = "proto3"; - -package google.protobuf; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/protobuf/types/known/timestamppb"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "TimestampProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; - -// A Timestamp represents a point in time independent of any time zone or local -// calendar, encoded as a count of seconds and fractions of seconds at -// nanosecond resolution. The count is relative to an epoch at UTC midnight on -// January 1, 1970, in the proleptic Gregorian calendar which extends the -// Gregorian calendar backwards to year one. -// -// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -// second table is needed for interpretation, using a [24-hour linear -// smear](https://developers.google.com/time/smear). -// -// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -// restricting to that range, we ensure that we can convert to and from [RFC -// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. -// -// # Examples -// -// Example 1: Compute Timestamp from POSIX `time()`. -// -// Timestamp timestamp; -// timestamp.set_seconds(time(NULL)); -// timestamp.set_nanos(0); -// -// Example 2: Compute Timestamp from POSIX `gettimeofday()`. -// -// struct timeval tv; -// gettimeofday(&tv, NULL); -// -// Timestamp timestamp; -// timestamp.set_seconds(tv.tv_sec); -// timestamp.set_nanos(tv.tv_usec * 1000); -// -// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. -// -// FILETIME ft; -// GetSystemTimeAsFileTime(&ft); -// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; -// -// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z -// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. 
-// Timestamp timestamp; -// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); -// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); -// -// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. -// -// long millis = System.currentTimeMillis(); -// -// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) -// .setNanos((int) ((millis % 1000) * 1000000)).build(); -// -// Example 5: Compute Timestamp from Java `Instant.now()`. -// -// Instant now = Instant.now(); -// -// Timestamp timestamp = -// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) -// .setNanos(now.getNano()).build(); -// -// Example 6: Compute Timestamp from current time in Python. -// -// timestamp = Timestamp() -// timestamp.GetCurrentTime() -// -// # JSON Mapping -// -// In JSON format, the Timestamp type is encoded as a string in the -// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -// where {year} is always expressed using four digits while {month}, {day}, -// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -// is required. A proto3 JSON serializer should always use UTC (as indicated by -// "Z") when printing the Timestamp type and a proto3 JSON parser should be -// able to accept both UTC and other timezones (as indicated by an offset). -// -// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -// 01:30 UTC on January 15, 2017. -// -// In JavaScript, one can convert a Date object to this format using the -// standard -// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -// method. 
In Python, a standard `datetime.datetime` object can be converted -// to this format using -// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use -// the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() -// ) to obtain a formatter capable of generating timestamps in this format. -// -message Timestamp { - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - int64 seconds = 1; - - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. - int32 nanos = 2; -} diff --git a/rust-toolchain.toml b/rust-toolchain.toml deleted file mode 100644 index ff09ebf..0000000 --- a/rust-toolchain.toml +++ /dev/null @@ -1,4 +0,0 @@ -[toolchain] -channel = "1.93.1" -components = ["rustfmt"] -profile = "minimal" diff --git a/secrets.nix b/secrets.nix deleted file mode 100644 index 3f9bba4..0000000 --- a/secrets.nix +++ /dev/null @@ -1,33 +0,0 @@ -let - conradev = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBueQxNbP2246pxr/m7au4zNVm+ShC96xuOcfEcpIjWZ"; - contact = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIO42guJ5QvNMw3k6YKWlQnjcTsc+X4XI9F2GBtl8aHOa"; - agent = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEN0+tRJy7Y2DW0uGYHb86N2t02WyU5lDNX6FaxBF/G8 agent@burrow.net"; - jett = builtins.replaceStrings [ "\n" ] [ "" ] (builtins.readFile ./nixos/keys/jett_at_burrow_net.pub); - burrowForgeHost = "age1quxf27gnun0xghlnxf3jrmqr3h3a3fzd8qxpallsaztd2u74pdfq9e7w9l"; - burrowForgeRecipients = [ - contact - agent - jett - burrowForgeHost - ]; - uiTestRecipients = burrowForgeRecipients ++ [ conradev ]; -in -{ - 
"secrets/infra/authentik.env.age".publicKeys = burrowForgeRecipients; - "secrets/infra/authentik-google-client-id.age".publicKeys = burrowForgeRecipients; - "secrets/infra/authentik-google-client-secret.age".publicKeys = burrowForgeRecipients; - "secrets/infra/authentik-google-account-map.json.age".publicKeys = burrowForgeRecipients; - "secrets/infra/authentik-ui-test-password.age".publicKeys = uiTestRecipients; - "secrets/infra/forgejo-oidc-client-secret.age".publicKeys = burrowForgeRecipients; - "secrets/infra/forgejo-nsc-autoscaler-config.age".publicKeys = burrowForgeRecipients; - "secrets/infra/forgejo-nsc-dispatcher-config.age".publicKeys = burrowForgeRecipients; - "secrets/infra/forgejo-nsc-token.age".publicKeys = burrowForgeRecipients; - "secrets/infra/headscale-oidc-client-secret.age".publicKeys = burrowForgeRecipients; - "secrets/infra/linear-scim-token.age".publicKeys = burrowForgeRecipients; - "secrets/infra/tailscale-oidc-client-secret.age".publicKeys = burrowForgeRecipients; - "secrets/infra/zulip-postgres-password.age".publicKeys = burrowForgeRecipients; - "secrets/infra/zulip-memcached-password.age".publicKeys = burrowForgeRecipients; - "secrets/infra/zulip-rabbitmq-password.age".publicKeys = burrowForgeRecipients; - "secrets/infra/zulip-redis-password.age".publicKeys = burrowForgeRecipients; - "secrets/infra/zulip-secret-key.age".publicKeys = burrowForgeRecipients; -} diff --git a/secrets/infra/authentik-google-account-map.json.age b/secrets/infra/authentik-google-account-map.json.age deleted file mode 100644 index 158814a..0000000 Binary files a/secrets/infra/authentik-google-account-map.json.age and /dev/null differ diff --git a/secrets/infra/authentik-google-client-id.age b/secrets/infra/authentik-google-client-id.age deleted file mode 100644 index 344c73b..0000000 --- a/secrets/infra/authentik-google-client-id.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q Cmf/vgRBGrP8KGwpc9XCXKo5H23Gcgi6dN688oazITQ 
-poYU28mmvkWFdciOiWLQ+powQcsHzof3Gyzq61V2olY --> ssh-ed25519 IrZmAg mowUPV3BbYR1IupBoT1o3KB+Fo7Q3E3DT0wRx82f4ic -TZ4r/L5EdHP9wwIbJWBjIITja2L2Pd4AX/U7JSfLm/Y --> ssh-ed25519 0kWPgQ v9NoFxsRERSgK5cgCHSdtZpn4EcPhvj4JCRR1axGqUM -ogDiLkSFr8i39b3y2WlnbTMprXiVJPG5KNHGKJIagLo --> X25519 4xouhPGq8wCmbbjLQsfZeGabsXxc4f74e2gXd+13kB4 -UM7/P0RZyu3PoU5mMY0aoGCdoqrOTgDshGuVjagoaEc ---- r6gIEDysfaqsHMaFF/vuLVaJv85uShPlNNTktMdpUvw -2TXěM"T뇵S=_U.=w -\Y/x*|tjZ'#uOcqL_hA$)ic{L @F \ No newline at end of file diff --git a/secrets/infra/authentik-google-client-secret.age b/secrets/infra/authentik-google-client-secret.age deleted file mode 100644 index 9a841c7..0000000 --- a/secrets/infra/authentik-google-client-secret.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q Q3rYrGroJXarMLdatYCHVERefWDyGwM0Ii/kOp5m3Fs -W3tgHNXLSVfGU5p8MhBj0mX72SNgMl8nf8sQX29yvBw --> ssh-ed25519 IrZmAg fyFQQkd51GthNZ4R+W5Al266LnlKbr4ZoMERlCM1OTQ -rNjnHTGCfF8LkqU8mzTrHlL5G4az1k62gvH4gW8zmjc --> ssh-ed25519 0kWPgQ OWokv9XAphqbkDi1cznb9V09VcM6Li1eIh0JpcIlVTY -TnPVlqKB78y7NPYp02UJmuRXdBMKJKCngpvo8TjpFZ8 --> X25519 HWaWhyejjo4IjDrNsBYxU1JaGU0899FqiBYgstInuiU -enbBGnhH+uJKY3NBD6mmy09Uos+in6ytRQ5BakvTUvI ---- gOBrh88hnvlUSmnRiowJiUIwgIz5zzVKH8YCRb8Ckdw -xokPn8v򵄙HRʏoMË9&Tb]ĉ'|<Pbe \ No newline at end of file diff --git a/secrets/infra/authentik-ui-test-password.age b/secrets/infra/authentik-ui-test-password.age deleted file mode 100644 index 773833e..0000000 Binary files a/secrets/infra/authentik-ui-test-password.age and /dev/null differ diff --git a/secrets/infra/authentik.env.age b/secrets/infra/authentik.env.age deleted file mode 100644 index dbada85..0000000 Binary files a/secrets/infra/authentik.env.age and /dev/null differ diff --git a/secrets/infra/forgejo-nsc-autoscaler-config.age b/secrets/infra/forgejo-nsc-autoscaler-config.age deleted file mode 100644 index 5b5da65..0000000 Binary files a/secrets/infra/forgejo-nsc-autoscaler-config.age and /dev/null differ diff --git 
a/secrets/infra/forgejo-nsc-dispatcher-config.age b/secrets/infra/forgejo-nsc-dispatcher-config.age deleted file mode 100644 index 4ab9cc0..0000000 Binary files a/secrets/infra/forgejo-nsc-dispatcher-config.age and /dev/null differ diff --git a/secrets/infra/forgejo-nsc-token.age b/secrets/infra/forgejo-nsc-token.age deleted file mode 100644 index 68b6572..0000000 Binary files a/secrets/infra/forgejo-nsc-token.age and /dev/null differ diff --git a/secrets/infra/forgejo-oidc-client-secret.age b/secrets/infra/forgejo-oidc-client-secret.age deleted file mode 100644 index 68c35e9..0000000 Binary files a/secrets/infra/forgejo-oidc-client-secret.age and /dev/null differ diff --git a/secrets/infra/headscale-oidc-client-secret.age b/secrets/infra/headscale-oidc-client-secret.age deleted file mode 100644 index 81cff1c..0000000 --- a/secrets/infra/headscale-oidc-client-secret.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q 8QtHVR8defharS9ppAsLOnwX1A3T5CqNLhaLDG41q0E -tQBUL3Wzh4lIwmIBGjLH5gjsvChWo6GJ4YxXc+cNddU --> ssh-ed25519 IrZmAg TgL3trgA3+4ivxpIpv/rEegjmZakSEx7B6e2sc4xhRw -NW4OgVJZhVJUXMBHaajk06CxEJjzrumqTNI2/6RDM4A --> ssh-ed25519 0kWPgQ uBosFXj4NCXBw5X+h/zr2QLCHnkhtgVZEYOHEBBGoFY -LTrparOr5iwAEEPM+rTZyDxJFJX/nQsTYpNdGSgKTes --> X25519 zbO7ax9E3Fya7mvNP/ueB/XL2UN1sHe8Is+2g6hM8WA -PnjKLk/ZQFrJ0mGIbX8fc9pqw3T2FTT0WSUaDjN1C+w ---- Aknf9dPdr3qD+tu5HyT74L2JMtg46ClYL0FBDhiLrxI -3_:޿rbA~vn™G/->5K\닊|iX~sX!EF'cv9>ԦrHxEP`> \ No newline at end of file diff --git a/secrets/infra/linear-scim-token.age b/secrets/infra/linear-scim-token.age deleted file mode 100644 index 5bed53e..0000000 --- a/secrets/infra/linear-scim-token.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q Tb3hxc6ZscCQpr7s8raup25FA8YAmq30jHZfOQp28Xs -L9YhaX9IVinud0IOs5K55ldGx82wjXHxnVBHZnRjiTA --> ssh-ed25519 IrZmAg etIe6hWDP9YkqDFCWybnvsOh7h8YO+z3tKc95pG64lU -BT3rH5a+LJZWv2xtWPbMJGS2oM9v4mOI9WPmnHebiew --> ssh-ed25519 0kWPgQ 
YpCf5m16VaKp7d+C3oF9MJQB/0xzCNtD7ODsTiV8t1o -xG8G/kSM+7VrWHm299A7fG/kBFnoiWZPiDZuldvimLw --> X25519 ETltnMPR7lWbBWJvJKmNZhS7wqX0WCa4aNu8UKzxMVE -Ys57VNuclgvN1nJIrLjNrwekbosa7KK9lFt0PTpr/MQ ---- ZeUmSOf8+NycQAFRGCJHYcQvTJqSBIGKEOEdCnNfJbE -<q1.O_դ7A۷_@%/5l7JɵčA xb "B \ No newline at end of file diff --git a/secrets/infra/tailscale-oidc-client-secret.age b/secrets/infra/tailscale-oidc-client-secret.age deleted file mode 100644 index 3c3c074..0000000 Binary files a/secrets/infra/tailscale-oidc-client-secret.age and /dev/null differ diff --git a/secrets/infra/zulip-memcached-password.age b/secrets/infra/zulip-memcached-password.age deleted file mode 100644 index 0769512..0000000 --- a/secrets/infra/zulip-memcached-password.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q x0r1UHgSibFIvKU34kP0+mnvQa5xXnac3P5fyqb7qFc -MfKnr5N0DV2NIoo4MFVFV0ULMayy0zzZqIq4FDzgDGc --> ssh-ed25519 IrZmAg rzoR8knGrsTGuh9Hqg/NB0NQKI1vx1WI0ZRyrLIPwVY -7gV/d1slrIT+W0+iX5YK/uUWjHGJfee6vA+f9a35nEY --> ssh-ed25519 0kWPgQ SyuEAfqmBAqLcuuQUHM5OzAv2hoquMMYtVdbKpBVhjI -7QqXens2363ln0euoormMh9a3Csh+nS2eBkHuQJmOWc --> X25519 qDjNNkYBUhWTYyBhrw9tYl8a7G6TCkVZbR4aPcP+J0c -QF33V6hFUuYRj0B8Eo4jqyyvCpBbpD2ViVWoS8A8f3E ---- 1/Jb0nvWlcszMmxI0yVr6kfexDN0sSk1p+wsTUL4WvU -9a5IكV[f,Db \v&LZ7!?4=JxFeV \ No newline at end of file diff --git a/secrets/infra/zulip-postgres-password.age b/secrets/infra/zulip-postgres-password.age deleted file mode 100644 index b03556c..0000000 Binary files a/secrets/infra/zulip-postgres-password.age and /dev/null differ diff --git a/secrets/infra/zulip-rabbitmq-password.age b/secrets/infra/zulip-rabbitmq-password.age deleted file mode 100644 index 9b1f6ec..0000000 --- a/secrets/infra/zulip-rabbitmq-password.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q s1hLIWvkXmlIv/VeHXpDSCe+dh09mE+iZd7xJiQccy0 -8WosTJQLGRPhTR06SIDjgtXNebcf+H/pFzY/lBCjXcs --> ssh-ed25519 IrZmAg zBNlK+o/RCTCyp8BRkoAYqsDn//kIKtYk3SICkMu3BA 
-EhBQy8QdSnCZKkdGzQho7zEMmAbJVoU5jZOMPN6tHG0 --> ssh-ed25519 0kWPgQ hv06idPXqAATkLeUC5vILdEO2NXNWPczlWnwMFvOdkA -3EeajviunGlcfcF1QlRJrVA9bwPT+fJZFX0uneYVs0c --> X25519 vm9rPYnQB16VSidi7+nr70lFaH0W/jIGY8zwUObZUV8 -jFgPy/w4j0/p1USKGjQY+coo1OUFXiIjJ5apIZCrZVI ---- Cf2c6WzLYOi8xE/sIn7ZtUqBy5AToASDUNpAxyjrI9M -:,+!ϨϬB4DmH|(9l9LPZ^zed=imz? \ No newline at end of file diff --git a/secrets/infra/zulip-redis-password.age b/secrets/infra/zulip-redis-password.age deleted file mode 100644 index 2aff8b6..0000000 --- a/secrets/infra/zulip-redis-password.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q DqDE3ZZlPUWUyyLA185xsOmfGi146SNk+hENMQXaiFY -D6FhZgynbdccPJQiFRJ18EYvCyDLz3cak0YuQa4f5p4 --> ssh-ed25519 IrZmAg lXgVeADmgjeHeVOOIS5oHqrhkN59ZWDemMOBJo3ubH8 -AQ24P+DnxNoHEguNnLaROIW4/Sq96w/UxzzQwEOyGRc --> ssh-ed25519 0kWPgQ 8x0pMohdACYueLY6jbNwg7MYVaZcjwBU4axthvDoFx4 -SgUVnd6MK1MccWVYOu9R3PtoMCBBNGKQ7jt5MSA+KkI --> X25519 UaO5huJPx8d8eMUnGhbI77tZjsFlIPWEffT4fgoO22w -DVz016ibRxJoa4TDmb2m0Qu9Dn8jpjWEBVtdm2TZx0c ---- 5+MHuvC26SjEBFSmRm0kXjiI27QnJGxvPl2w13EkMrw -FoQ]ȟeU//no.XGJ Э|+ž \ No newline at end of file diff --git a/secrets/infra/zulip-secret-key.age b/secrets/infra/zulip-secret-key.age deleted file mode 100644 index d903d66..0000000 --- a/secrets/infra/zulip-secret-key.age +++ /dev/null @@ -1,11 +0,0 @@ -age-encryption.org/v1 --> ssh-ed25519 ux4N8Q ml+kmLmuRb2nMXJyhKigby2+lPddxM/U7tjhGGQ/JGk -B3UCv/3+4GHeKR964o/m0CoicHwDgWQGEarPW94tb3I --> ssh-ed25519 IrZmAg AO0ELOuGGj+WanDZFRkHKUEJyZqJYFdhWbqmUfwbpiM -5RZMxVBvW5+TzCBFnn66ry3o5V5cJykweyoYMVBgczY --> ssh-ed25519 0kWPgQ gqQ/S33Re2OYLz1D9LoSAoqOKxuL4aUes8r6+NyAoXw -NHo2xFsxxJO1ZjnG9r3oxMuvjOUsCyyPvcar2ejZp9w --> X25519 vUAjBCE197YsckVNM4SYVIPBEESTWnBPCWnUlEwYs1I -L3l85DXFoAVm2ssHfjBeqRpWGlo1UGbmcNkEgoUB9fM ---- X/2O8ufjbTGrt2zCm4gSRqqoxT5v6a+13XjH4dpRsHs -Mkf"(qxF2BdMRYji ܴ<ґb_.!r+<Ussu?gD\V am(Ȉ&.& c/|w(WH4rѠ+j"B  \ No newline at end of file diff --git a/services/forgejo-nsc/README.md 
b/services/forgejo-nsc/README.md deleted file mode 100644 index 79058bb..0000000 --- a/services/forgejo-nsc/README.md +++ /dev/null @@ -1,179 +0,0 @@ -## forgejo-nsc-dispatcher - -This service exposes a simple HTTP API that tells Namespace Cloud to start -ephemeral Forgejo Actions runners on demand. It glues together three pieces: - -1. **Forgejo Actions** – the service requests a scoped registration token - for the repository/organization/instance where you want to run jobs. -2. **Namespace (`nsc`)** – the dispatcher shells out to the `nsc` CLI to create - a short‑lived environment, runs the `forgejo-runner` container inside it, - and exits after a single job (`forgejo-runner one-job`). The Namespace TTL is - the hard cap, not the typical lifetime. -3. **Your automation** – you call the service via HTTP (directly, through Caddy, - via Forgejo webhooks, etc.) whenever a new runner is needed. - -### Directory layout - -``` -. -├── cmd/forgejo-nsc-dispatcher # main entry point -├── internal/ # service packages (config, forgejo client, nsc dispatcher, HTTP server) -├── config.example.yaml # starter config referenced by README -├── flake.nix / flake.lock # reproducible builds (Go binary + container image) -└── .forgejo/workflows # CI that runs go test/build and publishes manifests -``` - -### Configuration - -Copy `config.example.yaml` and update it for your Forgejo instance and Namespace -profile. The important knobs are: - -- `forgejo.base_url` – HTTPS endpoint of your Forgejo server. A PAT with - `actions:runner` scope is required in `forgejo.token`. -- `forgejo.instance_url` – URL that spawned runners use to register back to Forgejo. - This must be reachable from the runner (typically the public URL like - `https://git.burrow.net`). On the forge host it commonly differs from `base_url` - (which may be `http://127.0.0.1:3000`). -- `forgejo.default_scope` – where new runners register - (`instance`, `organization`, or `repository`). 
-- `forgejo.default_labels` – labels applied to every spawned runner. GateForge - workflows via `runs-on: ["namespace-profile-linux-medium"]` (or other - `namespace-profile-linux-*` labels). -- `namespace.nsc_binary` – path to the `nsc` binary (the Nix container ships one - compiled from `namespacelabs/foundation` so `/app/bin/nsc` works out of the box). -- `namespace.image` – OCI image containing `forgejo-runner`. -- `namespace.machine_type` / `namespace.duration` – shape + TTL for the ephemeral - Namespace environment. The dispatcher destroys the instance after a job so the - TTL acts as a hard cap, not an idle timeout. - -### Running locally - -```shell -# Ensure nsc is available (e.g. `go build ./foundation/cmd/nsc`) -cp config.example.yaml config.yaml -nix develop # optional dev shell with Go toolchain -go run ./cmd/forgejo-nsc-dispatcher --config config.yaml -``` - -API example: - -```shell -curl -X POST http://localhost:8080/api/v1/dispatch \ - -H 'Content-Type: application/json' \ - -d '{ - "count": 1, - "ttl": "20m", - "labels": ["namespace-profile-linux-medium"], - "scope": {"level": "repository", "owner": "example", "name": "app"} - }' -``` - -### Deploying with Nix + GHCR - -- `nix build .#packages.x86_64-linux.container-amd64` produces a deterministic - tarball containing the service, the `nsc` binary, BusyBox, and `forgejo-runner`. -- The included `Build Container` workflow builds both `amd64` and `arm64` images - on Namespace runners and pushes them to `ghcr.io//`. - No Fly.io manifests are emitted – the multi‑arch manifest points only at GHCR. - -### How this fits behind Caddy (last-mile networking) - -The dispatcher is just an HTTP server. You can: - -1. Run it anywhere that can reach Forgejo and Namespace: bare metal, Namespace - cluster, Kubernetes, Fly, etc. -2. Put Caddy (or any reverse proxy) in front to terminate TLS, do auth, or - rewrite URLs. 
For example: - - ``` - forgejo-dispatcher.example.com { - reverse_proxy 127.0.0.1:8080 - basicauth /api/* { - user JDJhJDE... - } - } - ``` - -The service doesn’t assume Caddy, nor does it manipulate HTTP clients -directly – it simply waits for POST requests. As long as the dispatcher can -reach Forgejo’s REST API and run the `nsc` binary, you can drop it anywhere. - -### Autoscaling (webhook + poller) - -If you don’t want to call `/api/v1/dispatch` manually, there’s a companion -autoscaler (`cmd/forgejo-nsc-autoscaler`) that watches Forgejo job queues and -triggers the dispatcher for you. It operates in two modes simultaneously: - -1. **Polling** – every instance polls `GET /api/v1/.../actions/runners` to keep a - minimum number of idle Namespace runners per label. This continues until a - webhook is successfully processed, so the system is self-bootstrapping. -2. **Webhooks** – once Forgejo reaches the autoscaler via the `/webhook/{name}` - endpoint, the autoscaler stops polling and reacts to `workflow_job` events in - real time. Each payload is mapped to a target label set and results in a - dispatch call. 
- -You can manage multiple Forgejo instances by listing them under `instances` in -`autoscaler.example.yaml`: - -``` -listen: ":8090" -dispatcher: - url: "http://dispatcher:8080" - -instances: -- name: burrow - forgejo: - base_url: "https://git.burrow.net" - token: "PENDING-FORGEJO-PAT" - scope: - level: "repository" - owner: "burrow" - name: "burrow" - disable_polling: true # webhook-only mode - poll_interval: "30s" - webhook_secret: "supersecret" - webhook: - url: "https://nsc-autoscaler.burrow.net/webhook/burrow" - content_type: "json" - events: ["workflow_job"] - active: true - targets: - - labels: ["namespace-profile-linux-medium"] - min_idle: 0 # set to 0 to scale-to-zero between jobs - ttl: "20m" - - labels: ["namespace-profile-windows-large"] - min_idle: 0 - ttl: "45m" - machine_type: "windows/amd64:8x16" -``` - -For Burrow, use `Scripts/provision-forgejo-nsc.sh` to mint the Forgejo PAT, -generate a Namespace token from the logged-in namespace account, and render the -dispatcher/autoscaler configs into `intake/forgejo_nsc_{dispatcher,autoscaler}.yaml` -plus `intake/forgejo_nsc_token.txt`. - -For ongoing operations, use `Scripts/sync-forgejo-nsc-config.sh`: - -- `Scripts/sync-forgejo-nsc-config.sh` copies the intake-backed configs and - Namespace token onto `/var/lib/burrow/intake/` on the forge host, reapplies - file ownership for `forgejo-nsc`, and restarts the dispatcher/autoscaler. -- `Scripts/sync-forgejo-nsc-config.sh --rotate-pat` additionally mints a new - Forgejo PAT on the Burrow forge host and refreshes the local intake files. - -Run it next to the dispatcher: - -```bash -go run ./cmd/forgejo-nsc-autoscaler --config autoscaler.yaml -# or build the binary/container via `nix build .#forgejo-nsc-autoscaler` -``` - -If your Forgejo build doesn’t expose the runner listing API, set -`disable_polling: true` and rely on `webhook` entries. 
The autoscaler will -auto-create/update the webhook (using the PAT) so that new `workflow_job` events -immediately call the dispatcher even if the service isn’t publicly reachable yet. - -In Forgejo add a webhook pointing to `https://nsc-autoscaler.burrow.net/webhook/burrow` -with the shared secret (or let the autoscaler create it by specifying `webhook.url` -in config). The autoscaler continues polling until it receives the first valid -webhook (unless disabled), so you get capacity immediately even if outbound -webhooks from Forgejo aren’t yet configured. diff --git a/services/forgejo-nsc/autoscaler.example.yaml b/services/forgejo-nsc/autoscaler.example.yaml deleted file mode 100644 index 866d3b5..0000000 --- a/services/forgejo-nsc/autoscaler.example.yaml +++ /dev/null @@ -1,30 +0,0 @@ -listen: ":8090" -dispatcher: - url: "http://localhost:8080" - -instances: - - name: burrow - forgejo: - base_url: "https://git.burrow.net" - token: "PENDING-FORGEJO-PAT" - scope: - level: "repository" - owner: "burrow" - name: "burrow" - disable_polling: true - poll_interval: "30s" - webhook_secret: "supersecret" - webhook: - url: "https://nsc-autoscaler.burrow.net/webhook/burrow" - content_type: "json" - events: ["workflow_job"] - active: true - targets: - - labels: ["namespace-profile-linux-medium"] - min_idle: 1 - ttl: "20m" - machine_type: "8x16" - - labels: ["namespace-profile-windows-large"] - min_idle: 0 - ttl: "45m" - machine_type: "windows/amd64:8x16" diff --git a/services/forgejo-nsc/cmd/forgejo-nsc-autoscaler/main.go b/services/forgejo-nsc/cmd/forgejo-nsc-autoscaler/main.go deleted file mode 100644 index bdbb6f8..0000000 --- a/services/forgejo-nsc/cmd/forgejo-nsc-autoscaler/main.go +++ /dev/null @@ -1,46 +0,0 @@ -package main - -import ( - "context" - "flag" - "log/slog" - "os" - "os/signal" - "syscall" - - "namespacelabs.dev/foundation/std/tasks" - "namespacelabs.dev/foundation/std/tasks/simplelog" - - "github.com/burrow/forgejo-nsc/internal/autoscaler" -) - -func 
main() { - var configPath string - flag.StringVar(&configPath, "config", "autoscaler.yaml", "Path to the autoscaler config file") - flag.Parse() - - logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo})) - - cfg, err := autoscaler.LoadConfig(configPath) - if err != nil { - logger.Error("failed to load config", "error", err) - os.Exit(1) - } - - service, err := autoscaler.NewService(cfg) - if err != nil { - logger.Error("failed to initialize autoscaler", "error", err) - os.Exit(1) - } - - ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) - defer cancel() - ctx = tasks.WithSink(ctx, simplelog.NewSink(os.Stdout, 0)) - - if err := tasks.Action("autoscaler.run").Run(ctx, func(ctx context.Context) error { - return service.Start(ctx) - }); err != nil { - logger.Error("autoscaler exited", "error", err) - os.Exit(1) - } -} diff --git a/services/forgejo-nsc/cmd/forgejo-nsc-dispatcher/main.go b/services/forgejo-nsc/cmd/forgejo-nsc-dispatcher/main.go deleted file mode 100644 index 9dcbfb1..0000000 --- a/services/forgejo-nsc/cmd/forgejo-nsc-dispatcher/main.go +++ /dev/null @@ -1,90 +0,0 @@ -package main - -import ( - "context" - "flag" - "log/slog" - "net/http" - "os" - "os/signal" - "syscall" - "time" - - "github.com/burrow/forgejo-nsc/internal/app" - "github.com/burrow/forgejo-nsc/internal/config" - "github.com/burrow/forgejo-nsc/internal/forgejo" - "github.com/burrow/forgejo-nsc/internal/nsc" - "github.com/burrow/forgejo-nsc/internal/server" -) - -func main() { - var configPath string - flag.StringVar(&configPath, "config", "config.yaml", "Path to the dispatcher config file.") - flag.Parse() - - logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo})) - - cfg, err := config.Load(configPath) - if err != nil { - logger.Error("failed to load config", "error", err) - os.Exit(1) - } - - scope, err := cfg.Forgejo.DefaultScope.ToScope() - if err != nil { - 
logger.Error("invalid default scope", "error", err) - os.Exit(1) - } - - forgejoClient, err := forgejo.NewClient(cfg.Forgejo.BaseURL, cfg.Forgejo.Token) - if err != nil { - logger.Error("failed to create forgejo client", "error", err) - os.Exit(1) - } - - dispatcher, err := nsc.NewDispatcher(nsc.Options{ - BinaryPath: cfg.Namespace.NSCBinary, - ComputeBaseURL: cfg.Namespace.ComputeBaseURL, - DefaultImage: cfg.Namespace.Image, - DefaultMachine: cfg.Namespace.MachineType, - MacosBaseImageID: cfg.Namespace.MacosBaseImageID, - MacosMachineArch: cfg.Namespace.MacosMachineArch, - DefaultDuration: cfg.Namespace.Duration.Duration, - WorkDir: cfg.Namespace.WorkDir, - MaxParallel: cfg.Namespace.MaxParallel, - RunnerNamePrefix: cfg.Runner.NamePrefix, - Executor: cfg.Runner.Executor, - Network: cfg.Namespace.Network, - Logger: logger, - }) - if err != nil { - logger.Error("failed to create dispatcher", "error", err) - os.Exit(1) - } - - service := app.NewService(app.Config{ - DefaultScope: scope, - DefaultLabels: cfg.Forgejo.DefaultLabels, - InstanceURL: cfg.Forgejo.InstanceURL, - DefaultTTL: cfg.Namespace.Duration.Duration, - AllowLabels: cfg.Namespace.AllowLabels, - AllowScopes: cfg.Namespace.AllowScopes, - }, forgejoClient, dispatcher, logger) - - srv := server.New(cfg.Listen, service, logger) - - go func() { - logger.Info("dispatcher listening", "addr", cfg.Listen) - if err := srv.ListenAndServe(); err != nil && err != context.Canceled && err != http.ErrServerClosed { - logger.Error("server terminated", "error", err) - } - }() - - interrupt := make(chan os.Signal, 1) - signal.Notify(interrupt, syscall.SIGTERM, syscall.SIGINT) - <-interrupt - - ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) - defer cancel() - _ = srv.Shutdown(ctx) -} diff --git a/services/forgejo-nsc/config.example.yaml b/services/forgejo-nsc/config.example.yaml deleted file mode 100644 index 5dc7551..0000000 --- a/services/forgejo-nsc/config.example.yaml +++ /dev/null @@ -1,27 
+0,0 @@ -listen: ":8080" - -forgejo: - base_url: "https://forgejo.example.com" - token: "${FORGEJO_PERSONAL_ACCESS_TOKEN}" - default_scope: - level: "organization" - owner: "example" - default_labels: - - namespace-profile-linux-medium - timeout: "30s" - -namespace: - nsc_binary: "/app/bin/nsc" - compute_base_url: "https://ord4.compute.namespaceapis.com" - image: "ghcr.io/forgejo/runner:3" - machine_type: "8x16" - macos_base_image_id: "tahoe" - macos_machine_arch: "arm64" - duration: "30m" - workdir: "/var/lib/forgejo-runner" - max_parallel: 4 - network: "" - -runner: - name_prefix: "nscloud-" - executor: "shell" diff --git a/services/forgejo-nsc/deploy/autoscaler.yaml b/services/forgejo-nsc/deploy/autoscaler.yaml deleted file mode 100644 index 084b076..0000000 --- a/services/forgejo-nsc/deploy/autoscaler.yaml +++ /dev/null @@ -1,31 +0,0 @@ -listen: "127.0.0.1:8090" - -dispatcher: - url: "http://127.0.0.1:8080" - -instances: - - name: burrow - forgejo: - base_url: "http://127.0.0.1:3000" - token: "PENDING-FORGEJO-PAT" - scope: - level: "repository" - owner: "burrow" - name: "burrow" - disable_polling: false - poll_interval: "30s" - webhook_secret: "PENDING-WEBHOOK-SECRET" - webhook: - url: "https://nsc-autoscaler.burrow.net/webhook/burrow" - content_type: "json" - events: ["workflow_job"] - active: true - targets: - - labels: ["namespace-profile-linux-medium"] - min_idle: 0 - ttl: "20m" - machine_type: "8x16" - - labels: ["namespace-profile-windows-large"] - min_idle: 0 - ttl: "45m" - machine_type: "windows/amd64:8x16" diff --git a/services/forgejo-nsc/deploy/dispatcher.yaml b/services/forgejo-nsc/deploy/dispatcher.yaml deleted file mode 100644 index 6d2aac5..0000000 --- a/services/forgejo-nsc/deploy/dispatcher.yaml +++ /dev/null @@ -1,29 +0,0 @@ -listen: "127.0.0.1:8080" - -forgejo: - base_url: "http://127.0.0.1:3000" - instance_url: "https://git.burrow.net" - token: "PENDING-FORGEJO-PAT" - default_scope: - level: "repository" - owner: "burrow" - name: "burrow" - 
default_labels: - - namespace-profile-linux-medium - timeout: "30s" - -namespace: - nsc_binary: "/run/current-system/sw/bin/nsc" - compute_base_url: "https://ord4.compute.namespaceapis.com" - image: "code.forgejo.org/forgejo/runner:3" - machine_type: "8x16" - macos_base_image_id: "tahoe" - macos_machine_arch: "arm64" - duration: "30m" - workdir: "/var/lib/forgejo-runner" - max_parallel: 4 - network: "" - -runner: - name_prefix: "nscloud-" - executor: "shell" diff --git a/services/forgejo-nsc/go.mod b/services/forgejo-nsc/go.mod deleted file mode 100644 index 215aac1..0000000 --- a/services/forgejo-nsc/go.mod +++ /dev/null @@ -1,65 +0,0 @@ -module github.com/burrow/forgejo-nsc - -go 1.24.4 - -require ( - buf.build/gen/go/namespace/cloud/connectrpc/go v1.19.1-20260212004106-290ae81f8d6d.2 - buf.build/gen/go/namespace/cloud/protocolbuffers/go v1.36.11-20260212004106-290ae81f8d6d.1 - connectrpc.com/connect v1.19.1 - github.com/go-chi/chi/v5 v5.2.1 - github.com/google/uuid v1.6.0 - golang.org/x/crypto v0.48.0 - golang.org/x/sync v0.19.0 - google.golang.org/protobuf v1.36.11 - gopkg.in/yaml.v3 v3.0.1 - namespacelabs.dev/foundation v0.0.478 -) - -require ( - github.com/cenkalti/backoff/v5 v5.0.3 // indirect - github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/fsnotify/fsnotify v1.9.0 // indirect - github.com/go-logr/logr v1.4.3 // indirect - github.com/go-logr/stdr v1.2.2 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect - github.com/hashicorp/hcl v1.0.0 // indirect - github.com/jxskiss/base62 v1.1.0 // indirect - github.com/kr/text v0.2.0 // indirect - github.com/magiconair/properties v1.8.6 // indirect - github.com/mattn/go-runewidth v0.0.16 // indirect - github.com/mattn/go-zglob v0.0.3 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/morikuni/aec v1.0.0 // indirect - github.com/muesli/reflow v0.3.0 // indirect - github.com/pelletier/go-toml v1.9.5 // indirect - github.com/pelletier/go-toml/v2 v2.2.4 // 
indirect - github.com/rivo/uniseg v0.4.2 // indirect - github.com/segmentio/ksuid v1.0.4 // indirect - github.com/spf13/afero v1.9.2 // indirect - github.com/spf13/cast v1.7.0 // indirect - github.com/spf13/jwalterweatherman v1.1.0 // indirect - github.com/spf13/pflag v1.0.7 // indirect - github.com/spf13/viper v1.14.0 // indirect - github.com/subosito/gotenv v1.4.1 // indirect - go.opentelemetry.io/auto/sdk v1.2.1 // indirect - go.opentelemetry.io/otel v1.38.0 // indirect - go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect - go.opentelemetry.io/otel/metric v1.38.0 // indirect - go.opentelemetry.io/otel/sdk v1.38.0 // indirect - go.opentelemetry.io/otel/trace v1.38.0 // indirect - go.opentelemetry.io/proto/otlp v1.7.1 // indirect - golang.org/x/exp v0.0.0-20250911091902-df9299821621 // indirect - golang.org/x/net v0.49.0 // indirect - golang.org/x/sys v0.41.0 // indirect - golang.org/x/term v0.40.0 // indirect - golang.org/x/text v0.34.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect - google.golang.org/grpc v1.76.0 // indirect - gopkg.in/ini.v1 v1.67.0 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - helm.sh/helm/v3 v3.18.4 // indirect - namespacelabs.dev/go-ids v0.0.0-20221124082625-9fc72ee06af7 // indirect -) diff --git a/services/forgejo-nsc/go.sum b/services/forgejo-nsc/go.sum deleted file mode 100644 index 6e2a0a9..0000000 --- a/services/forgejo-nsc/go.sum +++ /dev/null @@ -1,575 +0,0 @@ -buf.build/gen/go/namespace/cloud/connectrpc/go v1.19.1-20260212004106-290ae81f8d6d.2 h1:XaeFtt6yN8G5q2uYoiTjyshOyai1Q+GzwfEKlxrTzVw= -buf.build/gen/go/namespace/cloud/connectrpc/go v1.19.1-20260212004106-290ae81f8d6d.2/go.mod 
h1:QvCL7PUDMFotMXVUoWMeRClEEnCbh7S51xHy39mO+H4= -buf.build/gen/go/namespace/cloud/protocolbuffers/go v1.36.11-20260212004106-290ae81f8d6d.1 h1:xTgPJaOj5QNRPAA3nxW3fTz01aAOLr/6SG7C4Iqxm54= -buf.build/gen/go/namespace/cloud/protocolbuffers/go v1.36.11-20260212004106-290ae81f8d6d.1/go.mod h1:Il2wpJNQB40Yj3Rmuhg5xKJPSXaZVwij+Q30d1PNuNY= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go/bigquery v1.0.1/go.mod 
h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= -connectrpc.com/connect v1.19.1 h1:R5M57z05+90EfEvCY1b7hBxDVOUl45PrtXtAV2fOC14= -connectrpc.com/connect v1.19.1/go.mod h1:tN20fjdGlewnSFeZxLKb0xwIZ6ozc3OQs2hTXy4du9w= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= 
-github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= -github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= -github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= -github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane 
v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= -github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= -github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= -github.com/go-chi/chi/v5 v5.2.1 h1:KOIHODQj58PmL80G2Eak4WdvUzjSJSm0vG72crDCqb8= -github.com/go-chi/chi/v5 v5.2.1/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= -github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache 
v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod 
h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= -github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= -github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof 
v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs= 
-github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jxskiss/base62 v1.1.0 h1:A5zbF8v8WXx2xixnAKD2w+abC+sIzYJX+nxmhA6HWFw= -github.com/jxskiss/base62 v1.1.0/go.mod h1:HhWAlUXvxKThfOlZbcuFzsqwtF5TcqS9ru3y5GfjWAc= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= -github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= -github.com/mattn/go-runewidth v0.0.12/go.mod 
h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= -github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= -github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-zglob v0.0.3 h1:6Ry4EYsScDyt5di4OI6xw1bYhOqfE5S33Z1OPy+d+To= -github.com/mattn/go-zglob v0.0.3/go.mod h1:9fxibJccNxU2cnpIKLRRFA7zX7qhkJIQWBb449FYHOo= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= -github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= -github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= -github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= -github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= -github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= -github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= -github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= -github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= 
-github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.4.2 h1:YwD0ulJSJytLpiaWua0sBDusfsCZohxjxzVTYjwxfV8= -github.com/rivo/uniseg v0.4.2/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= -github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= -github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= -github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= -github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw= -github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y= -github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w= -github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= -github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= -github.com/spf13/pflag v1.0.7 h1:vN6T9TfwStFPFM5XzjsvmzZkLuaLX+HS+0SeFLRgU6M= -github.com/spf13/pflag v1.0.7/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.14.0 h1:Rg7d3Lo706X9tHsJMUjdiwMpHB7W8WnSVOssIY+JElU= -github.com/spf13/viper v1.14.0/go.mod h1:WT//axPky3FdvXHzGw33dNdXXXfFQqmEalje+egj8As= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 
-github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= -github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= -github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= -go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= -go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= -go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8= -go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM= -go.opentelemetry.io/otel/exporters/jaeger v1.17.0 h1:D7UpUy2Xc2wsi1Ras6V40q806WM07rqoCWzXu7Sqy+4= -go.opentelemetry.io/otel/exporters/jaeger v1.17.0/go.mod h1:nPCqOnEH9rNLKqH/+rrUjiMzHJdV1BlpKcTwRTyKkKI= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 
h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 h1:lwI4Dc5leUqENgGuQImwLo4WnuXFPetmPpkLi2IrX54= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0/go.mod h1:Kz/oCE7z5wuyhPxsXDuaPteSWqjSBD5YaSdbxZYGbGk= -go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= -go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= -go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= -go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= -go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= -go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= -go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= -go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= -go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4= -go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE= -go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= -go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts= -golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20250911091902-df9299821621 h1:2id6c1/gto0kaHYyrixvknJ8tUK/Qs5IsmBtrc+FtgU= -golang.org/x/exp v0.0.0-20250911091902-df9299821621/go.mod h1:TwQYMMnGpvZyc+JpB/UAuTNIsVJifOlSkrZkhcvpVUk= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint 
v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod 
v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net 
v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o= -golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= 
-golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= -golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k= -golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg= -golang.org/x/term v0.40.0/go.mod 
h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk= -golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools 
v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools 
v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= -gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod 
h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto 
v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 h1:BIRfGDEjiHRrk0QKZe3Xv2ieMhtgRGeLcZQ0mIVn4EY= -google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 h1:eaY8u2EuxbRv7c3NiGK0/NedzVsCcV6hDuU5qPX5EGE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.mod 
h1:M4/wBTSeyLxupu3W3tJtOgB14jILAS/XWPSSa3TAlJc= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= -google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf 
v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE= -google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -helm.sh/helm/v3 v3.18.4 h1:pNhnHM3nAmDrxz6/UC+hfjDY4yeDATQCka2/87hkZXQ= -helm.sh/helm/v3 v3.18.4/go.mod h1:WVnwKARAw01iEdjpEkP7Ii1tT1pTPYfM1HsakFKM3LI= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -namespacelabs.dev/foundation v0.0.478 h1:3xFLZcrjih7Jjey2N7faSfr6EoBCg2LMTHipq/3Hlrg= -namespacelabs.dev/foundation v0.0.478/go.mod h1:svBrTIfZK773sytmjudGkCzQWNisxcQtcWNCs+uLznI= -namespacelabs.dev/go-ids v0.0.0-20221124082625-9fc72ee06af7 h1:8NlnfPlzDSJr8TYV/qarIWwhjLd1gOXf3Jme0M/oGBM= -namespacelabs.dev/go-ids v0.0.0-20221124082625-9fc72ee06af7/go.mod h1:J+Sd+ngeffnCsaO/M7zgs2bR8Klq/ZBhS0+bbnDEH2M= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/services/forgejo-nsc/internal/app/service.go b/services/forgejo-nsc/internal/app/service.go deleted file mode 100644 index 45b66eb..0000000 --- a/services/forgejo-nsc/internal/app/service.go +++ /dev/null @@ -1,253 +0,0 @@ -package app - -import ( - "context" - "errors" - "fmt" - "log/slog" - "strings" - "time" - - "golang.org/x/sync/errgroup" - - "github.com/burrow/forgejo-nsc/internal/forgejo" - 
"github.com/burrow/forgejo-nsc/internal/nsc" -) - -type Dispatcher interface { - LaunchRunner(ctx context.Context, req nsc.LaunchRequest) (string, error) -} - -type ForgejoClient interface { - RegistrationToken(ctx context.Context, scope forgejo.Scope) (string, error) -} - -type Service struct { - forgejo ForgejoClient - dispatcher Dispatcher - logger *slog.Logger - - defaultScope forgejo.Scope - defaultLabels []string - instanceURL string - defaultTTL time.Duration - - allowLabels map[string]struct{} - allowScopes map[string]struct{} -} - -type Config struct { - DefaultScope forgejo.Scope - DefaultLabels []string - InstanceURL string - DefaultTTL time.Duration - AllowLabels []string - AllowScopes []string -} - -func NewService(cfg Config, forgejo ForgejoClient, dispatcher Dispatcher, logger *slog.Logger) *Service { - if logger == nil { - logger = slog.Default() - } - allowLabels := make(map[string]struct{}, len(cfg.AllowLabels)) - for _, label := range cfg.AllowLabels { - allowLabels[normalizeLabel(label)] = struct{}{} - } - allowScopes := make(map[string]struct{}, len(cfg.AllowScopes)) - for _, scope := range cfg.AllowScopes { - allowScopes[scope] = struct{}{} - } - return &Service{ - defaultScope: cfg.DefaultScope, - defaultLabels: cfg.DefaultLabels, - instanceURL: cfg.InstanceURL, - defaultTTL: cfg.DefaultTTL, - forgejo: forgejo, - dispatcher: dispatcher, - logger: logger, - allowLabels: allowLabels, - allowScopes: allowScopes, - } -} - -type DispatchRequest struct { - Count int - Labels []string - Scope *Scope - TTL time.Duration - Machine string - Image string - ExtraEnv map[string]string -} - -type Scope struct { - Level string - Owner string - Name string -} - -type DispatchResponse struct { - Runners []RunnerHandle `json:"runners"` -} - -type RunnerHandle struct { - Name string `json:"name"` -} - -func (s *Service) Dispatch(ctx context.Context, req DispatchRequest) (DispatchResponse, error) { - count := req.Count - if count <= 0 { - count = 1 - } - - 
scope, err := s.mergeScope(req.Scope) - if err != nil { - return DispatchResponse{}, err - } - - labels, err := s.mergeLabels(req.Labels) - if err != nil { - return DispatchResponse{}, err - } - if len(labels) == 0 { - return DispatchResponse{}, errors.New("no runner labels resolved") - } - - ttl := req.TTL - if ttl == 0 { - ttl = s.defaultTTL - } - - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - res := DispatchResponse{ - Runners: make([]RunnerHandle, count), - } - eg, egCtx := errgroup.WithContext(ctx) - - for i := 0; i < count; i++ { - index := i - eg.Go(func() error { - token, err := s.forgejo.RegistrationToken(egCtx, scope) - if err != nil { - return fmt.Errorf("fetching registration token: %w", err) - } - - name, err := s.dispatcher.LaunchRunner(egCtx, nsc.LaunchRequest{ - Token: token, - InstanceURL: s.instanceURL, - Labels: labels, - Duration: ttl, - MachineType: req.Machine, - Image: req.Image, - ExtraEnv: req.ExtraEnv, - }) - if err != nil { - return err - } - - res.Runners[index] = RunnerHandle{Name: name} - return nil - }) - } - - if err := eg.Wait(); err != nil { - return DispatchResponse{}, err - } - - return res, nil -} - -func (s *Service) mergeScope(value *Scope) (forgejo.Scope, error) { - if value == nil { - return s.defaultScope, nil - } - - scope := forgejo.Scope{ - Level: forgejo.ScopeLevel(value.Level), - Owner: value.Owner, - Name: value.Name, - } - if scope.Level == "" { - return forgejo.Scope{}, errors.New("scope level is required") - } - switch scope.Level { - case forgejo.ScopeInstance: - if !s.scopeAllowed(scope) { - return forgejo.Scope{}, fmt.Errorf("scope %q not allowed", scopeKey(scope)) - } - return scope, nil - case forgejo.ScopeOrganization: - if scope.Owner == "" { - return forgejo.Scope{}, errors.New("organization scope requires owner") - } - if !s.scopeAllowed(scope) { - return forgejo.Scope{}, fmt.Errorf("scope %q not allowed", scopeKey(scope)) - } - return scope, nil - case forgejo.ScopeRepository: - if 
scope.Owner == "" || scope.Name == "" { - return forgejo.Scope{}, errors.New("repository scope requires owner and name") - } - if !s.scopeAllowed(scope) { - return forgejo.Scope{}, fmt.Errorf("scope %q not allowed", scopeKey(scope)) - } - return scope, nil - default: - return forgejo.Scope{}, fmt.Errorf("unsupported scope %q", scope.Level) - } -} - -func (s *Service) mergeLabels(labels []string) ([]string, error) { - var resolved []string - if len(labels) == 0 { - resolved = append([]string{}, s.defaultLabels...) - } else { - resolved = labels - } - if len(s.allowLabels) == 0 { - return resolved, nil - } - for _, label := range resolved { - norm := normalizeLabel(label) - if _, ok := s.allowLabels[norm]; !ok { - return nil, fmt.Errorf("label %q not allowed", label) - } - } - return resolved, nil -} - -func normalizeLabel(label string) string { - trimmed := strings.TrimSpace(label) - if trimmed == "" { - return "" - } - // Ignore any explicit executor suffix ("label:host"), since workflows - // and config allowlists typically deal in base label names. 
- if before, _, ok := strings.Cut(trimmed, ":"); ok { - return before - } - return trimmed -} - -func scopeKey(scope forgejo.Scope) string { - switch scope.Level { - case forgejo.ScopeInstance: - return "instance" - case forgejo.ScopeOrganization: - return fmt.Sprintf("organization:%s", scope.Owner) - case forgejo.ScopeRepository: - return fmt.Sprintf("repository:%s/%s", scope.Owner, scope.Name) - default: - return string(scope.Level) - } -} - -func (s *Service) scopeAllowed(scope forgejo.Scope) bool { - if len(s.allowScopes) == 0 { - return true - } - _, ok := s.allowScopes[scopeKey(scope)] - return ok -} diff --git a/services/forgejo-nsc/internal/app/service_test.go b/services/forgejo-nsc/internal/app/service_test.go deleted file mode 100644 index 2be3d3c..0000000 --- a/services/forgejo-nsc/internal/app/service_test.go +++ /dev/null @@ -1,160 +0,0 @@ -package app - -import ( - "context" - "sync" - "testing" - "time" - - "github.com/burrow/forgejo-nsc/internal/forgejo" - "github.com/burrow/forgejo-nsc/internal/nsc" -) - -type mockForgejo struct { - mu sync.Mutex - tokens []string - scopes []forgejo.Scope - err error - counter int -} - -func (m *mockForgejo) RegistrationToken(ctx context.Context, scope forgejo.Scope) (string, error) { - m.mu.Lock() - defer m.mu.Unlock() - m.scopes = append(m.scopes, scope) - if m.err != nil { - return "", m.err - } - if m.counter >= len(m.tokens) { - return "", context.Canceled - } - tok := m.tokens[m.counter] - m.counter++ - return tok, nil -} - -type mockDispatcher struct { - mu sync.Mutex - requests []nsc.LaunchRequest - responses []string - err error -} - -func (m *mockDispatcher) LaunchRunner(ctx context.Context, req nsc.LaunchRequest) (string, error) { - m.mu.Lock() - defer m.mu.Unlock() - if m.err != nil { - return "", m.err - } - m.requests = append(m.requests, req) - idx := len(m.requests) - 1 - if idx < len(m.responses) { - return m.responses[idx], nil - } - return "runner", nil -} - -func 
TestServiceDispatchUsesDefaults(t *testing.T) { - forgejoMock := &mockForgejo{tokens: []string{"token"}} - dispatcherMock := &mockDispatcher{responses: []string{"runner-default"}} - - cfg := Config{ - DefaultScope: forgejo.Scope{Level: forgejo.ScopeInstance}, - DefaultLabels: []string{"nscloud"}, - InstanceURL: "https://forgejo.example.com", - DefaultTTL: 15 * time.Minute, - } - - service := NewService(cfg, forgejoMock, dispatcherMock, nil) - - resp, err := service.Dispatch(context.Background(), DispatchRequest{}) - if err != nil { - t.Fatalf("Dispatch returned error: %v", err) - } - if len(resp.Runners) != 1 || resp.Runners[0].Name != "runner-default" { - t.Fatalf("unexpected dispatch response: %+v", resp) - } - - if len(forgejoMock.scopes) != 1 || forgejoMock.scopes[0].Level != forgejo.ScopeInstance { - t.Fatalf("expected default scope, got %+v", forgejoMock.scopes) - } - - if len(dispatcherMock.requests) != 1 { - t.Fatalf("expected one dispatcher call, got %d", len(dispatcherMock.requests)) - } - req := dispatcherMock.requests[0] - if req.InstanceURL != cfg.InstanceURL { - t.Fatalf("expected instance URL %s, got %s", cfg.InstanceURL, req.InstanceURL) - } - if got := req.Labels; len(got) != 1 || got[0] != "nscloud" { - t.Fatalf("expected default labels, got %v", got) - } - if req.Duration != cfg.DefaultTTL { - t.Fatalf("expected duration %v, got %v", cfg.DefaultTTL, req.Duration) - } -} - -func TestServiceDispatchCustomScopeAndCount(t *testing.T) { - forgejoMock := &mockForgejo{tokens: []string{"token-1", "token-2"}} - dispatcherMock := &mockDispatcher{responses: []string{"runner-1", "runner-2"}} - - cfg := Config{ - DefaultScope: forgejo.Scope{Level: forgejo.ScopeInstance}, - DefaultLabels: []string{"default"}, - InstanceURL: "https://forgejo.example.com", - DefaultTTL: 10 * time.Minute, - } - - service := NewService(cfg, forgejoMock, dispatcherMock, nil) - - reqScope := &Scope{Level: string(forgejo.ScopeRepository), Owner: "acme", Name: "repo"} - res, err := 
service.Dispatch(context.Background(), DispatchRequest{ - Count: 2, - Labels: []string{"custom"}, - Scope: reqScope, - TTL: 5 * time.Minute, - Machine: "4x8", - Image: "runner:latest", - ExtraEnv: map[string]string{"FOO": "bar"}, - }) - if err != nil { - t.Fatalf("Dispatch returned error: %v", err) - } - if len(res.Runners) != 2 { - t.Fatalf("expected two runners, got %+v", res) - } - - if len(forgejoMock.scopes) != 2 { - t.Fatalf("expected two scope calls, got %d", len(forgejoMock.scopes)) - } - for _, scope := range forgejoMock.scopes { - if scope.Level != forgejo.ScopeRepository || scope.Owner != "acme" || scope.Name != "repo" { - t.Fatalf("unexpected scope: %+v", scope) - } - } - - if len(dispatcherMock.requests) != 2 { - t.Fatalf("expected two dispatcher calls, got %d", len(dispatcherMock.requests)) - } - for _, call := range dispatcherMock.requests { - if call.MachineType != "4x8" || call.Image != "runner:latest" { - t.Fatalf("unexpected machine/image in %+v", call) - } - if call.Duration != 5*time.Minute { - t.Fatalf("expected TTL to override default, got %v", call.Duration) - } - if call.Labels[0] != "custom" { - t.Fatalf("expected custom labels, got %v", call.Labels) - } - if call.ExtraEnv["FOO"] != "bar" { - t.Fatalf("expected env passthrough, got %v", call.ExtraEnv) - } - } -} - -func TestServiceDispatchErrorsWithoutLabels(t *testing.T) { - service := NewService(Config{DefaultScope: forgejo.Scope{Level: forgejo.ScopeInstance}}, &mockForgejo{}, &mockDispatcher{}, nil) - if _, err := service.Dispatch(context.Background(), DispatchRequest{}); err == nil { - t.Fatalf("expected error when no labels are available") - } -} diff --git a/services/forgejo-nsc/internal/autoscaler/config.go b/services/forgejo-nsc/internal/autoscaler/config.go deleted file mode 100644 index 7603e67..0000000 --- a/services/forgejo-nsc/internal/autoscaler/config.go +++ /dev/null @@ -1,108 +0,0 @@ -package autoscaler - -import ( - "fmt" - "os" - "time" - - "gopkg.in/yaml.v3" - - 
"github.com/burrow/forgejo-nsc/internal/config" -) - -type Config struct { - Listen string `yaml:"listen"` - Dispatcher DispatcherConfig `yaml:"dispatcher"` - Instances []InstanceConfig `yaml:"instances"` -} - -type DispatcherConfig struct { - URL string `yaml:"url"` - Timeout config.Duration `yaml:"timeout"` -} - -type InstanceConfig struct { - Name string `yaml:"name"` - Forgejo ForgejoInstance `yaml:"forgejo"` - Scope config.ScopeConfig `yaml:"scope"` - PollInterval config.Duration `yaml:"poll_interval"` - DisablePolling bool `yaml:"disable_polling"` - WebhookSecret string `yaml:"webhook_secret"` - Webhook WebhookConfig `yaml:"webhook"` - Dispatcher *DispatcherConfig `yaml:"dispatcher"` - Targets []TargetConfig `yaml:"targets"` -} - -type ForgejoInstance struct { - BaseURL string `yaml:"base_url"` - Token string `yaml:"token"` -} - -type WebhookConfig struct { - URL string `yaml:"url"` - ContentType string `yaml:"content_type"` - Events []string `yaml:"events"` - Active *bool `yaml:"active"` -} - -type TargetConfig struct { - Labels []string `yaml:"labels"` - MinIdle int `yaml:"min_idle"` - TTL config.Duration `yaml:"ttl"` - MachineType string `yaml:"machine_type"` - Image string `yaml:"image"` - Env map[string]string `yaml:"env"` -} - -func LoadConfig(path string) (Config, error) { - data, err := os.ReadFile(path) - if err != nil { - return Config{}, err - } - var cfg Config - if err := yaml.Unmarshal(data, &cfg); err != nil { - return Config{}, err - } - if cfg.Listen == "" { - cfg.Listen = ":8090" - } - if cfg.Dispatcher.URL == "" { - return Config{}, fmt.Errorf("dispatcher.url is required") - } - if cfg.Dispatcher.Timeout.Duration == 0 { - cfg.Dispatcher.Timeout = config.Duration{Duration: 15 * time.Second} - } - if len(cfg.Instances) == 0 { - return Config{}, fmt.Errorf("at least one instance must be configured") - } - for i := range cfg.Instances { - inst := &cfg.Instances[i] - if inst.Name == "" { - return Config{}, fmt.Errorf("instance[%d] missing name", 
i) - } - if inst.Forgejo.BaseURL == "" || inst.Forgejo.Token == "" { - return Config{}, fmt.Errorf("instance %s missing forgejo.base_url or token", inst.Name) - } - if inst.PollInterval.Duration == 0 { - inst.PollInterval = config.Duration{Duration: 30 * time.Second} - } - if len(inst.Webhook.Events) == 0 { - inst.Webhook.Events = []string{"workflow_job"} - } - if inst.Webhook.ContentType == "" { - inst.Webhook.ContentType = "json" - } - if len(inst.Targets) == 0 { - return Config{}, fmt.Errorf("instance %s requires at least one target", inst.Name) - } - for ti, tgt := range inst.Targets { - if len(tgt.Labels) == 0 { - return Config{}, fmt.Errorf("instance %s target[%d] missing labels", inst.Name, ti) - } - if tgt.MinIdle < 0 { - return Config{}, fmt.Errorf("instance %s target[%d] min_idle must be >= 0", inst.Name, ti) - } - } - } - return cfg, nil -} diff --git a/services/forgejo-nsc/internal/autoscaler/service.go b/services/forgejo-nsc/internal/autoscaler/service.go deleted file mode 100644 index 08d4a42..0000000 --- a/services/forgejo-nsc/internal/autoscaler/service.go +++ /dev/null @@ -1,385 +0,0 @@ -package autoscaler - -import ( - "bytes" - "context" - "crypto/hmac" - "crypto/sha256" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "strings" - "sync" - "sync/atomic" - "time" - - "github.com/go-chi/chi/v5" - - "namespacelabs.dev/foundation/std/tasks" - - "github.com/burrow/forgejo-nsc/internal/forgejo" -) - -type Service struct { - listen string - controllers map[string]*InstanceController - router chi.Router -} - -func NewService(cfg Config) (*Service, error) { - controllers := make(map[string]*InstanceController) - for _, inst := range cfg.Instances { - scope, err := inst.Scope.ToScope() - if err != nil { - return nil, err - } - forgejoClient, err := forgejo.NewClient(inst.Forgejo.BaseURL, inst.Forgejo.Token) - if err != nil { - return nil, err - } - dispCfg := cfg.Dispatcher - if inst.Dispatcher != nil && inst.Dispatcher.URL != "" 
{ - dispCfg = *inst.Dispatcher - if dispCfg.Timeout.Duration == 0 { - dispCfg.Timeout = cfg.Dispatcher.Timeout - } - } - dClient := newDispatcherClient(dispCfg.URL, dispCfg.Timeout.Duration) - webhookActive := true - if inst.Webhook.Active != nil { - webhookActive = *inst.Webhook.Active - } - controller := &InstanceController{ - name: inst.Name, - cfg: inst, - scope: scope, - forgejo: forgejoClient, - dispatcher: dClient, - webhook: forgejo.WebhookConfig{ - URL: inst.Webhook.URL, - ContentType: inst.Webhook.ContentType, - Events: inst.Webhook.Events, - Active: webhookActive, - }, - secret: inst.WebhookSecret, - } - controllers[inst.Name] = controller - } - - router := chi.NewRouter() - service := &Service{ - listen: cfg.Listen, - controllers: controllers, - router: router, - } - - router.Get("/healthz", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte("ok")) - }) - router.Post("/webhook/{instance}", service.handleWebhook) - - return service, nil -} - -func (s *Service) Start(ctx context.Context) error { - for _, controller := range s.controllers { - if err := controller.EnsureWebhook(ctx); err != nil { - return err - } - } - - var wg sync.WaitGroup - for _, controller := range s.controllers { - wg.Add(1) - go func(c *InstanceController) { - defer wg.Done() - c.Run(ctx) - }(controller) - } - - srv := &http.Server{ - Addr: s.listen, - Handler: s.router, - } - - go func() { - <-ctx.Done() - _ = srv.Shutdown(context.Background()) - }() - - if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { - return err - } - wg.Wait() - return nil -} - -func (s *Service) handleWebhook(w http.ResponseWriter, r *http.Request) { - name := chi.URLParam(r, "instance") - controller, ok := s.controllers[name] - if !ok { - http.Error(w, "unknown instance", http.StatusNotFound) - return - } - body, err := io.ReadAll(r.Body) - if err != nil { - http.Error(w, "invalid body", http.StatusBadRequest) - return - } 
- if controller.cfg.WebhookSecret != "" { - signature := r.Header.Get("X-Gitea-Signature") - if signature == "" { - http.Error(w, "missing signature", http.StatusUnauthorized) - return - } - if !verifySignature(controller.cfg.WebhookSecret, signature, body) { - http.Error(w, "invalid signature", http.StatusUnauthorized) - return - } - } - - var payload workflowJobPayload - if err := json.Unmarshal(body, &payload); err != nil { - http.Error(w, "bad payload", http.StatusBadRequest) - return - } - - controller.MarkWebhookSeen() - if payload.Action == "queued" { - controller.DispatchForJob(r.Context(), payload) - } - - w.WriteHeader(http.StatusAccepted) -} - -type workflowJobPayload struct { - Action string `json:"action"` - WorkflowJob struct { - Labels []string `json:"labels"` - } `json:"workflow_job"` -} - -type InstanceController struct { - name string - cfg InstanceConfig - scope forgejo.Scope - forgejo *forgejo.Client - dispatcher *dispatcherClient - ready atomic.Bool - webhook forgejo.WebhookConfig - secret string -} - -func (c *InstanceController) EnsureWebhook(ctx context.Context) error { - if c.webhook.URL == "" { - return nil - } - return tasks.Action("autoscaler.ensure-webhook").Arg("instance", c.name).Run(ctx, func(ctx context.Context) error { - return c.forgejo.EnsureWebhook(ctx, c.scope, c.webhook, c.secret) - }) -} - -func (c *InstanceController) Run(ctx context.Context) { - if c.cfg.DisablePolling { - <-ctx.Done() - return - } - ticker := time.NewTicker(c.cfg.PollInterval.Duration) - defer ticker.Stop() - for { - select { - case <-ctx.Done(): - return - case <-ticker.C: - _ = tasks.Action("autoscaler.poll").Arg("instance", c.name).Run(ctx, func(ctx context.Context) error { - return c.reconcile(ctx) - }) - } - } -} - -func (c *InstanceController) reconcile(ctx context.Context) error { - runners, err := c.forgejo.ListRunners(ctx, c.scope) - if err != nil { - // Keep polling even if runner listing fails; we can still dispatch based on queued jobs. 
- runners = nil - } - - for _, target := range c.cfg.Targets { - idle := countIdle(runners, target.Labels) - - need := 0 - if idle < target.MinIdle { - need = target.MinIdle - idle - } - - jobs, jobErr := c.forgejo.ListRunJobs(ctx, c.scope, target.Labels) - if jobErr != nil { - return jobErr - } - waiting := countWaitingJobs(jobs, target.Labels) - // Scale-to-zero friendly: if anything is waiting and there are no idle runners - // for that label set, dispatch exactly one runner to unblock the queue. - if waiting > 0 && idle == 0 && need < 1 { - need = 1 - } - - if need <= 0 { - continue - } - if err := c.dispatch(ctx, target, need, "poll"); err != nil { - return err - } - } - return nil -} - -func (c *InstanceController) dispatch(ctx context.Context, target TargetConfig, count int, reason string) error { - if count <= 0 { - return nil - } - req := dispatcherRequest{ - Count: count, - Labels: target.Labels, - } - if target.TTL.Duration > 0 { - req.TTL = target.TTL.Duration.String() - } - if target.MachineType != "" { - req.MachineType = target.MachineType - } - if target.Image != "" { - req.Image = target.Image - } - if len(target.Env) > 0 { - req.Env = target.Env - } - return tasks.Action("autoscaler.dispatch").Arg("instance", c.name).Arg("reason", reason).Arg("labels", strings.Join(target.Labels, ",")).Run(ctx, func(ctx context.Context) error { - return c.dispatcher.Dispatch(ctx, req) - }) -} - -func (c *InstanceController) DispatchForJob(ctx context.Context, payload workflowJobPayload) { - action := strings.ToLower(payload.Action) - if action != "queued" && action != "waiting" { - return - } - jobLabels := payload.WorkflowJob.Labels - for _, target := range c.cfg.Targets { - if labelsMatch(jobLabels, target.Labels) { - _ = c.dispatch(ctx, target, 1, "webhook") - return - } - } -} - -func (c *InstanceController) MarkWebhookSeen() { - c.ready.Store(true) -} - -func countIdle(runners []forgejo.Runner, labels []string) int { - count := 0 - for _, runner := range 
runners { - if strings.ToLower(runner.Status) != "online" || runner.Busy { - continue - } - if labelsMatch(extractLabels(runner.Labels), labels) { - count++ - } - } - return count -} - -func countWaitingJobs(jobs []forgejo.RunJob, labels []string) int { - count := 0 - for _, job := range jobs { - if status := strings.ToLower(job.Status); status != "waiting" && status != "queued" { - continue - } - if labelsMatch(job.RunsOn, labels) { - count++ - } - } - return count -} - -func extractLabels(src []forgejo.RunnerLabel) []string { - result := make([]string, 0, len(src)) - for _, lbl := range src { - result = append(result, lbl.Name) - } - return result -} - -func labelsMatch(have, want []string) bool { - set := make(map[string]struct{}, len(have)) - for _, label := range have { - set[label] = struct{}{} - } - for _, label := range want { - if _, ok := set[label]; !ok { - return false - } - } - return true -} - -func verifySignature(secret, signature string, body []byte) bool { - parts := strings.SplitN(signature, "=", 2) - if len(parts) == 2 { - signature = parts[1] - } - mac := hmac.New(sha256.New, []byte(secret)) - mac.Write(body) - expected := hex.EncodeToString(mac.Sum(nil)) - return hmac.Equal([]byte(expected), []byte(signature)) -} - -type dispatcherClient struct { - url string - client *http.Client -} - -type dispatcherRequest struct { - Count int `json:"count"` - Labels []string `json:"labels"` - TTL string `json:"ttl,omitempty"` - MachineType string `json:"machine_type,omitempty"` - Image string `json:"image,omitempty"` - Env map[string]string `json:"env,omitempty"` -} - -func newDispatcherClient(url string, timeout time.Duration) *dispatcherClient { - if timeout == 0 { - timeout = 30 * time.Second - } - return &dispatcherClient{ - url: url, - client: &http.Client{ - Timeout: timeout, - }, - } -} - -func (d *dispatcherClient) Dispatch(ctx context.Context, req dispatcherRequest) error { - body, _ := json.Marshal(req) - endpoint := strings.TrimSuffix(d.url, 
"/") + "/api/v1/dispatch" - httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body)) - if err != nil { - return err - } - httpReq.Header.Set("Content-Type", "application/json") - resp, err := d.client.Do(httpReq) - if err != nil { - return err - } - defer resp.Body.Close() - if resp.StatusCode >= 300 { - return fmt.Errorf("dispatcher returned %s", resp.Status) - } - return nil -} diff --git a/services/forgejo-nsc/internal/config/config.go b/services/forgejo-nsc/internal/config/config.go deleted file mode 100644 index 264cbd0..0000000 --- a/services/forgejo-nsc/internal/config/config.go +++ /dev/null @@ -1,185 +0,0 @@ -package config - -import ( - "errors" - "fmt" - "os" - "strings" - "time" - - "gopkg.in/yaml.v3" - - "github.com/burrow/forgejo-nsc/internal/forgejo" -) - -// Duration wraps time.Duration to support YAML unmarshalling from strings. -type Duration struct { - time.Duration -} - -// UnmarshalYAML implements yaml.v3 unmarshalling for Duration. -func (d *Duration) UnmarshalYAML(value *yaml.Node) error { - switch value.Tag { - case "!!int": - var seconds int64 - if err := value.Decode(&seconds); err != nil { - return err - } - d.Duration = time.Duration(seconds) * time.Second - return nil - default: - parsed, err := time.ParseDuration(value.Value) - if err != nil { - return err - } - d.Duration = parsed - return nil - } -} - -// MarshalYAML implements yaml.v3 marshalling. -func (d Duration) MarshalYAML() (any, error) { - return d.Duration.String(), nil -} - -type Config struct { - Listen string `yaml:"listen"` - Forgejo ForgejoConfig `yaml:"forgejo"` - Namespace NamespaceConfig `yaml:"namespace"` - Runner RunnerConfig `yaml:"runner"` -} - -type ForgejoConfig struct { - BaseURL string `yaml:"base_url"` - // InstanceURL is the URL runners should use when registering with Forgejo. - // This must be reachable from the spawned runner (e.g. 
the public URL like - // https://git.burrow.net), and may differ from BaseURL (which can be a local - // loopback URL on the forge host). - InstanceURL string `yaml:"instance_url"` - Token string `yaml:"token"` - DefaultScope ScopeConfig `yaml:"default_scope"` - DefaultLabels []string `yaml:"default_labels"` - Timeout Duration `yaml:"timeout"` - ExtraHeaders yaml.Node `yaml:"extra_headers"` -} - -type ScopeConfig struct { - Level string `yaml:"level"` - Owner string `yaml:"owner,omitempty"` - Name string `yaml:"name,omitempty"` -} - -type NamespaceConfig struct { - NSCBinary string `yaml:"nsc_binary"` - // ComputeBaseURL is the Namespace Cloud Compute API endpoint (Connect RPC base URL). - // This is used for macOS runners, since NSC "run" is container-based (Linux-only). - // Example: "https://ord4.compute.namespaceapis.com" - ComputeBaseURL string `yaml:"compute_base_url"` - Image string `yaml:"image"` - MachineType string `yaml:"machine_type"` - // MacosBaseImageID selects which macOS base image to use (e.g. "tahoe"). - MacosBaseImageID string `yaml:"macos_base_image_id"` - // MacosMachineArch is the architecture used for macOS instances (typically "arm64"). 
- MacosMachineArch string `yaml:"macos_machine_arch"` - Duration Duration `yaml:"duration"` - WorkDir string `yaml:"workdir"` - MaxParallel int64 `yaml:"max_parallel"` - Environment []string `yaml:"environment"` - AllowLabels []string `yaml:"allow_labels"` - AllowScopes []string `yaml:"allow_scopes"` - Network string `yaml:"network"` - InstanceTags []string `yaml:"instance_tags"` -} - -type RunnerConfig struct { - NamePrefix string `yaml:"name_prefix"` - Executor string `yaml:"executor"` -} - -func Load(path string) (*Config, error) { - data, err := os.ReadFile(path) - if err != nil { - return nil, err - } - - var cfg Config - if err := yaml.Unmarshal(data, &cfg); err != nil { - return nil, err - } - - if err := cfg.Validate(); err != nil { - return nil, err - } - - return &cfg, nil -} - -func (c *Config) Validate() error { - if c.Listen == "" { - c.Listen = ":8080" - } - if c.Runner.NamePrefix == "" { - c.Runner.NamePrefix = "nscloud-" - } - if c.Runner.Executor == "" { - c.Runner.Executor = "shell" - } - - if c.Forgejo.BaseURL == "" { - return errors.New("forgejo.base_url is required") - } - if c.Forgejo.InstanceURL == "" { - // Backwards-compatible default: assume runners can reach the same URL. 
- c.Forgejo.InstanceURL = c.Forgejo.BaseURL - } - if c.Forgejo.Token == "" { - return errors.New("forgejo.token is required") - } - if c.Forgejo.Timeout.Duration == 0 { - c.Forgejo.Timeout.Duration = 30 * time.Second - } - if _, err := c.Forgejo.DefaultScope.ToScope(); err != nil { - return err - } - - if c.Namespace.NSCBinary == "" { - c.Namespace.NSCBinary = "nsc" - } - if c.Namespace.Image == "" { - c.Namespace.Image = "code.forgejo.org/forgejo/runner:11" - } - if c.Namespace.MacosBaseImageID == "" { - c.Namespace.MacosBaseImageID = "tahoe" - } - if c.Namespace.MacosMachineArch == "" { - c.Namespace.MacosMachineArch = "arm64" - } - if c.Namespace.Duration.Duration == 0 { - c.Namespace.Duration.Duration = 30 * time.Minute - } - if c.Namespace.MaxParallel <= 0 { - c.Namespace.MaxParallel = 4 - } - - return nil -} - -func (s ScopeConfig) ToScope() (forgejo.Scope, error) { - level := forgejo.ScopeLevel(strings.ToLower(s.Level)) - switch level { - case forgejo.ScopeInstance: - return forgejo.Scope{Level: level}, nil - case forgejo.ScopeOrganization: - if s.Owner == "" { - return forgejo.Scope{}, errors.New("forgejo default scope requires owner for organization level") - } - return forgejo.Scope{Level: level, Owner: s.Owner}, nil - case forgejo.ScopeRepository: - if s.Owner == "" || s.Name == "" { - return forgejo.Scope{}, errors.New("forgejo default scope requires owner and name for repository level") - } - return forgejo.Scope{Level: level, Owner: s.Owner, Name: s.Name}, nil - default: - return forgejo.Scope{}, fmt.Errorf("unknown scope level %q", s.Level) - } -} diff --git a/services/forgejo-nsc/internal/config/config_test.go b/services/forgejo-nsc/internal/config/config_test.go deleted file mode 100644 index e42f3c9..0000000 --- a/services/forgejo-nsc/internal/config/config_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package config - -import ( - "os" - "path/filepath" - "testing" - "time" -) - -func TestLoadConfig(t *testing.T) { - dir := t.TempDir() - path := 
filepath.Join(dir, "config.yaml") - content := ` -listen: ":9090" -forgejo: - base_url: https://forgejo.test - token: abc - default_scope: - level: instance -namespace: - nsc_binary: /usr/bin/nsc - image: ghcr.io/forgejo/runner:3 - duration: 15m -runner: - name_prefix: custom- -` - if err := os.WriteFile(path, []byte(content), 0o600); err != nil { - t.Fatal(err) - } - - cfg, err := Load(path) - if err != nil { - t.Fatalf("Load() error = %v", err) - } - if cfg.Listen != ":9090" { - t.Fatalf("unexpected listen addr: %s", cfg.Listen) - } - if cfg.Namespace.Duration.Duration != 15*time.Minute { - t.Fatalf("duration parsing failed: %s", cfg.Namespace.Duration.Duration) - } -} diff --git a/services/forgejo-nsc/internal/forgejo/client.go b/services/forgejo-nsc/internal/forgejo/client.go deleted file mode 100644 index 7f63e0c..0000000 --- a/services/forgejo-nsc/internal/forgejo/client.go +++ /dev/null @@ -1,454 +0,0 @@ -package forgejo - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "net/http" - "net/url" - "path" - "strings" - "time" -) - -type ScopeLevel string - -const ( - ScopeInstance ScopeLevel = "instance" - ScopeOrganization ScopeLevel = "organization" - ScopeRepository ScopeLevel = "repository" -) - -type Scope struct { - Level ScopeLevel - Owner string - Name string -} - -type Client struct { - baseURL *url.URL - token string - client *http.Client -} - -type Runner struct { - ID int64 `json:"id"` - Name string `json:"name"` - Status string `json:"status"` - Busy bool `json:"busy"` - Labels []RunnerLabel `json:"labels"` -} - -type RunnerLabel struct { - Name string `json:"name"` -} - -type RunJob struct { - ID int64 `json:"id"` - Name string `json:"name"` - RunsOn []string `json:"runs_on"` - Status string `json:"status"` - TaskID int64 `json:"task_id"` -} - -type WebhookConfig struct { - URL string - ContentType string - Events []string - Active bool -} - -type Option func(*Client) - -func WithHTTPClient(httpClient *http.Client) Option { - 
return func(c *Client) { - if httpClient != nil { - c.client = httpClient - } - } -} - -func NewClient(rawURL, token string, opts ...Option) (*Client, error) { - if rawURL == "" { - return nil, errors.New("forgejo base URL is required") - } - - u, err := url.Parse(rawURL) - if err != nil { - return nil, err - } - - client := &Client{ - baseURL: u, - token: strings.TrimSpace(token), - client: &http.Client{ - Timeout: 30 * time.Second, - }, - } - - for _, opt := range opts { - opt(client) - } - - if client.token == "" { - return nil, errors.New("forgejo token is required") - } - - return client, nil -} - -type registrationTokenResponse struct { - Token string `json:"token"` - TTL time.Time `json:"expires_at"` -} - -func (c *Client) RegistrationToken(ctx context.Context, scope Scope) (string, error) { - endpoint, err := c.registrationEndpoint(scope) - if err != nil { - return "", err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return "", err - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Accept", "application/json") - - resp, err := c.client.Do(req) - if err != nil { - return "", err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return "", fmt.Errorf("forgejo returned %s", resp.Status) - } - - var decoded registrationTokenResponse - if err := json.NewDecoder(resp.Body).Decode(&decoded); err != nil { - return "", err - } - if decoded.Token == "" { - return "", errors.New("forgejo response missing token") - } - - return decoded.Token, nil -} - -func (c *Client) ListRunners(ctx context.Context, scope Scope) ([]Runner, error) { - endpoint, err := c.runnersEndpoint(scope) - if err != nil { - return nil, err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, err - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Accept", "application/json") - - resp, err 
:= c.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return nil, fmt.Errorf("forgejo returned %s", resp.Status) - } - - var decoded []Runner - if err := json.NewDecoder(resp.Body).Decode(&decoded); err != nil { - return nil, err - } - - return decoded, nil -} - -func (c *Client) ListRunJobs(ctx context.Context, scope Scope, labels []string) ([]RunJob, error) { - endpoint, err := c.runJobsEndpoint(scope) - if err != nil { - return nil, err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, err - } - if len(labels) > 0 { - query := req.URL.Query() - query.Set("labels", strings.Join(labels, ",")) - req.URL.RawQuery = query.Encode() - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Accept", "application/json") - - resp, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return nil, fmt.Errorf("forgejo returned %s", resp.Status) - } - - var decoded []RunJob - if err := json.NewDecoder(resp.Body).Decode(&decoded); err != nil { - return nil, err - } - - if decoded == nil { - decoded = []RunJob{} - } - return decoded, nil -} - -func (c *Client) EnsureWebhook(ctx context.Context, scope Scope, cfg WebhookConfig, secret string) error { - if cfg.URL == "" { - return nil - } - - hooks, err := c.listWebhooks(ctx, scope) - if err != nil { - return err - } - - for _, hook := range hooks { - if strings.EqualFold(hook.Config.URL, cfg.URL) { - return c.updateWebhook(ctx, scope, hook.ID, cfg, secret) - } - } - - return c.createWebhook(ctx, scope, cfg, secret) -} - -func (c *Client) registrationEndpoint(scope Scope) (string, error) { - var segments []string - switch scope.Level { - case ScopeRepository: - if scope.Owner == "" || scope.Name == "" { - return "", errors.New("repository scope requires owner and name") - } - segments = []string{"api", 
"v1", "repos", scope.Owner, scope.Name, "actions", "runners", "registration-token"} - case ScopeOrganization: - if scope.Owner == "" { - return "", errors.New("organization scope requires owner") - } - segments = []string{"api", "v1", "orgs", scope.Owner, "actions", "runners", "registration-token"} - case ScopeInstance: - segments = []string{"api", "v1", "admin", "actions", "runners", "registration-token"} - default: - return "", fmt.Errorf("unsupported scope level %q", scope.Level) - } - - clone := *c.baseURL - clone.Path = path.Join(append([]string{clone.Path}, segments...)...) - return clone.String(), nil -} - -type webhook struct { - ID int64 `json:"id"` - Config webhookConfigPayload `json:"config"` -} - -type webhookConfigPayload struct { - URL string `json:"url"` - ContentType string `json:"content_type"` -} - -func (c *Client) listWebhooks(ctx context.Context, scope Scope) ([]webhook, error) { - endpoint, err := c.webhooksEndpoint(scope) - if err != nil { - return nil, err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) - if err != nil { - return nil, err - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Accept", "application/json") - - resp, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return nil, fmt.Errorf("forgejo returned %s", resp.Status) - } - - var hooks []webhook - if err := json.NewDecoder(resp.Body).Decode(&hooks); err != nil { - return nil, err - } - - return hooks, nil -} - -func (c *Client) createWebhook(ctx context.Context, scope Scope, cfg WebhookConfig, secret string) error { - payload := webhookRequestPayload{ - Type: "gitea", - Config: map[string]string{ - "url": cfg.URL, - "content_type": cfg.ContentType, - "secret": secret, - "insecure_ssl": "0", - }, - Events: cfg.Events, - Active: cfg.Active, - } - - body, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint, 
err := c.webhooksEndpoint(scope) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body)) - if err != nil { - return err - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Content-Type", "application/json") - - resp, err := c.client.Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return fmt.Errorf("forgejo returned %s", resp.Status) - } - - return nil -} - -func (c *Client) updateWebhook(ctx context.Context, scope Scope, id int64, cfg WebhookConfig, secret string) error { - payload := webhookRequestPayload{ - Type: "gitea", - Config: map[string]string{ - "url": cfg.URL, - "content_type": cfg.ContentType, - "secret": secret, - "insecure_ssl": "0", - }, - Events: cfg.Events, - Active: cfg.Active, - } - - body, err := json.Marshal(payload) - if err != nil { - return err - } - - endpoint, err := c.webhooksEndpoint(scope) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPatch, fmt.Sprintf("%s/%d", endpoint, id), bytes.NewReader(body)) - if err != nil { - return err - } - req.Header.Set("Authorization", fmt.Sprintf("token %s", c.token)) - req.Header.Set("Content-Type", "application/json") - - resp, err := c.client.Do(req) - if err != nil { - return err - } - defer resp.Body.Close() - - if resp.StatusCode >= 400 { - return fmt.Errorf("forgejo returned %s", resp.Status) - } - - return nil -} - -func (c *Client) webhooksEndpoint(scope Scope) (string, error) { - var segments []string - switch scope.Level { - case ScopeRepository: - if scope.Owner == "" || scope.Name == "" { - return "", errors.New("repository scope requires owner and name") - } - segments = []string{"api", "v1", "repos", scope.Owner, scope.Name, "hooks"} - case ScopeOrganization: - if scope.Owner == "" { - return "", errors.New("organization scope requires owner") - } - segments = []string{"api", 
"v1", "orgs", scope.Owner, "hooks"} - default: - return "", fmt.Errorf("webhook management not supported for scope level %q", scope.Level) - } - - clone := *c.baseURL - clone.Path = path.Join(append([]string{clone.Path}, segments...)...) - return clone.String(), nil -} - -type webhookRequestPayload struct { - Type string `json:"type"` - Config map[string]string `json:"config"` - Events []string `json:"events"` - Active bool `json:"active"` -} - -func (c *Client) runnersEndpoint(scope Scope) (string, error) { - var segments []string - switch scope.Level { - case ScopeRepository: - if scope.Owner == "" || scope.Name == "" { - return "", errors.New("repository scope requires owner and name") - } - segments = []string{"api", "v1", "repos", scope.Owner, scope.Name, "actions", "runners"} - case ScopeOrganization: - if scope.Owner == "" { - return "", errors.New("organization scope requires owner") - } - segments = []string{"api", "v1", "orgs", scope.Owner, "actions", "runners"} - case ScopeInstance: - segments = []string{"api", "v1", "actions", "runners"} - default: - return "", fmt.Errorf("unsupported scope level %q", scope.Level) - } - - clone := *c.baseURL - clone.Path = path.Join(append([]string{clone.Path}, segments...)...) 
- return clone.String(), nil -} - -func (c *Client) runJobsEndpoint(scope Scope) (string, error) { - var segments []string - switch scope.Level { - case ScopeRepository: - if scope.Owner == "" || scope.Name == "" { - return "", errors.New("repository scope requires owner and name") - } - segments = []string{"api", "v1", "repos", scope.Owner, scope.Name, "actions", "runners", "jobs"} - case ScopeOrganization: - if scope.Owner == "" { - return "", errors.New("organization scope requires owner") - } - segments = []string{"api", "v1", "orgs", scope.Owner, "actions", "runners", "jobs"} - default: - return "", fmt.Errorf("run jobs not supported for scope level %q", scope.Level) - } - - clone := *c.baseURL - clone.Path = path.Join(append([]string{clone.Path}, segments...)...) - return clone.String(), nil -} diff --git a/services/forgejo-nsc/internal/nsc/dispatcher.go b/services/forgejo-nsc/internal/nsc/dispatcher.go deleted file mode 100644 index 49cb4ec..0000000 --- a/services/forgejo-nsc/internal/nsc/dispatcher.go +++ /dev/null @@ -1,460 +0,0 @@ -package nsc - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "log/slog" - "os/exec" - "strings" - "time" - - "github.com/google/uuid" - "golang.org/x/sync/semaphore" -) - -type Options struct { - BinaryPath string - DefaultImage string - DefaultMachine string - DefaultDuration time.Duration - WorkDir string - MaxParallel int64 - RunnerNamePrefix string - Executor string - Network string - ComputeBaseURL string - MacosBaseImageID string - MacosMachineArch string - Logger *slog.Logger -} - -type LaunchRequest struct { - Token string - InstanceURL string - Labels []string - Duration time.Duration - MachineType string - Image string - ExtraEnv map[string]string -} - -type Dispatcher struct { - opts Options - sem *semaphore.Weighted - log *slog.Logger -} - -func NewDispatcher(opts Options) (*Dispatcher, error) { - if opts.BinaryPath == "" { - return nil, errors.New("nsc binary path is required") - } - 
if opts.DefaultImage == "" { - return nil, errors.New("default Namespace runner image is required") - } - if opts.RunnerNamePrefix == "" { - opts.RunnerNamePrefix = "nscloud-" - } - if opts.Executor == "" { - opts.Executor = "shell" - } - if opts.MacosBaseImageID == "" { - opts.MacosBaseImageID = "tahoe" - } - if opts.MacosMachineArch == "" { - opts.MacosMachineArch = "arm64" - } - if opts.MaxParallel <= 0 { - opts.MaxParallel = 4 - } - if opts.DefaultDuration == 0 { - opts.DefaultDuration = 30 * time.Minute - } - logger := opts.Logger - if logger == nil { - logger = slog.New(slog.NewTextHandler(io.Discard, nil)) - } - - return &Dispatcher{ - opts: opts, - sem: semaphore.NewWeighted(opts.MaxParallel), - log: logger, - }, nil -} - -func (d *Dispatcher) LaunchRunner(ctx context.Context, req LaunchRequest) (string, error) { - if req.Token == "" { - return "", errors.New("registration token is required") - } - if req.InstanceURL == "" { - return "", errors.New("forgejo instance url is required") - } - if err := d.sem.Acquire(ctx, 1); err != nil { - return "", err - } - defer d.sem.Release(1) - - runnerName := d.generateName() - duration := req.Duration - if duration == 0 { - duration = d.opts.DefaultDuration - } - machineType := choose(req.MachineType, d.opts.DefaultMachine) - image := choose(req.Image, d.opts.DefaultImage) - - if hasWindowsLabel(req.Labels) { - if err := d.launchWindowsRunnerViaWinRM(ctx, runnerName, req, duration, machineType); err != nil { - return "", err - } - return runnerName, nil - } - - if hasMacOSLabel(req.Labels) { - // Compute macOS shapes differ from the Linux "run" defaults. If the request - // didn't specify a machine type, ensure we pick a macOS-valid default. - if machineType == "" || machineType == d.opts.DefaultMachine { - machineType = "12x28" - } - - // Prefer the Compute API path because it uses the service token (NSC_TOKEN_FILE) - // and does not require an interactive `nsc login` session. 
- if err := d.launchMacOSRunner(ctx, runnerName, req, duration, machineType); err != nil { - d.log.Warn("macos compute launch failed; falling back to nsc create+ssh", "runner", runnerName, "err", err) - if err := d.launchMacOSRunnerViaNSC(ctx, runnerName, req, duration, machineType); err != nil { - return "", err - } - } - return runnerName, nil - } - - env := map[string]string{ - "FORGEJO_INSTANCE_URL": req.InstanceURL, - "FORGEJO_RUNNER_TOKEN": req.Token, - "FORGEJO_RUNNER_NAME": runnerName, - "FORGEJO_RUNNER_LABELS": strings.Join(req.Labels, ","), - "FORGEJO_RUNNER_EXEC": d.opts.Executor, - } - for k, v := range req.ExtraEnv { - env[k] = v - } - if _, ok := env["NSC_CACHE_PATH"]; !ok { - env["NSC_CACHE_PATH"] = "/nix/store" - } - - script := d.bootstrapScript() - args := []string{ - "run", - "--wait", - "--output", - "json", - "--duration", duration.String(), - "--image", image, - "--name", runnerName, - "--user", "root", - } - if machineType != "" { - args = append(args, "--machine_type", machineType) - } - if d.opts.Network != "" { - args = append(args, "--network", d.opts.Network) - } - for key, value := range env { - if value == "" { - continue - } - args = append(args, "-e", fmt.Sprintf("%s=%s", key, value)) - } - if d.opts.WorkDir != "" { - args = append(args, "-e", fmt.Sprintf("FORGEJO_RUNNER_WORKDIR=%s", d.opts.WorkDir)) - } - - args = append(args, "--", "/bin/sh", "-c", script) - - cmd := exec.CommandContext(ctx, d.opts.BinaryPath, args...) 
- var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - - start := time.Now() - d.log.Info("launching Namespace runner", - "runner", runnerName, - "machine_type", machineType, - "image", image, - ) - err := cmd.Run() - if err != nil { - return "", fmt.Errorf("nsc run failed: %w\n%s", err, buf.String()) - } - - if output := strings.TrimSpace(buf.String()); output != "" { - d.log.Info("runner output", "runner", runnerName, "output", output) - } - - d.log.Info("runner completed", - "runner", runnerName, - "duration", time.Since(start), - ) - - if instanceID := parseInstanceID(buf.String()); instanceID != "" { - waitCtx, cancel := context.WithTimeout(context.Background(), duration) - defer cancel() - stopped := d.waitForInstanceStop(waitCtx, runnerName, instanceID, duration) - if !stopped { - d.log.Warn("runner did not stop before timeout", "runner", runnerName, "instance", instanceID) - } - d.destroyInstance(waitCtx, runnerName, instanceID) - } - - return runnerName, nil -} - -func (d *Dispatcher) generateName() string { - id := strings.ReplaceAll(uuid.NewString(), "-", "") - return d.opts.RunnerNamePrefix + id[:12] -} - -func parseInstanceID(output string) string { - if jsonBlob := extractJSON(output); jsonBlob != "" { - var payload struct { - ClusterID string `json:"cluster_id"` - } - if err := json.Unmarshal([]byte(jsonBlob), &payload); err == nil && payload.ClusterID != "" { - return payload.ClusterID - } - } - const marker = "ID:" - idx := strings.Index(output, marker) - if idx == -1 { - return "" - } - rest := strings.TrimSpace(output[idx+len(marker):]) - if rest == "" { - return "" - } - fields := strings.Fields(rest) - if len(fields) == 0 { - return "" - } - return fields[0] -} - -func extractJSON(output string) string { - trimmed := strings.TrimSpace(output) - if trimmed == "" { - return "" - } - start := strings.IndexAny(trimmed, "[{") - if start == -1 { - return "" - } - end := strings.LastIndexAny(trimmed, "]}") - if end == -1 || end < start { - 
return "" - } - return trimmed[start : end+1] -} - -type describeResponse struct { - Resource string `json:"resource"` - PerResource map[string]describeTarget `json:"per_resource"` -} - -type describeTarget struct { - Tombstone string `json:"tombstone"` - Container []describeContainer `json:"container"` -} - -type describeContainer struct { - Status string `json:"status"` - TerminatedAt string `json:"terminated_at"` -} - -func instanceStopped(output string) bool { - jsonBlob := extractJSON(output) - if jsonBlob == "" { - return false - } - var payload []describeResponse - if err := json.Unmarshal([]byte(jsonBlob), &payload); err != nil { - return false - } - if len(payload) == 0 { - return false - } - for _, entry := range payload { - for _, target := range entry.PerResource { - if target.Tombstone != "" { - return true - } - if len(target.Container) == 0 { - continue - } - for _, container := range target.Container { - if container.Status != "stopped" && container.TerminatedAt == "" { - return false - } - } - } - } - return true -} - -func (d *Dispatcher) waitForInstanceStop(ctx context.Context, runnerName, instanceID string, timeout time.Duration) bool { - if timeout <= 0 { - timeout = d.opts.DefaultDuration - } - deadline := time.Now().Add(timeout) - ticker := time.NewTicker(10 * time.Second) - defer ticker.Stop() - - for { - stopped, err := d.checkInstanceStopped(ctx, instanceID) - if err != nil { - d.log.Warn("runner stop check failed", "runner", runnerName, "instance", instanceID, "err", err) - return false - } - if stopped { - return true - } - if time.Now().After(deadline) { - return false - } - select { - case <-ctx.Done(): - return false - case <-ticker.C: - } - } -} - -func (d *Dispatcher) checkInstanceStopped(ctx context.Context, instanceID string) (bool, error) { - cmd := exec.CommandContext(ctx, d.opts.BinaryPath, "describe", "--output", "json", instanceID) - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - if err := cmd.Run(); err != 
nil { - output := strings.ToLower(buf.String()) - if strings.Contains(output, "destroyed") || strings.Contains(output, "not found") { - return true, nil - } - return false, fmt.Errorf("nsc describe failed: %w\n%s", err, strings.TrimSpace(buf.String())) - } - return instanceStopped(buf.String()), nil -} - -func (d *Dispatcher) destroyInstance(ctx context.Context, runnerName, instanceID string) { - cmd := exec.CommandContext(ctx, d.opts.BinaryPath, "destroy", "--force", instanceID) - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - if err := cmd.Run(); err != nil { - d.log.Warn("runner destroy failed", "runner", runnerName, "instance", instanceID, "err", err, "output", strings.TrimSpace(buf.String())) - return - } - if output := strings.TrimSpace(buf.String()); output != "" { - d.log.Info("runner destroyed", "runner", runnerName, "instance", instanceID, "output", output) - } else { - d.log.Info("runner destroyed", "runner", runnerName, "instance", instanceID) - } -} - -func choose(values ...string) string { - for _, v := range values { - if strings.TrimSpace(v) != "" { - return v - } - } - return "" -} - -func (d *Dispatcher) bootstrapScript() string { - var builder strings.Builder - builder.WriteString(`set -euo pipefail -mkdir -p "${FORGEJO_RUNNER_WORKDIR:-/tmp/forgejo-runner}" -cd "${FORGEJO_RUNNER_WORKDIR:-/tmp/forgejo-runner}" - -if ! command -v node >/dev/null 2>&1; then - apk add --no-cache nodejs npm >/dev/null -fi -if ! command -v sudo >/dev/null 2>&1; then - apk add --no-cache sudo bash >/dev/null -fi -if ! command -v curl >/dev/null 2>&1; then - apk add --no-cache curl >/dev/null -fi -if ! 
command -v xz >/dev/null 2>&1; then - apk add --no-cache xz >/dev/null -fi -export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" -node --version >/dev/null - -cat > runner.yaml <<'EOF' -log: - level: info -runner: - file: .runner - capacity: 1 - name: ${FORGEJO_RUNNER_NAME} - labels: -EOF -`) - builder.WriteString(`runner_exec="${FORGEJO_RUNNER_EXEC:-host}" -if [ "$runner_exec" = "shell" ]; then - runner_exec="host" -fi - -resolved_labels="" -for label in ${FORGEJO_RUNNER_LABELS//,/ } ; do - if [ -z "${label}" ]; then - continue - fi - case "${label}" in - *:*) resolved="${label}" ;; - *) - if [ "$runner_exec" = "host" ]; then - resolved="${label}:host" - else - resolved="${label}:${runner_exec}" - fi - ;; - esac - echo " - ${resolved}" >> runner.yaml - if [ -z "${resolved_labels}" ]; then - resolved_labels="${resolved}" - else - resolved_labels="${resolved_labels},${resolved}" - fi -done -`) - builder.WriteString(`cat >> runner.yaml <<'EOF' -cache: - enabled: false -EOF - -forgejo-runner register \ - --no-interactive \ - --instance "${FORGEJO_INSTANCE_URL}" \ - --token "${FORGEJO_RUNNER_TOKEN}" \ - --name "${FORGEJO_RUNNER_NAME}" \ - --labels "${resolved_labels}" \ - --config runner.yaml - -runner_mode="${FORGEJO_RUNNER_MODE:-one-job}" -case "$runner_mode" in - one-job) - forgejo-runner one-job --config runner.yaml - ;; - daemon) - forgejo-runner daemon --config runner.yaml - ;; - *) - echo "Unknown FORGEJO_RUNNER_MODE: ${runner_mode}" >&2 - exit 1 - ;; -esac -`) - return builder.String() -} diff --git a/services/forgejo-nsc/internal/nsc/macos.go b/services/forgejo-nsc/internal/nsc/macos.go deleted file mode 100644 index 9bf3837..0000000 --- a/services/forgejo-nsc/internal/nsc/macos.go +++ /dev/null @@ -1,708 +0,0 @@ -package nsc - -import ( - "bytes" - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "io" - "net" - "net/http" - "os" - "strconv" - "strings" - "sync" - "time" - - computev1betaconnect 
"buf.build/gen/go/namespace/cloud/connectrpc/go/proto/namespace/cloud/compute/v1beta/computev1betaconnect" - computev1beta "buf.build/gen/go/namespace/cloud/protocolbuffers/go/proto/namespace/cloud/compute/v1beta" - stdlib "buf.build/gen/go/namespace/cloud/protocolbuffers/go/proto/namespace/stdlib" - "connectrpc.com/connect" - "golang.org/x/crypto/ssh" - "google.golang.org/protobuf/types/known/timestamppb" -) - -func hasMacOSLabel(labels []string) bool { - for _, label := range labels { - l := strings.TrimSpace(label) - if l == "" { - continue - } - if strings.HasPrefix(l, "namespace-profile-macos-") { - return true - } - } - return false -} - -type lockedBuffer struct { - mu sync.Mutex - b bytes.Buffer -} - -func (lb *lockedBuffer) Write(p []byte) (int, error) { - lb.mu.Lock() - defer lb.mu.Unlock() - return lb.b.Write(p) -} - -func (lb *lockedBuffer) Len() int { - lb.mu.Lock() - defer lb.mu.Unlock() - return lb.b.Len() -} - -func (lb *lockedBuffer) String() string { - lb.mu.Lock() - defer lb.mu.Unlock() - return lb.b.String() -} - -func macosSupportDiskSelectors(baseImageID string) []*stdlib.Label { - id := strings.TrimSpace(baseImageID) - if id == "" { - id = "tahoe" - } - - // Allow specifying selectors directly, e.g. "macos.version=26.x,image.with=xcode-26". - if strings.Contains(id, "=") { - var out []*stdlib.Label - for _, part := range strings.Split(id, ",") { - part = strings.TrimSpace(part) - if part == "" { - continue - } - name, value, ok := strings.Cut(part, "=") - name = strings.TrimSpace(name) - value = strings.TrimSpace(value) - if !ok || name == "" || value == "" { - continue - } - out = append(out, &stdlib.Label{Name: name, Value: value}) - } - if len(out) > 0 { - return out - } - } - - // Human-friendly presets used by burrow config. 
- switch strings.ToLower(id) { - case "sonoma", "macos-14", "macos14", "14": - return []*stdlib.Label{{Name: "macos.version", Value: "14.x"}} - case "sequoia", "macos-15", "macos15", "15": - return []*stdlib.Label{{Name: "macos.version", Value: "15.x"}} - case "tahoe", "macos-26", "macos26", "26": - // Constrain to the Xcode 26 support disk explicitly, since Apple builds - // depend on Xcode being present and Compute currently errors if it can't - // resolve a support disk selection. - return []*stdlib.Label{{Name: "macos.version", Value: "26.x"}, {Name: "image.with", Value: "xcode-26"}} - default: - return []*stdlib.Label{{Name: "macos.version", Value: "26.x"}} - } -} - -func macosComputeBaseImageID(baseImageID string) string { - id := strings.TrimSpace(baseImageID) - if id == "" { - return "tahoe" - } - // If selectors were provided directly, we cannot safely infer a canonical - // base image ID from them. - if strings.Contains(id, "=") { - return "" - } - switch strings.ToLower(id) { - case "sonoma", "macos-14", "macos14", "14": - return "sonoma" - case "sequoia", "macos-15", "macos15", "15": - return "sequoia" - case "tahoe", "macos-26", "macos26", "26": - return "tahoe" - default: - return id - } -} - -type nscBearerTokenFile struct { - BearerToken string `json:"bearer_token"` -} - -func readNSCBearerToken() (string, error) { - path := os.Getenv("NSC_TOKEN_FILE") - if path == "" { - return "", errors.New("NSC_TOKEN_FILE is required for macos runners") - } - raw, err := os.ReadFile(path) - if err != nil { - return "", fmt.Errorf("read NSC_TOKEN_FILE: %w", err) - } - trimmed := strings.TrimSpace(string(raw)) - if trimmed == "" { - return "", errors.New("NSC_TOKEN_FILE is empty") - } - // Support the on-host format used by burrow: {"bearer_token":"..."}. - var parsed nscBearerTokenFile - if err := json.Unmarshal([]byte(trimmed), &parsed); err == nil && parsed.BearerToken != "" { - return parsed.BearerToken, nil - } - // Fallback: allow a raw bearer token. 
- return trimmed, nil -} - -func parseMachineTypeCPUxMemGB(machineType string) (vcpu int32, memoryMB int32, err error) { - parts := strings.Split(machineType, "x") - if len(parts) != 2 { - return 0, 0, fmt.Errorf("invalid machine_type %q: expected CPUxMemoryGB (e.g. 12x28)", machineType) - } - cpu64, err := strconv.ParseInt(parts[0], 10, 32) - if err != nil { - return 0, 0, fmt.Errorf("invalid machine_type %q: cpu: %w", machineType, err) - } - memGB64, err := strconv.ParseInt(parts[1], 10, 32) - if err != nil { - return 0, 0, fmt.Errorf("invalid machine_type %q: memory: %w", machineType, err) - } - return int32(cpu64), int32(memGB64 * 1024), nil -} - -func (d *Dispatcher) launchMacOSRunner(ctx context.Context, runnerName string, req LaunchRequest, ttl time.Duration, machineType string) error { - if machineType == "" { - return errors.New("machine_type is required for macos runners") - } - vcpu, memoryMB, err := parseMachineTypeCPUxMemGB(machineType) - if err != nil { - return err - } - bearer, err := readNSCBearerToken() - if err != nil { - return err - } - - httpClient := &http.Client{Timeout: 60 * time.Second} - client := computev1betaconnect.NewComputeServiceClient(httpClient, d.opts.ComputeBaseURL) - - workdir := d.opts.WorkDir - if strings.TrimSpace(workdir) == "" { - workdir = "/tmp/forgejo-runner" - } - - env := map[string]string{ - "FORGEJO_INSTANCE_URL": req.InstanceURL, - "FORGEJO_RUNNER_TOKEN": req.Token, - "FORGEJO_RUNNER_NAME": runnerName, - "FORGEJO_RUNNER_LABELS": strings.Join(req.Labels, ","), - "FORGEJO_RUNNER_EXEC": d.opts.Executor, - "FORGEJO_RUNNER_WORKDIR": workdir, - } - for k, v := range req.ExtraEnv { - env[k] = v - } - // Best-effort caching: workflows call Scripts/nscloud-cache.sh, which is a - // no-op unless NSC_CACHE_PATH is set. This may still be skipped if spacectl - // lacks credentials, but setting the path is harmless and keeps behavior - // consistent across macOS / Linux runners. 
- if _, ok := env["NSC_CACHE_PATH"]; !ok { - env["NSC_CACHE_PATH"] = "/Users/runner/.cache/nscloud" - } - - deadline := timestamppb.New(time.Now().Add(ttl)) - - createReq := &computev1beta.CreateInstanceRequest{ - Shape: &computev1beta.InstanceShape{ - VirtualCpu: vcpu, - MemoryMegabytes: memoryMB, - MachineArch: d.opts.MacosMachineArch, - Os: "macos", - // Namespace macOS compute requires selectors to pick the base image - // ("support disk"), otherwise instance creation fails. - Selectors: macosSupportDiskSelectors(d.opts.MacosBaseImageID), - }, - DocumentedPurpose: fmt.Sprintf("burrow forgejo runner %s", runnerName), - Deadline: deadline, - Labels: []*stdlib.Label{ - {Name: "nsc.source", Value: "forgejo-nsc"}, - {Name: "burrow.service", Value: "forgejo-runner"}, - {Name: "burrow.runner", Value: runnerName}, - }, - Applications: []*computev1beta.ApplicationRequest{ - { - Name: "forgejo-runner", - Command: "/bin/bash", - Args: []string{"-lc", macosBootstrapScript()}, - Environment: env, - WorkloadType: computev1beta.ApplicationRequest_JOB, - }, - }, - } - if imageID := macosComputeBaseImageID(d.opts.MacosBaseImageID); imageID != "" { - createReq.Experimental = &computev1beta.CreateInstanceRequest_ExperimentalFeatures{ - MacosBaseImageId: imageID, - } - } - - d.log.Info("launching Namespace macos runner", - "runner", runnerName, - "compute_base_url", d.opts.ComputeBaseURL, - "macos_base_image_id", d.opts.MacosBaseImageID, - "shape", fmt.Sprintf("%dx%d", vcpu, memoryMB/1024), - "arch", d.opts.MacosMachineArch, - ) - - reqCreate := connect.NewRequest(createReq) - reqCreate.Header().Set("Authorization", "Bearer "+bearer) - resp, err := client.CreateInstance(ctx, reqCreate) - if err != nil { - return fmt.Errorf("compute create instance failed: %w", err) - } - if resp.Msg == nil || resp.Msg.Metadata == nil { - return errors.New("compute create instance returned no metadata") - } - instanceID := resp.Msg.Metadata.InstanceId - - waitErr := d.waitForMacOSRunnerStop(ctx, 
client, bearer, runnerName, instanceID, ttl) - d.destroyComputeInstance(context.Background(), client, bearer, runnerName, instanceID) - return waitErr -} - -func (d *Dispatcher) runMacOSComputeSSHScript(ctx context.Context, runnerName, instanceID, script string) error { - bearer, err := readNSCBearerToken() - if err != nil { - return err - } - - httpClient := &http.Client{Timeout: 60 * time.Second} - client := computev1betaconnect.NewComputeServiceClient(httpClient, d.opts.ComputeBaseURL) - - getReq := connect.NewRequest(&computev1beta.GetSSHConfigRequest{ - InstanceId: instanceID, - // TargetContainer is optional. Keep it empty to run commands in the default instance environment. - }) - getReq.Header().Set("Authorization", "Bearer "+bearer) - - resp, err := client.GetSSHConfig(ctx, getReq) - if err != nil { - return fmt.Errorf("compute get ssh config failed: %w", err) - } - if resp.Msg == nil { - return errors.New("compute get ssh config returned empty response") - } - if resp.Msg.Endpoint == "" { - return errors.New("compute get ssh config returned empty endpoint") - } - if len(resp.Msg.SshPrivateKey) == 0 { - return errors.New("compute get ssh config returned empty ssh private key") - } - if strings.TrimSpace(resp.Msg.Username) == "" { - return errors.New("compute get ssh config returned empty username") - } - - signer, err := ssh.ParsePrivateKey(resp.Msg.SshPrivateKey) - if err != nil { - return fmt.Errorf("parse ssh private key: %w", err) - } - - addr := fmt.Sprintf("%s:22", resp.Msg.Endpoint) - conn, err := net.Dial("tcp", addr) - if err != nil { - return fmt.Errorf("dial ssh endpoint: %w", err) - } - defer conn.Close() - - sshCfg := &ssh.ClientConfig{ - User: resp.Msg.Username, - Auth: []ssh.AuthMethod{ssh.PublicKeys(signer)}, - HostKeyCallback: ssh.InsecureIgnoreHostKey(), // Endpoint is short-lived and key is delivered out-of-band. 
- Timeout: 30 * time.Second, - } - - c, chans, reqs, err := ssh.NewClientConn(conn, addr, sshCfg) - if err != nil { - return fmt.Errorf("ssh client conn: %w", err) - } - clientSSH := ssh.NewClient(c, chans, reqs) - defer clientSSH.Close() - - session, err := clientSSH.NewSession() - if err != nil { - return fmt.Errorf("ssh new session: %w", err) - } - defer session.Close() - - var buf bytes.Buffer - session.Stdout = &buf - session.Stderr = &buf - session.Stdin = strings.NewReader(script) - - // Feed the bootstrap script via stdin so we don't need to quote/escape it. - // - // Note: Some SSH servers do not reliably parse exec strings with arguments. - // Running bare `/bin/bash` still reads from stdin and avoids argument parsing. - if err := session.Run("/bin/bash"); err != nil { - outRaw := buf.String() - out := strings.TrimSpace(outRaw) - - // Some SSH servers reject exec requests and only allow interactive shells, - // and others will "succeed" but still interpret stdin under the default - // login shell (showing the zsh banner / prompts). - // - // In those cases, retry via Shell() with a PTY. - exitStatus := 0 - exitErr, isExitErr := err.(*ssh.ExitError) - if isExitErr { - exitStatus = exitErr.ExitStatus() - } - - looksInteractive := strings.Contains(outRaw, "The default interactive shell is now zsh") || - strings.Contains(outRaw, " runner$ ") || - strings.Contains(outRaw, "bash-3.2$") - shouldFallback := !isExitErr || looksInteractive - - if shouldFallback { - d.log.Warn("compute ssh exec bootstrap failed; retrying via interactive shell", - "runner", runnerName, - "instance", instanceID, - "exit_status", exitStatus, - ) - - session2, err2 := clientSSH.NewSession() - if err2 != nil { - return fmt.Errorf("ssh new session (fallback): %w", err2) - } - defer session2.Close() - - // bytes.Buffer isn't safe for concurrent writes + reads; the SSH session - // writes from background goroutines. Wrap it so we can poll for a prompt - // before sending commands. 
- lb := &lockedBuffer{} - session2.Stdout = lb - session2.Stderr = lb - - in, err2 := session2.StdinPipe() - if err2 != nil { - return fmt.Errorf("ssh stdin pipe (fallback): %w", err2) - } - - // Request a PTY to match interactive semantics even when the caller - // doesn't have a local terminal. - _ = session2.RequestPty("xterm", 24, 80, nil) - - if err2 := session2.Shell(); err2 != nil { - return fmt.Errorf("ssh shell (fallback): %w", err2) - } - - // Wait briefly for the prompt/banner so the first command isn't dropped. - // We also emit a sentinel `echo` to verify the TTY is live. - deadline := time.Now().Add(3 * time.Second) - for time.Now().Before(deadline) { - n := lb.Len() - if n > 0 { - break - } - time.Sleep(50 * time.Millisecond) - } - - // Stream the script then exit. Prefer LF line endings; macOS shells and - // PTYs can treat CRLF as literal CR characters (breaking heredoc - // delimiters and quoting). - writeTTY := func(s string) { - if s == "" { - return - } - s = strings.ReplaceAll(s, "\r\n", "\n") - _, _ = io.WriteString(in, s) - } - - scriptTTY := strings.ReplaceAll(script, "\r\n", "\n") - - // Cut down noise in logs and reduce the chance of ZSH line-editing - // behavior corrupting long inputs. - writeTTY("stty -echo 2>/dev/null || true\n") - writeTTY("echo BURROW_BOOTSTRAP_TTY_OK\n") - - // Avoid heredocs for the script itself (PTY newline handling is fragile). - // Instead, stream base64 in short chunks to a file, then decode and run it. 
- enc := base64.StdEncoding.EncodeToString([]byte(scriptTTY)) - idSafe := strings.ReplaceAll(instanceID, "-", "_") - b64Path := "/tmp/burrow-bootstrap-" + idSafe + ".b64" - shPath := "/tmp/burrow-bootstrap-" + idSafe + ".sh" - - writeTTY("rm -f " + b64Path + " " + shPath + "\n") - writeTTY(": > " + b64Path + "\n") - - const chunkSize = 80 - for i := 0; i < len(enc); i += chunkSize { - j := i + chunkSize - if j > len(enc) { - j = len(enc) - } - chunk := enc[i:j] - // Base64 chunks contain only [A-Za-z0-9+/=], which are safe to pass - // unquoted. Avoid quotes entirely so a truncated line can't leave - // the remote shell in a multi-line continuation state. - writeTTY("printf %s " + chunk + " >> " + b64Path + "\n") - time.Sleep(5 * time.Millisecond) - } - - // macOS uses `base64 -D` (BSD), some environments use `-d` (GNU). - writeTTY("base64 -D " + b64Path + " > " + shPath + " 2>/dev/null || base64 -d " + b64Path + " > " + shPath + "\n") - writeTTY("/bin/bash " + shPath + "\n") - writeTTY("exit\n") - _ = in.Close() - - if err2 := session2.Wait(); err2 != nil { - out2 := strings.TrimSpace(lb.String()) - if len(out2) > 16*1024 { - out2 = out2[len(out2)-16*1024:] - } - return fmt.Errorf("compute ssh runner bootstrap failed (shell fallback): %w\n%s", err2, out2) - } - - d.log.Info("macos runner bootstrap completed via compute ssh shell", "runner", runnerName, "instance", instanceID) - return nil - } - - if len(out) > 16*1024 { - out = out[len(out)-16*1024:] - } - return fmt.Errorf("compute ssh runner bootstrap failed: %w\n%s", err, out) - } - - d.log.Info("macos runner bootstrap completed via compute ssh", "runner", runnerName, "instance", instanceID) - return nil -} - -func (d *Dispatcher) waitForMacOSRunnerStop(ctx context.Context, client computev1betaconnect.ComputeServiceClient, bearer, runnerName, instanceID string, ttl time.Duration) error { - if ttl <= 0 { - ttl = d.opts.DefaultDuration - } - deadline := time.Now().Add(ttl) - ticker := time.NewTicker(15 * 
time.Second) - defer ticker.Stop() - - for { - stopped, err := d.checkComputeInstanceStopped(ctx, client, bearer, instanceID) - if err != nil { - d.log.Warn("macos runner stop check failed", "runner", runnerName, "instance", instanceID, "err", err) - } else if stopped { - return nil - } - - if time.Now().After(deadline) { - return fmt.Errorf("macos runner exceeded ttl (%s) without stopping", ttl) - } - select { - case <-ctx.Done(): - return ctx.Err() - case <-ticker.C: - } - } -} - -func (d *Dispatcher) checkComputeInstanceStopped(ctx context.Context, client computev1betaconnect.ComputeServiceClient, bearer, instanceID string) (bool, error) { - describeReq := connect.NewRequest(&computev1beta.DescribeInstanceRequest{InstanceId: instanceID}) - describeReq.Header().Set("Authorization", "Bearer "+bearer) - resp, err := client.DescribeInstance(ctx, describeReq) - if err != nil { - // NotFound means the instance is already gone. - if connect.CodeOf(err) == connect.CodeNotFound { - return true, nil - } - return false, err - } - if resp.Msg == nil || resp.Msg.Metadata == nil { - return false, errors.New("describe instance returned no metadata") - } - switch resp.Msg.Metadata.Status { - case computev1beta.InstanceMetadata_DESTROYED: - return true, nil - case computev1beta.InstanceMetadata_ERROR: - // Best-effort include shutdown reasons; do not include unbounded output. 
- var b strings.Builder - for _, reason := range resp.Msg.ShutdownReasons { - if reason == nil { - continue - } - if b.Len() > 0 { - b.WriteString("; ") - } - b.WriteString(reason.String()) - if b.Len() > 1024 { - break - } - } - msg := strings.TrimSpace(b.String()) - if msg == "" { - msg = "unknown shutdown reason" - } - return true, fmt.Errorf("instance entered error state: %s", msg) - default: - if resp.Msg.Metadata.DestroyedAt != nil { - return true, nil - } - return false, nil - } -} - -func (d *Dispatcher) destroyComputeInstance(ctx context.Context, client computev1betaconnect.ComputeServiceClient, bearer, runnerName, instanceID string) { - if ctx == nil { - ctx = context.Background() - } - ctx, cancel := context.WithTimeout(ctx, 30*time.Second) - defer cancel() - - destroyReq := connect.NewRequest(&computev1beta.DestroyInstanceRequest{InstanceId: instanceID}) - destroyReq.Header().Set("Authorization", "Bearer "+bearer) - if _, err := client.DestroyInstance(ctx, destroyReq); err != nil { - if connect.CodeOf(err) == connect.CodeNotFound { - d.log.Info("macos runner destroyed", "runner", runnerName, "instance", instanceID, "status", "not_found") - return - } - d.log.Warn("macos runner destroy failed", "runner", runnerName, "instance", instanceID, "err", err) - return - } - d.log.Info("macos runner destroyed", "runner", runnerName, "instance", instanceID) -} - -func macosBootstrapScript() string { - // Keep this script self-contained: it runs on a fresh macOS VM base image. - var b strings.Builder - b.WriteString(`set -euo pipefail - -workdir="${FORGEJO_RUNNER_WORKDIR:-/tmp/forgejo-runner}" -mkdir -p "${workdir}" -cd "${workdir}" - -export PATH="/usr/local/bin:/opt/homebrew/bin:/usr/bin:/bin:/usr/sbin:/sbin:${PATH}" - -if ! command -v curl >/dev/null 2>&1; then - echo "curl is required" >&2 - exit 1 -fi - -if ! command -v nix >/dev/null 2>&1; then - echo "Installing nix (Determinate Systems installer)..." 
- installer="/tmp/nix-installer.$$" - curl -fsSL -o "${installer}" https://install.determinate.systems/nix - chmod +x "${installer}" - - if command -v sudo >/dev/null 2>&1; then - if sudo -n true 2>/dev/null; then - sudo -n sh "${installer}" install --no-confirm - else - sudo sh "${installer}" install --no-confirm - fi - else - sh "${installer}" install --no-confirm - fi - - rm -f "${installer}" -fi - -if [[ -f /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh ]]; then - # shellcheck disable=SC1091 - . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh -fi - -export PATH="/nix/var/nix/profiles/default/bin:/nix/var/nix/profiles/default/sbin:${PATH}" - -# Flake builds need nix-command + flakes enabled. Workflows may layer additional -# config, but ensure a sane default exists. -mkdir -p "${XDG_CONFIG_HOME:-$HOME/.config}/nix" -cat > "${XDG_CONFIG_HOME:-$HOME/.config}/nix/nix.conf" <<'EOF' -experimental-features = nix-command flakes -sandbox = true -fallback = true -substituters = https://cache.nixos.org -trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= -EOF - -mkdir -p bin -export PATH="${PWD}/bin:${PATH}" - -runner_version="v12.6.4" -runner_src_tgz="forgejo-runner-${runner_version}.tar.gz" -runner_src_url="https://code.forgejo.org/forgejo/runner/archive/${runner_version}.tar.gz" -runner_src_dir="forgejo-runner-src" - -if ! command -v forgejo-runner >/dev/null 2>&1; then - rm -rf "${runner_src_dir}" - mkdir -p "${runner_src_dir}" - curl -fsSL "${runner_src_url}" -o "${runner_src_tgz}" - tar -xzf "${runner_src_tgz}" -C "${runner_src_dir}" --strip-components=1 - - toolchain="$(grep -E '^toolchain ' "${runner_src_dir}/go.mod" | awk '{print $2}' | head -n 1 || true)" - if [ -z "${toolchain}" ]; then - toolchain="go1.25.7" - fi - - if ! 
command -v go >/dev/null 2>&1; then - go_tgz="${toolchain}.darwin-arm64.tar.gz" - go_url="https://go.dev/dl/${go_tgz}" - curl -fsSL "${go_url}" -o "${go_tgz}" - tar -xzf "${go_tgz}" - export GOROOT="${PWD}/go" - export PATH="${GOROOT}/bin:${PATH}" - fi - - export GOPATH="${PWD}/.gopath" - export GOMODCACHE="${PWD}/.gomodcache" - export GOCACHE="${PWD}/.gocache" - mkdir -p "${GOPATH}" "${GOMODCACHE}" "${GOCACHE}" - - (cd "${runner_src_dir}" && go build -o "${workdir}/bin/forgejo-runner" .) - chmod +x "${workdir}/bin/forgejo-runner" -fi - -cat > runner.yaml <<'EOF' -log: - level: info -runner: - file: .runner - capacity: 1 - name: ${FORGEJO_RUNNER_NAME} - labels: -EOF - -runner_exec="${FORGEJO_RUNNER_EXEC:-host}" -if [ "$runner_exec" = "shell" ]; then - runner_exec="host" -fi - -resolved_labels="" -for label in ${FORGEJO_RUNNER_LABELS//,/ } ; do - if [ -z "${label}" ]; then - continue - fi - case "${label}" in - *:*) resolved="${label}" ;; - *) - resolved="${label}:host" - ;; - esac - echo " - ${resolved}" >> runner.yaml - if [ -z "${resolved_labels}" ]; then - resolved_labels="${resolved}" - else - resolved_labels="${resolved_labels},${resolved}" - fi -done - -cat >> runner.yaml <<'EOF' -cache: - enabled: false -EOF - -forgejo-runner register \ - --no-interactive \ - --instance "${FORGEJO_INSTANCE_URL}" \ - --token "${FORGEJO_RUNNER_TOKEN}" \ - --name "${FORGEJO_RUNNER_NAME}" \ - --labels "${resolved_labels}" \ - --config runner.yaml - -forgejo-runner one-job --config runner.yaml -`) - return b.String() -} diff --git a/services/forgejo-nsc/internal/nsc/macos_nsc.go b/services/forgejo-nsc/internal/nsc/macos_nsc.go deleted file mode 100644 index c22fadb..0000000 --- a/services/forgejo-nsc/internal/nsc/macos_nsc.go +++ /dev/null @@ -1,373 +0,0 @@ -package nsc - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "net/url" - "os" - "os/exec" - "path/filepath" - "strings" - "time" -) - -func normalizeMacOSNSCMachineType(machineType string) (normalized 
string, changed bool, err error) { - vcpu, memoryMB, err := parseMachineTypeCPUxMemGB(machineType) - if err != nil { - return "", false, err - } - memGB := memoryMB / 1024 - if memGB <= 0 || vcpu <= 0 { - return "", false, fmt.Errorf("invalid machine_type %q after parse: vcpu=%d memGB=%d", machineType, vcpu, memGB) - } - - // NSC CLI (and the underlying InstanceService) enforce discrete cpu/mem sets - // for macOS. Normalize requested values by rounding up to the closest allowed - // values to keep provisioning stable even when configs drift. - // - // Observed allowed sets from Namespace API error output for macos/arm64: - // cpu: [4 6 8 12] - // mem: [7 14 28 56] (GB) - allowedCPU := []int32{4, 6, 8, 12} - allowedMemGB := []int32{7, 14, 28, 56} - - roundUp := func(v int32, allowed []int32) (int32, bool) { - for _, a := range allowed { - if v <= a { - return a, a != v - } - } - // Clamp to max if above all allowed values. - return allowed[len(allowed)-1], true - } - - newCPU, cpuChanged := roundUp(vcpu, allowedCPU) - newMemGB, memChanged := roundUp(memGB, allowedMemGB) - - normalized = fmt.Sprintf("%dx%d", newCPU, newMemGB) - changed = cpuChanged || memChanged - return normalized, changed, nil -} - -func (d *Dispatcher) launchMacOSRunnerViaNSC(ctx context.Context, runnerName string, req LaunchRequest, ttl time.Duration, machineType string) error { - if machineType == "" { - return errors.New("machine_type is required for macos runners") - } - if strings.TrimSpace(os.Getenv("NSC_TOKEN_FILE")) == "" { - // The Burrow forge host feeds NSC_TOKEN_FILE from the intake-backed runtime token. 
- return errors.New("NSC_TOKEN_FILE is required for macos runners") - } - - selectors := macosSelectorsArg(d.opts.MacosBaseImageID) - if selectors == "" { - return errors.New("macos selectors resolved empty") - } - - normalizedMachineType := machineType - if n, changed, err := normalizeMacOSNSCMachineType(machineType); err != nil { - return err - } else if changed { - normalizedMachineType = n - } - - // If capacity is constrained for the requested (large) shape, try a small - // set of progressively smaller shapes before failing the dispatch request. - // This keeps macOS builds flowing even when large runners are scarce. - candidates := []string{normalizedMachineType, "8x28", "6x14", "4x7"} - seen := map[string]struct{}{} - var uniq []string - for _, c := range candidates { - c = strings.TrimSpace(c) - if c == "" { - continue - } - if _, ok := seen[c]; ok { - continue - } - seen[c] = struct{}{} - uniq = append(uniq, c) - } - candidates = uniq - - type attemptCfg struct { - waitTimeout time.Duration - createTimeout time.Duration - } - attempts := []attemptCfg{ - {waitTimeout: 6 * time.Minute, createTimeout: 8 * time.Minute}, - {waitTimeout: 4 * time.Minute, createTimeout: 6 * time.Minute}, - {waitTimeout: 3 * time.Minute, createTimeout: 5 * time.Minute}, - } - - createInstance := func(mt string, a attemptCfg) (instanceID string, out string, err error) { - tmpDir, err := os.MkdirTemp("", "forgejo-nsc-macos-*") - if err != nil { - return "", "", fmt.Errorf("mktemp: %w", err) - } - defer os.RemoveAll(tmpDir) - - metaPath := filepath.Join(tmpDir, "create.json") - cidPath := filepath.Join(tmpDir, "create.cid") - - arch := strings.TrimSpace(d.opts.MacosMachineArch) - if arch == "" { - arch = "arm64" - } - // Namespace CLI requires the "os/arch:" prefix to create a macOS instance. - // Without it, `nsc create` defaults to Linux even if selectors include macos.*. 
- machineType := fmt.Sprintf("macos/%s:%s", arch, mt) - - args := []string{ - "create", - "--duration", ttl.String(), - "--machine_type", machineType, - "--selectors", selectors, - "--bare", - "--cidfile", cidPath, - "--log_actions", - "--purpose", fmt.Sprintf("burrow forgejo runner %s", runnerName), - // Prefer plain output for debuggability (progress, capacity errors, etc). - "--output", "plain", - "--output_json_to", metaPath, - // macOS instances can take a while to become ready. - "--wait_timeout", a.waitTimeout.String(), - } - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - - createCtx, cancel := context.WithTimeout(ctx, a.createTimeout) - defer cancel() - - cmd := exec.CommandContext(createCtx, d.opts.BinaryPath, args...) - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - - if err := cmd.Run(); err != nil { - // Best-effort cleanup: if the instance ID was written before the command failed - // (or before we timed it out), attempt to destroy it to avoid idling machines. 
- if instanceID := strings.TrimSpace(mustReadFile(cidPath)); instanceID != "" { - d.destroyNSCInstance(context.Background(), runnerName, instanceID) - } - if errors.Is(createCtx.Err(), context.DeadlineExceeded) { - return "", buf.String(), fmt.Errorf("nsc create timed out after %s", a.createTimeout) - } - return "", buf.String(), fmt.Errorf("nsc create failed: %w", err) - } - - instanceID, err = readNSCCreateInstanceID(metaPath) - if err != nil { - return "", buf.String(), fmt.Errorf("nsc create output parse failed: %w", err) - } - if instanceID == "" { - return "", buf.String(), fmt.Errorf("nsc create returned empty instance id") - } - return instanceID, buf.String(), nil - } - - var ( - instanceID string - lastOut string - lastErr error - ) - for i, mt := range candidates { - a := attempts[i] - if i >= len(attempts) { - a = attempts[len(attempts)-1] - } - - d.log.Info("launching Namespace macos runner via nsc", - "runner", runnerName, - "attempt", i+1, - "machine_type", mt, - "requested_machine_type", machineType, - "selectors", selectors, - ) - - id, out, err := createInstance(mt, a) - lastOut = out - lastErr = err - if err != nil { - // Timeouts are treated as retryable (capacity constrained). - if strings.Contains(err.Error(), "timed out") || strings.Contains(strings.ToLower(out), "capacity") { - continue - } - return fmt.Errorf("%w\n%s", err, out) - } - instanceID = id - break - } - if instanceID == "" { - if lastErr != nil { - return fmt.Errorf("%w\n%s", lastErr, lastOut) - } - return fmt.Errorf("nsc create failed without producing an instance id\n%s", lastOut) - } - - // Always attempt cleanup even if the runner fails. 
- defer d.destroyNSCInstance(context.Background(), runnerName, instanceID) - - script := macosBootstrapWrapperScript(runnerName, req, d.opts.Executor, d.opts.WorkDir) - // Use the Compute SSH config endpoint (direct TCP) instead of `nsc ssh`, which - // relies on a websocket-based SSH proxy that is not supported by the - // revokable tenant token we run the dispatcher with. - if err := d.runMacOSComputeSSHScript(ctx, runnerName, instanceID, script); err != nil { - return err - } - return nil -} - -func mustReadFile(path string) string { - raw, err := os.ReadFile(path) - if err != nil { - return "" - } - return string(raw) -} - -func macosSelectorsArg(baseImageID string) string { - id := strings.TrimSpace(baseImageID) - if id == "" { - id = "tahoe" - } - // Allow passing selectors directly via config, e.g. "macos.version=26.x,image.with=xcode-26". - if strings.Contains(id, "=") { - return id - } - switch strings.ToLower(id) { - case "sonoma", "macos-14", "macos14", "14": - return "macos.version=14.x" - case "sequoia", "macos-15", "macos15", "15": - return "macos.version=15.x" - case "tahoe", "macos-26", "macos26", "26": - return "macos.version=26.x,image.with=xcode-26" - default: - return "macos.version=26.x" - } -} - -type nscCreateMetadata struct { - InstanceID string `json:"instance_id"` - ClusterID string `json:"cluster_id"` - ID string `json:"id"` -} - -func readNSCCreateInstanceID(path string) (string, error) { - raw, err := os.ReadFile(path) - if err != nil { - return "", fmt.Errorf("read %s: %w", path, err) - } - var meta nscCreateMetadata - if err := json.Unmarshal(raw, &meta); err != nil { - return "", err - } - if meta.InstanceID != "" { - return meta.InstanceID, nil - } - if meta.ClusterID != "" { - return meta.ClusterID, nil - } - if meta.ID != "" { - return meta.ID, nil - } - return "", nil -} - -func (d *Dispatcher) destroyNSCInstance(ctx context.Context, runnerName, instanceID string) { - if ctx == nil { - ctx = context.Background() - } - ctx, cancel 
:= context.WithTimeout(ctx, 2*time.Minute) - defer cancel() - - args := []string{"destroy", "--force", instanceID} - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - cmd := exec.CommandContext(ctx, d.opts.BinaryPath, args...) - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - if err := cmd.Run(); err != nil { - d.log.Warn("nsc destroy failed", "runner", runnerName, "instance", instanceID, "err", err, "output", strings.TrimSpace(buf.String())) - return - } - d.log.Info("nsc instance destroyed", "runner", runnerName, "instance", instanceID) -} - -func macosBootstrapWrapperScript(runnerName string, req LaunchRequest, executor, workdir string) string { - if strings.TrimSpace(workdir) == "" { - workdir = "/tmp/forgejo-runner" - } - - // Pass all values via stdin script so secrets do not appear in the nsc ssh argv. - env := map[string]string{ - "FORGEJO_INSTANCE_URL": req.InstanceURL, - "FORGEJO_RUNNER_TOKEN": req.Token, - "FORGEJO_RUNNER_NAME": runnerName, - "FORGEJO_RUNNER_LABELS": strings.Join(req.Labels, ","), - "FORGEJO_RUNNER_EXEC": executor, - "FORGEJO_RUNNER_WORKDIR": workdir, - } - for k, v := range req.ExtraEnv { - env[k] = v - } - - var b strings.Builder - b.WriteString("set -euo pipefail\n") - for k, v := range env { - if strings.TrimSpace(k) == "" { - continue - } - // Single-quote shell escaping: safe for arbitrary tokens. 
- b.WriteString("export ") - b.WriteString(k) - b.WriteString("=") - b.WriteString(shellSingleQuote(v)) - b.WriteString("\n") - } - b.WriteString("\n") - b.WriteString(macosBootstrapScript()) - return b.String() -} - -func shellSingleQuote(value string) string { - // 'foo' -> '\'' within single quotes: '"'"' - return "'" + strings.ReplaceAll(value, "'", `'\"'\"'`) + "'" -} - -func prependNSCRegionArgs(args []string, computeBaseURL string) []string { - region := strings.TrimSpace(os.Getenv("NSC_REGION")) - if region == "" { - region = regionFromComputeBaseURL(computeBaseURL) - } - if region == "" { - // Default to the burrow region used for other Namespace integrations. - region = "ord4" - } - return append([]string{"--region", region}, args...) -} - -func regionFromComputeBaseURL(raw string) string { - raw = strings.TrimSpace(raw) - if raw == "" { - return "" - } - u, err := url.Parse(raw) - if err != nil { - return "" - } - host := u.Hostname() - if host == "" { - return "" - } - parts := strings.Split(host, ".") - if len(parts) == 0 { - return "" - } - // ord4.compute.namespaceapis.com -> ord4 - if strings.HasSuffix(host, ".compute.namespaceapis.com") || strings.Contains(host, ".compute.") { - return parts[0] - } - return "" -} diff --git a/services/forgejo-nsc/internal/nsc/windows.go b/services/forgejo-nsc/internal/nsc/windows.go deleted file mode 100644 index 5c82d29..0000000 --- a/services/forgejo-nsc/internal/nsc/windows.go +++ /dev/null @@ -1,59 +0,0 @@ -package nsc - -import ( - "regexp" - "strings" -) - -const windowsDefaultMachineType = "windows/amd64:8x16" - -var cpuMemShapePattern = regexp.MustCompile(`^\d+x\d+$`) - -func hasWindowsLabel(labels []string) bool { - for _, label := range labels { - l := strings.TrimSpace(label) - if l == "" { - continue - } - base := l - if before, _, ok := strings.Cut(l, ":"); ok { - base = before - } - if strings.HasPrefix(base, "namespace-profile-windows-") { - return true - } - } - return false -} - -func 
normalizeWindowsMachineType(machineType string, labels []string) string { - mt := strings.TrimSpace(machineType) - if strings.HasPrefix(mt, "windows/") { - return mt - } - if cpuMemShapePattern.MatchString(mt) { - return "windows/amd64:" + mt - } - - // Label-derived defaults: keep a simple shape ladder for explicit profile sizes. - for _, label := range labels { - base := strings.TrimSpace(label) - if before, _, ok := strings.Cut(base, ":"); ok { - base = before - } - switch { - case strings.HasPrefix(base, "namespace-profile-windows-small"): - return "windows/amd64:2x4" - case strings.HasPrefix(base, "namespace-profile-windows-medium"): - return "windows/amd64:4x8" - case strings.HasPrefix(base, "namespace-profile-windows-large"): - return windowsDefaultMachineType - } - } - return windowsDefaultMachineType -} - -func powershellSingleQuote(value string) string { - // PowerShell single-quoted string escaping: ' -> '' - return "'" + strings.ReplaceAll(value, "'", "''") + "'" -} diff --git a/services/forgejo-nsc/internal/nsc/windows_test.go b/services/forgejo-nsc/internal/nsc/windows_test.go deleted file mode 100644 index 2f1b5e6..0000000 --- a/services/forgejo-nsc/internal/nsc/windows_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package nsc - -import "testing" - -func TestHasWindowsLabel(t *testing.T) { - t.Parallel() - - cases := []struct { - name string - labels []string - want bool - }{ - { - name: "namespace windows label", - labels: []string{"namespace-profile-windows-large"}, - want: true, - }, - { - name: "namespace windows label with host suffix", - labels: []string{"namespace-profile-windows-large:host"}, - want: true, - }, - { - name: "non namespace windows-like label", - labels: []string{"burrow-winrunner:host"}, - want: false, - }, - { - name: "macos label", - labels: []string{"namespace-profile-macos-large"}, - want: false, - }, - } - - for _, tc := range cases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - got := 
hasWindowsLabel(tc.labels) - if got != tc.want { - t.Fatalf("hasWindowsLabel(%v) = %v, want %v", tc.labels, got, tc.want) - } - }) - } -} - -func TestNormalizeWindowsMachineType(t *testing.T) { - t.Parallel() - - cases := []struct { - name string - machine string - labels []string - wantPrefix string - }{ - { - name: "explicit windows machine type keeps value", - machine: "windows/amd64:8x16", - labels: []string{"namespace-profile-windows-large"}, - wantPrefix: "windows/amd64:8x16", - }, - { - name: "shape only is normalized", - machine: "4x8", - labels: []string{"namespace-profile-windows-large"}, - wantPrefix: "windows/amd64:4x8", - }, - { - name: "large label default", - machine: "", - labels: []string{"namespace-profile-windows-large"}, - wantPrefix: "windows/amd64:8x16", - }, - { - name: "medium label default", - machine: "", - labels: []string{"namespace-profile-windows-medium"}, - wantPrefix: "windows/amd64:4x8", - }, - { - name: "fallback default", - machine: "", - labels: []string{"namespace-profile-windows-custom"}, - wantPrefix: "windows/amd64:8x16", - }, - } - - for _, tc := range cases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - got := normalizeWindowsMachineType(tc.machine, tc.labels) - if got != tc.wantPrefix { - t.Fatalf("normalizeWindowsMachineType(%q, %v) = %q, want %q", tc.machine, tc.labels, got, tc.wantPrefix) - } - }) - } -} diff --git a/services/forgejo-nsc/internal/nsc/windows_winrm.go b/services/forgejo-nsc/internal/nsc/windows_winrm.go deleted file mode 100644 index 22f13c9..0000000 --- a/services/forgejo-nsc/internal/nsc/windows_winrm.go +++ /dev/null @@ -1,499 +0,0 @@ -package nsc - -import ( - "bufio" - "bytes" - "context" - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "time" -) - -type windowsProxyOutput struct { - Endpoint string `json:"endpoint"` - RDP struct { - Credentials struct { - Username string `json:"username"` - Password string 
`json:"password"` - } `json:"credentials"` - } `json:"rdp"` -} - -func (d *Dispatcher) launchWindowsRunnerViaWinRM(ctx context.Context, runnerName string, req LaunchRequest, ttl time.Duration, machineType string) error { - script := windowsBootstrapScript(runnerName, req, d.opts.Executor, d.opts.WorkDir) - return d.launchWindowsScriptViaWinRM(ctx, runnerName, ttl, machineType, req.Labels, script) -} - -func (d *Dispatcher) launchWindowsScriptViaWinRM(ctx context.Context, runnerName string, ttl time.Duration, machineType string, labels []string, script string) error { - if ttl <= 0 { - ttl = d.opts.DefaultDuration - } - - mt := normalizeWindowsMachineType(machineType, labels) - instanceID, createOutput, err := d.createWindowsInstance(ctx, runnerName, ttl, mt) - if err != nil { - return fmt.Errorf("windows create failed: %w\n%s", err, createOutput) - } - defer d.destroyNSCInstance(context.Background(), runnerName, instanceID) - - username, password, err := d.resolveWindowsCredentials(ctx, instanceID) - if err != nil { - return err - } - - if err := d.probeWindowsWinRMService(ctx, instanceID); err != nil { - return err - } - - endpoint, stopForward, err := d.startWindowsWinRMPortForward(ctx, instanceID) - if err != nil { - return err - } - defer stopForward() - - if err := d.runWindowsWinRMPowerShell(ctx, endpoint, username, password, script); err != nil { - return err - } - - return nil -} - -func (d *Dispatcher) createWindowsInstance(ctx context.Context, runnerName string, ttl time.Duration, machineType string) (instanceID string, output string, err error) { - tmpDir, err := os.MkdirTemp("", "forgejo-nsc-windows-*") - if err != nil { - return "", "", fmt.Errorf("mktemp: %w", err) - } - defer os.RemoveAll(tmpDir) - - metaPath := filepath.Join(tmpDir, "create.json") - cidPath := filepath.Join(tmpDir, "create.cid") - - args := []string{ - "create", - "--duration", ttl.String(), - "--machine_type", machineType, - "--cidfile", cidPath, - "--purpose", fmt.Sprintf("burrow 
forgejo runner %s", runnerName), - "--output", "plain", - "--output_json_to", metaPath, - "--wait_timeout", "6m", - } - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - - createCtx, cancel := context.WithTimeout(ctx, 8*time.Minute) - defer cancel() - - cmd := exec.CommandContext(createCtx, d.opts.BinaryPath, args...) - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - - if err := cmd.Run(); err != nil { - if created := strings.TrimSpace(mustReadFile(cidPath)); created != "" { - d.destroyNSCInstance(context.Background(), runnerName, created) - } - if errors.Is(createCtx.Err(), context.DeadlineExceeded) { - return "", buf.String(), fmt.Errorf("nsc create timed out after %s", 8*time.Minute) - } - return "", buf.String(), fmt.Errorf("nsc create failed: %w", err) - } - - instanceID, err = readNSCCreateInstanceID(metaPath) - if err != nil { - return "", buf.String(), fmt.Errorf("nsc create output parse failed: %w", err) - } - if instanceID == "" { - return "", buf.String(), errors.New("nsc create returned empty instance id") - } - return instanceID, buf.String(), nil -} - -func (d *Dispatcher) resolveWindowsCredentials(ctx context.Context, instanceID string) (username string, password string, err error) { - tmpDir, err := os.MkdirTemp("", "forgejo-nsc-winproxy-*") - if err != nil { - return "", "", fmt.Errorf("mktemp: %w", err) - } - defer os.RemoveAll(tmpDir) - - outPath := filepath.Join(tmpDir, "proxy.json") - outFile, err := os.Create(outPath) - if err != nil { - return "", "", fmt.Errorf("create proxy output file: %w", err) - } - defer outFile.Close() - - var stderr bytes.Buffer - args := []string{"instance", "proxy", instanceID, "-s", "rdp", "-o", "json"} - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - - proxyCtx, cancel := context.WithTimeout(ctx, 90*time.Second) - defer cancel() - - cmd := exec.CommandContext(proxyCtx, d.opts.BinaryPath, args...) 
- cmd.Stdout = outFile - cmd.Stderr = &stderr - - if err := cmd.Start(); err != nil { - return "", "", fmt.Errorf("start nsc instance proxy: %w", err) - } - - waitDone := make(chan struct{}) - var waitErr error - go func() { - waitErr = cmd.Wait() - close(waitDone) - }() - - var payload windowsProxyOutput - deadline := time.Now().Add(45 * time.Second) - for time.Now().Before(deadline) { - raw, _ := os.ReadFile(outPath) - jsonBlob := extractJSON(string(raw)) - if jsonBlob != "" { - if err := json.Unmarshal([]byte(jsonBlob), &payload); err == nil { - username = strings.TrimSpace(payload.RDP.Credentials.Username) - password = strings.TrimSpace(payload.RDP.Credentials.Password) - if username != "" && password != "" { - break - } - } - } - select { - case <-waitDone: - if waitErr != nil { - return "", "", fmt.Errorf("nsc instance proxy exited before credentials were available: %w\n%s", waitErr, stderr.String()) - } - default: - } - time.Sleep(1 * time.Second) - } - - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - <-waitDone - - if username == "" || password == "" { - raw, _ := os.ReadFile(outPath) - return "", "", fmt.Errorf("failed to resolve windows credentials from nsc instance proxy output\nstdout=%s\nstderr=%s", strings.TrimSpace(string(raw)), strings.TrimSpace(stderr.String())) - } - return username, password, nil -} - -func (d *Dispatcher) probeWindowsWinRMService(ctx context.Context, instanceID string) error { - args := []string{"instance", "proxy", instanceID, "-s", "winrm", "-o", "json", "--once"} - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - - probeCtx, cancel := context.WithTimeout(ctx, 15*time.Second) - defer cancel() - - cmd := exec.CommandContext(probeCtx, d.opts.BinaryPath, args...) 
- var out bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &out - - err := cmd.Run() - raw := strings.TrimSpace(out.String()) - if endpoint, ok := parseProxyEndpoint(raw); ok && endpoint != "" { - return nil - } - - if indicatesMissingProxyService(raw, "winrm") { - return fmt.Errorf("namespace windows non-interactive channel unavailable: instance does not expose winrm service (rdp-only)\n%s", raw) - } - - if errors.Is(probeCtx.Err(), context.DeadlineExceeded) { - return fmt.Errorf("timed out probing Namespace winrm service before bootstrap\n%s", raw) - } - - if err != nil { - return fmt.Errorf("nsc winrm service probe failed: %w\n%s", err, raw) - } - return fmt.Errorf("nsc winrm service probe did not yield endpoint output\n%s", raw) -} - -func parseProxyEndpoint(raw string) (string, bool) { - jsonBlob := extractJSON(raw) - if jsonBlob == "" { - return "", false - } - var payload struct { - Endpoint string `json:"endpoint"` - } - if err := json.Unmarshal([]byte(jsonBlob), &payload); err != nil { - return "", false - } - endpoint := strings.TrimSpace(payload.Endpoint) - if endpoint == "" { - return "", false - } - return endpoint, true -} - -func indicatesMissingProxyService(raw string, service string) bool { - service = strings.TrimSpace(service) - if service == "" { - return false - } - token := fmt.Sprintf("does not have service %q", service) - return strings.Contains(raw, token) -} - -func (d *Dispatcher) startWindowsWinRMPortForward(ctx context.Context, instanceID string) (endpoint string, stop func(), err error) { - args := []string{"instance", "port-forward", instanceID, "--target_port", "5985"} - args = prependNSCRegionArgs(args, d.opts.ComputeBaseURL) - - forwardCtx, cancel := context.WithCancel(ctx) - cmd := exec.CommandContext(forwardCtx, d.opts.BinaryPath, args...) 
- stdout, err := cmd.StdoutPipe() - if err != nil { - cancel() - return "", nil, fmt.Errorf("port-forward stdout pipe: %w", err) - } - var stderr bytes.Buffer - cmd.Stderr = &stderr - - if err := cmd.Start(); err != nil { - cancel() - return "", nil, fmt.Errorf("start nsc port-forward: %w", err) - } - - waitDone := make(chan struct{}) - var waitErr error - go func() { - waitErr = cmd.Wait() - close(waitDone) - }() - - endpointCh := make(chan string, 1) - scanErrCh := make(chan error, 1) - go func() { - scanner := bufio.NewScanner(stdout) - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if strings.HasPrefix(line, "Listening on ") { - endpointCh <- strings.TrimSpace(strings.TrimPrefix(line, "Listening on ")) - return - } - } - if err := scanner.Err(); err != nil { - scanErrCh <- err - } - }() - - select { - case endpoint = <-endpointCh: - stop = func() { - cancel() - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - <-waitDone - } - return endpoint, stop, nil - case err := <-scanErrCh: - cancel() - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - <-waitDone - return "", nil, fmt.Errorf("failed reading port-forward output: %w", err) - case <-waitDone: - cancel() - if waitErr != nil { - return "", nil, fmt.Errorf("nsc port-forward exited early: %w\n%s", waitErr, stderr.String()) - } - return "", nil, fmt.Errorf("nsc port-forward exited without endpoint\n%s", stderr.String()) - case <-time.After(45 * time.Second): - cancel() - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - <-waitDone - return "", nil, fmt.Errorf("timed out waiting for WinRM port-forward endpoint\n%s", stderr.String()) - case <-ctx.Done(): - cancel() - if cmd.Process != nil { - _ = cmd.Process.Kill() - } - <-waitDone - return "", nil, ctx.Err() - } -} - -func (d *Dispatcher) runWindowsWinRMPowerShell(ctx context.Context, endpoint, username, password, script string) error { - pythonPath, err := exec.LookPath("python3") - if err != nil { - return fmt.Errorf("python3 
is required for windows WinRM bootstrap: %w", err) - } - - workdir := strings.TrimSpace(d.opts.WorkDir) - if workdir == "" { - workdir = "/tmp/forgejo-runner" - } - if err := os.MkdirAll(workdir, 0o755); err != nil { - return fmt.Errorf("create workdir %s: %w", workdir, err) - } - - venvPath := filepath.Join(workdir, ".winrm-venv") - venvPython := filepath.Join(venvPath, "bin", "python") - if _, err := os.Stat(venvPython); err != nil { - cmd := exec.CommandContext(ctx, pythonPath, "-m", "venv", venvPath) - var out bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &out - if err := cmd.Run(); err != nil { - return fmt.Errorf("create python venv for winrm failed: %w\n%s", err, out.String()) - } - } - - ensurePyWinRM := ` -import importlib.util, subprocess, sys -if importlib.util.find_spec("winrm") is None: - subprocess.check_call([sys.executable, "-m", "pip", "install", "--quiet", "pywinrm"]) -` - ensureCmd := exec.CommandContext(ctx, venvPython, "-c", ensurePyWinRM) - var ensureOut bytes.Buffer - ensureCmd.Stdout = &ensureOut - ensureCmd.Stderr = &ensureOut - if err := ensureCmd.Run(); err != nil { - return fmt.Errorf("install pywinrm failed: %w\n%s", err, ensureOut.String()) - } - - runScript := ` -import base64, os, sys, time, traceback, winrm - -endpoint = os.environ["WINRM_ENDPOINT"] -user = os.environ["WINRM_USER"] -password = os.environ["WINRM_PASS"] -script = base64.b64decode(os.environ["WINRM_SCRIPT_B64"]).decode("utf-8") - -deadline = time.time() + 300.0 -last_err = None - -while time.time() < deadline: - try: - session = winrm.Session(f"http://{endpoint}/wsman", auth=(user, password), transport="ntlm") - result = session.run_ps(script) - sys.stdout.write(result.std_out.decode("utf-8", errors="replace")) - sys.stderr.write(result.std_err.decode("utf-8", errors="replace")) - print(f"winrm_exit={result.status_code}") - sys.exit(result.status_code) - except Exception as err: - last_err = err - time.sleep(5.0) - -sys.stderr.write("timed out waiting for WinRM 
connectivity after 300s\\n") -if last_err is not None: - traceback.print_exception(last_err, file=sys.stderr) -sys.exit(111) -` - runCmd := exec.CommandContext(ctx, venvPython, "-c", runScript) - runCmd.Env = append(os.Environ(), - "WINRM_ENDPOINT="+endpoint, - "WINRM_USER="+username, - "WINRM_PASS="+password, - "WINRM_SCRIPT_B64="+base64.StdEncoding.EncodeToString([]byte(script)), - ) - var runOut bytes.Buffer - runCmd.Stdout = &runOut - runCmd.Stderr = &runOut - if err := runCmd.Run(); err != nil { - return fmt.Errorf("windows winrm bootstrap command failed: %w\n%s", err, runOut.String()) - } - return nil -} - -func windowsBootstrapScript(runnerName string, req LaunchRequest, executor, workdir string) string { - if strings.TrimSpace(workdir) == "" { - workdir = `C:\burrow\forgejo-runner` - } - - runnerExec := strings.TrimSpace(executor) - if runnerExec == "" || runnerExec == "shell" { - runnerExec = "host" - } - - safeName := strings.NewReplacer(`\`, "-", ":", "-", "/", "-", " ", "-").Replace(runnerName) - workRoot := strings.TrimRight(workdir, `\`) + `\` + safeName - - var b strings.Builder - b.WriteString("$ErrorActionPreference = 'Stop'\n") - b.WriteString("$ProgressPreference = 'SilentlyContinue'\n") - b.WriteString("[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12\n") - b.WriteString("$runnerName = " + powershellSingleQuote(runnerName) + "\n") - b.WriteString("$runnerToken = " + powershellSingleQuote(req.Token) + "\n") - b.WriteString("$instanceURL = " + powershellSingleQuote(req.InstanceURL) + "\n") - b.WriteString("$labelsCsv = " + powershellSingleQuote(strings.Join(req.Labels, ",")) + "\n") - b.WriteString("$runnerExec = " + powershellSingleQuote(runnerExec) + "\n") - b.WriteString("$workRoot = " + powershellSingleQuote(workRoot) + "\n") - b.WriteString(` -New-Item -Path $workRoot -ItemType Directory -Force | Out-Null -Set-Location $workRoot - -$runnerVersion = "12.6.4" -$zipUrl = 
"https://code.forgejo.org/forgejo/runner/releases/download/v${runnerVersion}/forgejo-runner-${runnerVersion}-windows-amd64.zip" -$zipPath = Join-Path $workRoot "forgejo-runner.zip" -$extractDir = Join-Path $workRoot "forgejo-runner" - -if (Test-Path $extractDir) { - Remove-Item -Path $extractDir -Recurse -Force -} - -Invoke-WebRequest -Uri $zipUrl -OutFile $zipPath -Expand-Archive -Path $zipPath -DestinationPath $extractDir -Force - -$runnerExe = Join-Path $extractDir "forgejo-runner.exe" -if (-not (Test-Path $runnerExe)) { - throw "Missing forgejo-runner.exe after extract: $runnerExe" -} - -$labels = @() -foreach ($label in ($labelsCsv -split ",")) { - $trimmed = $label.Trim() - if ([string]::IsNullOrWhiteSpace($trimmed)) { continue } - if ($trimmed.Contains(":")) { - $labels += $trimmed - } else { - $labels += ("{0}:{1}" -f $trimmed, $runnerExec) - } -} -if ($labels.Count -eq 0) { - throw "No runner labels resolved for windows bootstrap" -} - -$labelLines = ($labels | ForEach-Object { " - $_" }) -join [Environment]::NewLine -$configPath = Join-Path $workRoot "runner.yaml" -$runnerYaml = @" -log: - level: info -runner: - file: .runner - capacity: 1 - name: $runnerName - labels: -$labelLines -cache: - enabled: false -"@ -Set-Content -Path $configPath -Value $runnerYaml -Encoding UTF8 - -$labelsArg = ($labels -join ",") -& $runnerExe register --no-interactive --instance $instanceURL --token $runnerToken --name $runnerName --labels $labelsArg --config $configPath -if ($LASTEXITCODE -ne 0) { - throw ("forgejo-runner register failed: {0}" -f $LASTEXITCODE) -} - -& $runnerExe one-job --config $configPath -if ($LASTEXITCODE -ne 0) { - throw ("forgejo-runner one-job failed: {0}" -f $LASTEXITCODE) -} -`) - return b.String() -} diff --git a/services/forgejo-nsc/internal/nsc/windows_winrm_integration_test.go b/services/forgejo-nsc/internal/nsc/windows_winrm_integration_test.go deleted file mode 100644 index 407749b..0000000 --- 
a/services/forgejo-nsc/internal/nsc/windows_winrm_integration_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package nsc - -import ( - "context" - "io" - "log/slog" - "os" - "os/exec" - "strings" - "testing" - "time" -) - -func TestWindowsWinRMScriptRoundTrip(t *testing.T) { - if os.Getenv("NSC_WINDOWS_E2E") != "1" { - t.Skip("set NSC_WINDOWS_E2E=1 to run Namespace Windows integration test") - } - - nscBinary, err := exec.LookPath("nsc") - if err != nil { - t.Skipf("nsc not found in PATH: %v", err) - } - - authCheck := exec.Command(nscBinary, "auth", "check-login") - if out, err := authCheck.CombinedOutput(); err != nil { - t.Skipf("nsc auth check-login failed: %v (%s)", err, strings.TrimSpace(string(out))) - } - - machineType := strings.TrimSpace(os.Getenv("NSC_WINDOWS_E2E_MACHINE_TYPE")) - if machineType == "" { - machineType = "windows/amd64:4x8" - } - - dispatcher, err := NewDispatcher(Options{ - BinaryPath: nscBinary, - DefaultImage: "code.forgejo.org/forgejo/runner:11", - DefaultMachine: machineType, - DefaultDuration: 20 * time.Minute, - MaxParallel: 1, - WorkDir: t.TempDir(), - ComputeBaseURL: strings.TrimSpace(os.Getenv("NSC_COMPUTE_BASE_URL")), - Logger: slog.New(slog.NewTextHandler(io.Discard, nil)), - }) - if err != nil { - t.Fatalf("NewDispatcher() error: %v", err) - } - - ctx, cancel := context.WithTimeout(context.Background(), 20*time.Minute) - defer cancel() - - script := "Write-Output ('winrm-ok:' + $env:COMPUTERNAME)" - labels := []string{"namespace-profile-windows-medium"} - if err := dispatcher.launchWindowsScriptViaWinRM(ctx, "nsc-winrm-itest", 20*time.Minute, machineType, labels, script); err != nil { - if strings.Contains(err.Error(), "does not expose winrm service (rdp-only)") { - t.Skipf("namespace windows control channel is rdp-only: %v", err) - } - t.Fatalf("launchWindowsScriptViaWinRM() error: %v", err) - } -} diff --git a/services/forgejo-nsc/internal/nsc/windows_winrm_test.go b/services/forgejo-nsc/internal/nsc/windows_winrm_test.go deleted 
file mode 100644 index 538d009..0000000 --- a/services/forgejo-nsc/internal/nsc/windows_winrm_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package nsc - -import "testing" - -func TestParseProxyEndpoint(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - raw string - want string - wantOK bool - }{ - { - name: "plain json payload", - raw: `{"endpoint":"127.0.0.1:61234"}`, - want: "127.0.0.1:61234", - wantOK: true, - }, - { - name: "json wrapped with extra output", - raw: `Connected. -{"endpoint":"127.0.0.1:61235","rdp":{"credentials":{"username":"runneradmin","password":"runneradmin"}}}`, - want: "127.0.0.1:61235", - wantOK: true, - }, - { - name: "missing endpoint field", - raw: `{"rdp":{"credentials":{"username":"runneradmin"}}}`, - wantOK: false, - }, - { - name: "non-json output", - raw: `Failed: instance does not have service "winrm"`, - wantOK: false, - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - got, ok := parseProxyEndpoint(tc.raw) - if ok != tc.wantOK { - t.Fatalf("parseProxyEndpoint(%q) ok=%v, want %v", tc.raw, ok, tc.wantOK) - } - if got != tc.want { - t.Fatalf("parseProxyEndpoint(%q) endpoint=%q, want %q", tc.raw, got, tc.want) - } - }) - } -} - -func TestIndicatesMissingProxyService(t *testing.T) { - t.Parallel() - - raw := `Failed: instance does not have service "winrm"` - if !indicatesMissingProxyService(raw, "winrm") { - t.Fatalf("indicatesMissingProxyService should return true for missing winrm message") - } - if indicatesMissingProxyService(raw, "ssh") { - t.Fatalf("indicatesMissingProxyService should be false when service name does not match") - } -} diff --git a/services/forgejo-nsc/internal/server/server.go b/services/forgejo-nsc/internal/server/server.go deleted file mode 100644 index b4bb1d2..0000000 --- a/services/forgejo-nsc/internal/server/server.go +++ /dev/null @@ -1,151 +0,0 @@ -package server - -import ( - "context" - "encoding/json" - "errors" - "log/slog" - 
"net/http" - "time" - - "github.com/go-chi/chi/v5" - "github.com/go-chi/chi/v5/middleware" - - "github.com/burrow/forgejo-nsc/internal/app" -) - -type Server struct { - httpServer *http.Server - app *app.Service - log *slog.Logger -} - -func New(listen string, svc *app.Service, logger *slog.Logger) *Server { - if logger == nil { - logger = slog.Default() - } - - router := chi.NewRouter() - router.Use(middleware.RequestID) - router.Use(middleware.RealIP) - router.Use(middleware.Logger) - router.Use(middleware.Recoverer) - - s := &Server{ - app: svc, - log: logger, - httpServer: &http.Server{ - Addr: listen, - Handler: router, - ReadTimeout: 30 * time.Second, - // Dispatch requests can legitimately run for the duration of a build. - // A short WriteTimeout will kill the request context mid-provisioning. - WriteTimeout: 2 * time.Hour, - IdleTimeout: 60 * time.Second, - }, - } - - router.Get("/healthz", s.handleHealthz) - router.Post("/api/v1/dispatch", s.handleDispatch) - - return s -} - -func (s *Server) ListenAndServe() error { - return s.httpServer.ListenAndServe() -} - -func (s *Server) Shutdown(ctx context.Context) error { - return s.httpServer.Shutdown(ctx) -} - -// Handler exposes the underlying HTTP handler for tests. 
-func (s *Server) Handler() http.Handler { - return s.httpServer.Handler -} - -type dispatchRequest struct { - Count int `json:"count"` - Labels []string `json:"labels"` - Scope *dispatchScope `json:"scope"` - TTL string `json:"ttl"` - Machine string `json:"machine_type"` - Image string `json:"image"` - Env map[string]string `json:"env"` -} - -type dispatchScope struct { - Level string `json:"level"` - Owner string `json:"owner"` - Name string `json:"name"` -} - -func (s *Server) handleDispatch(w http.ResponseWriter, r *http.Request) { - var payload dispatchRequest - if err := json.NewDecoder(r.Body).Decode(&payload); err != nil { - s.writeError(w, http.StatusBadRequest, err) - return - } - - duration, err := parseDuration(payload.TTL) - if err != nil { - s.writeError(w, http.StatusBadRequest, err) - return - } - - var scope *app.Scope - if payload.Scope != nil { - scope = &app.Scope{ - Level: payload.Scope.Level, - Owner: payload.Scope.Owner, - Name: payload.Scope.Name, - } - } - - resp, err := s.app.Dispatch(r.Context(), app.DispatchRequest{ - Count: payload.Count, - Labels: payload.Labels, - Scope: scope, - TTL: duration, - Machine: payload.Machine, - Image: payload.Image, - ExtraEnv: payload.Env, - }) - if err != nil { - s.writeError(w, http.StatusInternalServerError, err) - return - } - - s.writeJSON(w, http.StatusOK, resp) -} - -func parseDuration(value string) (time.Duration, error) { - if value == "" { - return 0, nil - } - dur, err := time.ParseDuration(value) - if err != nil { - return 0, err - } - if dur <= 0 { - return 0, errors.New("ttl must be positive") - } - return dur, nil -} - -func (s *Server) handleHealthz(w http.ResponseWriter, _ *http.Request) { - s.writeJSON(w, http.StatusOK, map[string]string{"status": "ok"}) -} - -func (s *Server) writeError(w http.ResponseWriter, code int, err error) { - s.log.Error("request failed", "err", err, "status", code) - s.writeJSON(w, code, map[string]string{ - "error": err.Error(), - }) -} - -func (s *Server) 
writeJSON(w http.ResponseWriter, code int, payload any) { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(code) - _ = json.NewEncoder(w).Encode(payload) -} diff --git a/services/forgejo-nsc/internal/server/server_test.go b/services/forgejo-nsc/internal/server/server_test.go deleted file mode 100644 index 09a9743..0000000 --- a/services/forgejo-nsc/internal/server/server_test.go +++ /dev/null @@ -1,111 +0,0 @@ -package server - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "net/http/httptest" - "sync" - "testing" - "time" - - "github.com/burrow/forgejo-nsc/internal/app" - "github.com/burrow/forgejo-nsc/internal/forgejo" - "github.com/burrow/forgejo-nsc/internal/nsc" -) - -type serverForgejoMock struct { - mu sync.Mutex - token string - scopes []forgejo.Scope -} - -func (m *serverForgejoMock) RegistrationToken(ctx context.Context, scope forgejo.Scope) (string, error) { - m.mu.Lock() - defer m.mu.Unlock() - m.scopes = append(m.scopes, scope) - return m.token, nil -} - -type serverDispatcherMock struct { - mu sync.Mutex - requests []nsc.LaunchRequest - result string -} - -func (m *serverDispatcherMock) LaunchRunner(ctx context.Context, req nsc.LaunchRequest) (string, error) { - m.mu.Lock() - defer m.mu.Unlock() - m.requests = append(m.requests, req) - if m.result != "" { - return m.result, nil - } - return "runner", nil -} - -func TestDispatchEndpoint(t *testing.T) { - forgejoMock := &serverForgejoMock{token: "token"} - dispatcherMock := &serverDispatcherMock{result: "runner-http"} - - cfg := app.Config{ - DefaultScope: forgejo.Scope{Level: forgejo.ScopeInstance}, - DefaultLabels: []string{"fallback"}, - InstanceURL: "https://forgejo.example.com", - DefaultTTL: 30 * time.Minute, - } - - service := app.NewService(cfg, forgejoMock, dispatcherMock, nil) - srv := New(":0", service, nil) - ts := httptest.NewServer(srv.Handler()) - defer ts.Close() - - body := map[string]any{ - "count": 1, - "ttl": "45m", - "labels": 
[]string{"nscloud-arm"}, - "scope": map[string]string{"level": string(forgejo.ScopeOrganization), "owner": "acme"}, - "machine_type": "8x16", - "image": "runner:http", - "env": map[string]string{"FOO": "bar"}, - } - - payload, _ := json.Marshal(body) - - resp, err := http.Post(ts.URL+"/api/v1/dispatch", "application/json", bytes.NewReader(payload)) - if err != nil { - t.Fatalf("POST failed: %v", err) - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - t.Fatalf("expected 200 OK, got %d", resp.StatusCode) - } - - var decoded app.DispatchResponse - if err := json.NewDecoder(resp.Body).Decode(&decoded); err != nil { - t.Fatalf("failed to decode response: %v", err) - } - - if len(decoded.Runners) != 1 || decoded.Runners[0].Name != "runner-http" { - t.Fatalf("unexpected response: %+v", decoded) - } - - if len(forgejoMock.scopes) != 1 || forgejoMock.scopes[0].Level != forgejo.ScopeOrganization { - t.Fatalf("expected organization scope, got %+v", forgejoMock.scopes) - } - - if len(dispatcherMock.requests) != 1 { - t.Fatalf("expected dispatcher call") - } - call := dispatcherMock.requests[0] - if call.Duration != 45*time.Minute { - t.Fatalf("expected ttl override, got %v", call.Duration) - } - if call.Labels[0] != "nscloud-arm" { - t.Fatalf("expected labels passthrough, got %v", call.Labels) - } - if call.ExtraEnv["FOO"] != "bar" { - t.Fatalf("expected env passthrough") - } -} diff --git a/site/.eslintrc.json b/site/.eslintrc.json deleted file mode 100644 index bffb357..0000000 --- a/site/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "next/core-web-vitals" -} diff --git a/site/.gitignore b/site/.gitignore deleted file mode 100644 index 71b863e..0000000 --- a/site/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -/node_modules -/out/ - -/.next/ -next-env.d.ts diff --git a/site/.prettierignore b/site/.prettierignore deleted file mode 100644 index fcac576..0000000 --- a/site/.prettierignore +++ /dev/null @@ -1,6 +0,0 @@ -# Ignore artifacts: -build 
-coverage - -# Ignore all HTML files: -**/*.html \ No newline at end of file diff --git a/site/assets/Bold.woff2 b/site/assets/Bold.woff2 deleted file mode 100644 index 8c00084..0000000 Binary files a/site/assets/Bold.woff2 and /dev/null differ diff --git a/site/assets/Italic.woff2 b/site/assets/Italic.woff2 deleted file mode 100644 index 056909c..0000000 Binary files a/site/assets/Italic.woff2 and /dev/null differ diff --git a/site/assets/Regular.woff2 b/site/assets/Regular.woff2 deleted file mode 100644 index d7c3d52..0000000 Binary files a/site/assets/Regular.woff2 and /dev/null differ diff --git a/site/bun.lockb b/site/bun.lockb deleted file mode 100755 index ea2d137..0000000 Binary files a/site/bun.lockb and /dev/null differ diff --git a/site/layout/layout.tsx b/site/layout/layout.tsx deleted file mode 100644 index 057aa68..0000000 --- a/site/layout/layout.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import localFont from "next/font/local"; - -const phantomSans = localFont({ - src: [ - { - path: "../assets/Regular.woff2", - weight: "400", - style: "normal", - }, - { - path: "../assets/Italic.woff2", - weight: "400", - style: "italic", - }, - { - path: "../assets/Bold.woff2", - weight: "700", - style: "normal", - }, - ], - variable: "--font-phantom-sans", -}); - -const fallbackFontVariables = { - "--font-space-mono": - '"SFMono-Regular", "SF Mono", ui-monospace, Menlo, Monaco, "Cascadia Mono", "Segoe UI Mono", "Roboto Mono", monospace', - "--font-poppins": - 'var(--font-phantom-sans), -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif', -} as React.CSSProperties; - -export default function Layout({ children }: { children: React.ReactNode }) { - return ( -
- {children} -
- ); -} diff --git a/site/next.config.js b/site/next.config.js deleted file mode 100644 index afbb70f..0000000 --- a/site/next.config.js +++ /dev/null @@ -1,13 +0,0 @@ -/** @type {import('next').NextConfig} */ -const nextConfig = { - headers() { - return [ - { - source: "/.well-known/apple-app-site-association", - headers: [{ key: "Content-Type", value: "application/json" }], - } - ]; - } -}; - -module.exports = nextConfig; diff --git a/site/package-lock.json b/site/package-lock.json deleted file mode 100644 index e1357f9..0000000 --- a/site/package-lock.json +++ /dev/null @@ -1,3907 +0,0 @@ -{ - "name": "burrow", - "version": "0.1.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "burrow", - "version": "0.1.0", - "dependencies": { - "@fortawesome/fontawesome-free": "^6.4.2", - "@fortawesome/fontawesome-svg-core": "^6.4.2", - "@fortawesome/free-brands-svg-icons": "^6.4.2", - "@fortawesome/free-solid-svg-icons": "^6.4.2", - "@fortawesome/react-fontawesome": "^0.2.0", - "@headlessui/react": "^1.7.17", - "@headlessui/tailwindcss": "^0.2.0", - "@types/node": "20.5.8", - "@types/react": "18.2.21", - "@types/react-dom": "18.2.7", - "autoprefixer": "10.4.15", - "eslint": "8.48.0", - "eslint-config-next": "13.4.19", - "next": "13.4.19", - "postcss": "8.4.29", - "react": "18.2.0", - "react-dom": "18.2.0", - "tailwindcss": "3.3.3", - "typescript": "5.2.2" - }, - "devDependencies": { - "prettier": "^3.0.3", - "prettier-plugin-tailwindcss": "^0.5.4" - } - }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@alloc/quick-lru": { - "version": "5.2.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@babel/runtime": { - "version": "7.22.11", - "license": "MIT", - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": 
">=6.9.0" - } - }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.0", - "license": "MIT", - "dependencies": { - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" - } - }, - "node_modules/@eslint-community/regexpp": { - "version": "4.8.0", - "license": "MIT", - "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "2.1.2", - "license": "MIT", - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@eslint/js": { - "version": "8.48.0", - "license": "MIT", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/@fortawesome/fontawesome-common-types": { - "version": "6.5.1", - "hasInstallScript": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/fontawesome-free": { - "version": "6.5.1", - "hasInstallScript": true, - "license": "(CC-BY-4.0 AND OFL-1.1 AND MIT)", - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/fontawesome-svg-core": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-6.7.2.tgz", - "integrity": "sha512-yxtOBWDrdi5DD5o1pmVdq3WMCvnobT0LU6R8RyyVXPvFRd2o79/0NCuQoCjNTeZz9EzA9xS3JxNWfv54RIHFEA==", - "license": "MIT", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.7.2" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/fontawesome-svg-core/node_modules/@fortawesome/fontawesome-common-types": { - "version": "6.7.2", - 
"resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.7.2.tgz", - "integrity": "sha512-Zs+YeHUC5fkt7Mg1l6XTniei3k4bwG/yo3iFUtZWd/pMx9g3fdvkSK9E0FOC+++phXOka78uJcYb8JaFkW52Xg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/free-brands-svg-icons": { - "version": "6.5.1", - "hasInstallScript": true, - "license": "(CC-BY-4.0 AND MIT)", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.5.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/free-solid-svg-icons": { - "version": "6.5.1", - "hasInstallScript": true, - "license": "(CC-BY-4.0 AND MIT)", - "dependencies": { - "@fortawesome/fontawesome-common-types": "6.5.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/@fortawesome/react-fontawesome": { - "version": "0.2.0", - "license": "MIT", - "dependencies": { - "prop-types": "^15.8.1" - }, - "peerDependencies": { - "@fortawesome/fontawesome-svg-core": "~1 || ~6", - "react": ">=16.3" - } - }, - "node_modules/@headlessui/react": { - "version": "1.7.18", - "license": "MIT", - "dependencies": { - "@tanstack/react-virtual": "^3.0.0-beta.60", - "client-only": "^0.0.1" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "react": "^16 || ^17 || ^18", - "react-dom": "^16 || ^17 || ^18" - } - }, - "node_modules/@headlessui/tailwindcss": { - "version": "0.2.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "tailwindcss": "^3.0" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.11.11", - "license": "Apache-2.0", - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "license": "Apache-2.0", - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "license": "BSD-3-Clause" - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "license": "MIT", - "dependencies": { - "@jridgewell/set-array": "^1.0.1", - "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.1", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.19", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@next/env": { - "version": "13.4.19", - "license": "MIT" - }, - "node_modules/@next/eslint-plugin-next": { - "version": "13.4.19", - "license": "MIT", - "dependencies": { - "glob": "7.1.7" - } - }, - "node_modules/@next/swc-darwin-arm64": { - "version": "13.4.19", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-darwin-x64": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.19.tgz", - "integrity": "sha512-jyzO6wwYhx6F+7gD8ddZfuqO4TtpJdw3wyOduR4fxTUCm3aLw7YmHGYNjS0xRSYGAkLpBkH1E0RcelyId6lNsw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-gnu": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.19.tgz", - "integrity": 
"sha512-vdlnIlaAEh6H+G6HrKZB9c2zJKnpPVKnA6LBwjwT2BTjxI7e0Hx30+FoWCgi50e+YO49p6oPOtesP9mXDRiiUg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-arm64-musl": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.19.tgz", - "integrity": "sha512-aU0HkH2XPgxqrbNRBFb3si9Ahu/CpaR5RPmN2s9GiM9qJCiBBlZtRTiEca+DC+xRPyCThTtWYgxjWHgU7ZkyvA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-gnu": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.19.tgz", - "integrity": "sha512-htwOEagMa/CXNykFFeAHHvMJeqZfNQEoQvHfsA4wgg5QqGNqD5soeCer4oGlCol6NGUxknrQO6VEustcv+Md+g==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-linux-x64-musl": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.19.tgz", - "integrity": "sha512-4Gj4vvtbK1JH8ApWTT214b3GwUh9EKKQjY41hH/t+u55Knxi/0wesMzwQRhppK6Ddalhu0TEttbiJ+wRcoEj5Q==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-arm64-msvc": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.19.tgz", - "integrity": "sha512-bUfDevQK4NsIAHXs3/JNgnvEY+LRyneDN788W2NYiRIIzmILjba7LaQTfihuFawZDhRtkYCv3JDC3B4TwnmRJw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-ia32-msvc": { - "version": "13.4.19", - "resolved": 
"https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.19.tgz", - "integrity": "sha512-Y5kikILFAr81LYIFaw6j/NrOtmiM4Sf3GtOc0pn50ez2GCkr+oejYuKGcwAwq3jiTKuzF6OF4iT2INPoxRycEA==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@next/swc-win32-x64-msvc": { - "version": "13.4.19", - "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.19.tgz", - "integrity": "sha512-YzA78jBDXMYiINdPdJJwGgPNT3YqBNNGhsthsDoWHL9p24tEJn9ViQf/ZqTbwSpX/RrkPupLfuuTH2sf73JBAw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@rushstack/eslint-patch": { - "version": "1.3.3", - "license": "MIT" - }, - "node_modules/@swc/helpers": { - "version": "0.5.1", - "license": "Apache-2.0", - "dependencies": { - "tslib": "^2.4.0" - } - }, - "node_modules/@tanstack/react-virtual": { - "version": "3.2.0", - "license": "MIT", - "dependencies": { - "@tanstack/virtual-core": "3.2.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/tannerlinsley" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0", - "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/@tanstack/virtual-core": { - "version": "3.2.0", - "license": "MIT", - "funding": { - "type": "github", - "url": 
"https://github.com/sponsors/tannerlinsley" - } - }, - "node_modules/@types/json5": { - "version": "0.0.29", - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "20.5.8", - "license": "MIT" - }, - "node_modules/@types/prop-types": { - "version": "15.7.5", - "license": "MIT" - }, - "node_modules/@types/react": { - "version": "18.2.21", - "license": "MIT", - "dependencies": { - "@types/prop-types": "*", - "@types/scheduler": "*", - "csstype": "^3.0.2" - } - }, - "node_modules/@types/react-dom": { - "version": "18.2.7", - "license": "MIT", - "dependencies": { - "@types/react": "*" - } - }, - "node_modules/@types/scheduler": { - "version": "0.16.3", - "license": "MIT" - }, - "node_modules/@typescript-eslint/parser": { - "version": "6.5.0", - "license": "BSD-2-Clause", - "dependencies": { - "@typescript-eslint/scope-manager": "6.5.0", - "@typescript-eslint/types": "6.5.0", - "@typescript-eslint/typescript-estree": "6.5.0", - "@typescript-eslint/visitor-keys": "6.5.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "6.5.0", - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "6.5.0", - "@typescript-eslint/visitor-keys": "6.5.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "6.5.0", - "license": "MIT", - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": 
"6.5.0", - "license": "BSD-2-Clause", - "dependencies": { - "@typescript-eslint/types": "6.5.0", - "@typescript-eslint/visitor-keys": "6.5.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.5.4", - "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.5.0", - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "6.5.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/acorn": { - "version": "8.10.0", - "license": "MIT", - "peer": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": 
">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/any-promise": { - "version": "1.3.0", - "license": "MIT" - }, - "node_modules/anymatch": { - "version": "3.1.3", - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/arg": { - "version": "5.0.2", - "license": "MIT" - }, - "node_modules/argparse": { - "version": "2.0.1", - "license": "Python-2.0" - }, - "node_modules/aria-query": { - "version": "5.3.0", - "license": "Apache-2.0", - "dependencies": { - "dequal": "^2.0.3" - } - }, - "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-includes": { - "version": "3.1.6", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", - "is-string": "^1.0.7" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array-union": { - "version": "2.1.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/array.prototype.findlastindex": { - "version": "1.2.3", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flat": { - "version": "1.3.1", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": 
{ - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/array.prototype.tosorted": { - "version": "1.1.1", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" - } - }, - "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", - "is-shared-array-buffer": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ast-types-flow": { - "version": "0.0.7", - "license": "ISC" - }, - "node_modules/asynciterator.prototype": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - } - }, - "node_modules/autoprefixer": { - "version": "10.4.15", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001520", - "fraction.js": "^4.2.0", - "normalize-range": "^0.1.2", - "picocolors": "^1.0.0", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": 
"^8.1.0" - } - }, - "node_modules/available-typed-arrays": { - "version": "1.0.5", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/axe-core": { - "version": "4.7.2", - "license": "MPL-2.0", - "engines": { - "node": ">=4" - } - }, - "node_modules/axobject-query": { - "version": "3.2.1", - "license": "Apache-2.0", - "dependencies": { - "dequal": "^2.0.3" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "license": "MIT" - }, - "node_modules/binary-extensions": { - "version": "2.2.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "license": "MIT", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.21.10", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "dependencies": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", - "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/busboy": { - "version": "1.6.0", - "dependencies": { - "streamsearch": "^1.1.0" - }, - "engines": { - "node": ">=10.16.0" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase-css": { - "version": "2.0.1", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001525", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/chalk": { - "version": "4.1.2", - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chokidar": { - "version": "3.5.3", - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/glob-parent": { - "version": "5.1.2", - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/client-only": { - "version": "0.0.1", - "license": "MIT" - }, - "node_modules/color-convert": { - "version": "2.0.1", - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "license": "MIT" - }, - "node_modules/commander": { - "version": "4.1.1", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - 
"node_modules/concat-map": { - "version": "0.0.1", - "license": "MIT" - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/cssesc": { - "version": "3.0.0", - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/csstype": { - "version": "3.1.2", - "license": "MIT" - }, - "node_modules/damerau-levenshtein": { - "version": "1.0.8", - "license": "BSD-2-Clause" - }, - "node_modules/debug": { - "version": "4.3.4", - "license": "MIT", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "license": "MIT" - }, - "node_modules/define-properties": { - "version": "1.2.0", - "license": "MIT", - "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/dequal": { - "version": "2.0.3", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/didyoumean": { - "version": "1.2.2", - "license": "Apache-2.0" - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dlv": { - "version": "1.1.3", - "license": "MIT" - }, - "node_modules/doctrine": { - "version": "3.0.0", - "license": "Apache-2.0", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/electron-to-chromium": { - "version": "1.4.508", - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "license": "MIT" - }, - "node_modules/enhanced-resolve": { - 
"version": "5.15.0", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/es-abstract": { - "version": "1.22.1", - "license": "MIT", - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.1", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", - "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.1", - "get-symbol-description": "^1.0.0", - "globalthis": "^1.0.3", - "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", - "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.0", - "safe-array-concat": "^1.0.0", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", - "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.10" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/es-iterator-helpers": { - "version": "1.0.14", - "license": "MIT", - "dependencies": { - "asynciterator.prototype": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-set-tostringtag": "^2.0.1", - "function-bind": "^1.1.1", - "get-intrinsic": "^1.2.1", - "globalthis": "^1.0.3", - 
"has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "iterator.prototype": "^1.1.0", - "safe-array-concat": "^1.0.0" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "has": "^1.0.3" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "license": "MIT", - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/escalade": { - "version": "3.1.1", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "8.48.0", - "license": "MIT", - "peer": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.2", - "@eslint/js": "8.48.0", - "@humanwhocodes/config-array": "^0.11.10", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.12.4", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.2", - "eslint-visitor-keys": "^3.4.3", - "espree": "^9.6.1", - "esquery": "^1.4.2", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - 
"imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", - "text-table": "^0.2.0" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-next": { - "version": "13.4.19", - "license": "MIT", - "dependencies": { - "@next/eslint-plugin-next": "13.4.19", - "@rushstack/eslint-patch": "^1.1.3", - "@typescript-eslint/parser": "^5.4.2 || ^6.0.0", - "eslint-import-resolver-node": "^0.3.6", - "eslint-import-resolver-typescript": "^3.5.2", - "eslint-plugin-import": "^2.26.0", - "eslint-plugin-jsx-a11y": "^6.5.1", - "eslint-plugin-react": "^7.31.7", - "eslint-plugin-react-hooks": "^4.5.0 || 5.0.0-canary-7118f5dd7-20230705" - }, - "peerDependencies": { - "eslint": "^7.23.0 || ^8.0.0", - "typescript": ">=3.3.1" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/eslint-import-resolver-node": { - "version": "0.3.9", - "license": "MIT", - "dependencies": { - "debug": "^3.2.7", - "is-core-module": "^2.13.0", - "resolve": "^1.22.4" - } - }, - "node_modules/eslint-import-resolver-node/node_modules/debug": { - "version": "3.2.7", - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-import-resolver-typescript": { - "version": "3.6.0", - "license": "ISC", - "dependencies": { - "debug": "^4.3.4", - "enhanced-resolve": "^5.12.0", - "eslint-module-utils": "^2.7.4", - "fast-glob": "^3.3.1", - "get-tsconfig": "^4.5.0", - "is-core-module": "^2.11.0", - "is-glob": "^4.0.3" - }, - "engines": { - "node": "^14.18.0 || >=16.0.0" - }, - "funding": { - "url": 
"https://opencollective.com/unts/projects/eslint-import-resolver-ts" - }, - "peerDependencies": { - "eslint": "*", - "eslint-plugin-import": "*" - } - }, - "node_modules/eslint-module-utils": { - "version": "2.8.0", - "license": "MIT", - "dependencies": { - "debug": "^3.2.7" - }, - "engines": { - "node": ">=4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - } - } - }, - "node_modules/eslint-module-utils/node_modules/debug": { - "version": "3.2.7", - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.28.1", - "license": "MIT", - "peer": true, - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.findlastindex": "^1.2.2", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", - "debug": "^3.2.7", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.8.0", - "has": "^1.0.3", - "is-core-module": "^2.13.0", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.fromentries": "^2.0.6", - "object.groupby": "^1.0.0", - "object.values": "^1.1.6", - "semver": "^6.3.1", - "tsconfig-paths": "^3.14.2" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" - } - }, - "node_modules/eslint-plugin-import/node_modules/debug": { - "version": "3.2.7", - "license": "MIT", - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "2.1.0", - "license": "Apache-2.0", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.7.1", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.20.7", - "aria-query": "^5.1.3", - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "ast-types-flow": "^0.0.7", - "axe-core": "^4.6.2", - "axobject-query": "^3.1.1", - "damerau-levenshtein": 
"^1.0.8", - "emoji-regex": "^9.2.2", - "has": "^1.0.3", - "jsx-ast-utils": "^3.3.3", - "language-tags": "=1.0.5", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=4.0" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/semver": { - "version": "6.3.1", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-plugin-react": { - "version": "7.33.2", - "license": "MIT", - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", - "doctrine": "^2.1.0", - "es-iterator-helpers": "^1.0.12", - "estraverse": "^5.3.0", - "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", - "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", - "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.8" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-react-hooks": { - "version": "4.6.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" - } - }, - "node_modules/eslint-plugin-react/node_modules/doctrine": { - "version": "2.1.0", - "license": "Apache-2.0", - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.9.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" 
- } - }, - "node_modules/eslint-plugin-react/node_modules/semver": { - "version": "6.3.1", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/eslint-scope": { - "version": "7.2.2", - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.3", - "license": "Apache-2.0", - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/espree": { - "version": "9.6.1", - "license": "BSD-2-Clause", - "dependencies": { - "acorn": "^8.9.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esquery": { - "version": "1.5.0", - "license": "BSD-3-Clause", - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "license": "BSD-2-Clause", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "5.3.0", - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "license": "MIT" - }, - "node_modules/fast-glob": { - "version": "3.3.1", - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": 
{ - "version": "5.1.2", - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "license": "MIT" - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "license": "MIT" - }, - "node_modules/fastq": { - "version": "1.15.0", - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "license": "MIT", - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat-cache": { - "version": "3.1.0", - "license": "MIT", - "dependencies": { - "flatted": "^3.2.7", - "keyv": "^4.5.3", - "rimraf": "^3.0.2" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.7", - "license": "ISC" - }, - "node_modules/for-each": { - "version": "0.3.3", - "license": "MIT", - "dependencies": { - "is-callable": "^1.1.3" - } - }, - "node_modules/fraction.js": { - "version": "4.3.6", - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.1", - "license": "MIT" - }, - 
"node_modules/function.prototype.name": { - "version": "1.1.6", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "functions-have-names": "^1.2.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/functions-have-names": { - "version": "1.2.3", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.1", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-symbol-description": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-tsconfig": { - "version": "4.7.0", - "license": "MIT", - "dependencies": { - "resolve-pkg-maps": "^1.0.0" - }, - "funding": { - "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" - } - }, - "node_modules/glob": { - "version": "7.1.7", - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "license": "BSD-2-Clause" - }, - "node_modules/globals": { - "version": "13.21.0", - "license": "MIT", - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - 
"node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globalthis": { - "version": "1.0.3", - "license": "MIT", - "dependencies": { - "define-properties": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "license": "MIT", - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gopd": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.1.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "license": "ISC" - }, - "node_modules/graphemer": { - "version": "1.4.0", - "license": "MIT" - }, - "node_modules/has": { - "version": "1.0.3", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-bigints": { - "version": "1.0.2", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-proto": { - "version": "1.0.1", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ignore": { - "version": "5.2.4", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "license": "MIT", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "license": "MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "license": "ISC" - }, - "node_modules/internal-slot": { - "version": "1.0.5", - "license": "MIT", - "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-array-buffer": { - "version": "3.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-async-function": { - "version": "2.0.0", - "license": "MIT", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-bigint": { - "version": "1.0.4", - "license": "MIT", - "dependencies": { - "has-bigints": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "license": "MIT", - "dependencies": 
{ - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-boolean-object": { - "version": "1.1.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-callable": { - "version": "1.2.7", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-core-module": { - "version": "2.13.0", - "license": "MIT", - "dependencies": { - "has": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-date-object": { - "version": "1.0.5", - "license": "MIT", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-finalizationregistry": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-generator-function": { - "version": "1.0.10", - "license": "MIT", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-map": { - "version": "2.0.2", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-negative-zero": { - "version": "2.0.2", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-number-object": { - "version": "1.0.7", - "license": "MIT", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-regex": { - "version": "1.1.4", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-set": { - "version": "2.0.2", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-string": { - "version": "1.0.7", - "license": "MIT", - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-symbol": { - "version": "1.0.4", - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-typed-array": { - "version": "1.1.12", - "license": "MIT", - "dependencies": { - "which-typed-array": "^1.1.11" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakmap": { - "version": "2.0.1", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - 
"node_modules/is-weakref": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-weakset": { - "version": "2.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/isarray": { - "version": "2.0.5", - "license": "MIT" - }, - "node_modules/isexe": { - "version": "2.0.0", - "license": "ISC" - }, - "node_modules/iterator.prototype": { - "version": "1.1.1", - "license": "MIT", - "dependencies": { - "define-properties": "^1.2.0", - "get-intrinsic": "^1.2.1", - "has-symbols": "^1.0.3", - "reflect.getprototypeof": "^1.0.3" - } - }, - "node_modules/jiti": { - "version": "1.19.3", - "license": "MIT", - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "license": "MIT" - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "license": "MIT", - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/json-buffer": { - "version": "3.0.1", - "license": "MIT" - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "license": "MIT" - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "license": "MIT" - }, - "node_modules/json5": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - } - }, - "node_modules/jsx-ast-utils": { - "version": "3.3.5", - "license": "MIT", - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "object.assign": "^4.1.4", - "object.values": "^1.1.6" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/keyv": { - "version": "4.5.3", - "license": "MIT", - "dependencies": { - "json-buffer": "3.0.1" - } - }, - 
"node_modules/language-subtag-registry": { - "version": "0.3.22", - "license": "CC0-1.0" - }, - "node_modules/language-tags": { - "version": "1.0.5", - "license": "MIT", - "dependencies": { - "language-subtag-registry": "~0.3.2" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/lilconfig": { - "version": "2.1.0", - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "license": "MIT" - }, - "node_modules/locate-path": { - "version": "6.0.0", - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "license": "MIT" - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "license": "MIT", - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/merge2": { - "version": "1.4.1", - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.5", - "license": "MIT", - "dependencies": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "license": "MIT" - }, - "node_modules/mz": { - "version": "2.7.0", - 
"license": "MIT", - "dependencies": { - "any-promise": "^1.0.0", - "object-assign": "^4.0.1", - "thenify-all": "^1.0.0" - } - }, - "node_modules/nanoid": { - "version": "3.3.6", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "license": "MIT" - }, - "node_modules/next": { - "version": "13.4.19", - "license": "MIT", - "dependencies": { - "@next/env": "13.4.19", - "@swc/helpers": "0.5.1", - "busboy": "1.6.0", - "caniuse-lite": "^1.0.30001406", - "postcss": "8.4.14", - "styled-jsx": "5.1.1", - "watchpack": "2.4.0", - "zod": "3.21.4" - }, - "bin": { - "next": "dist/bin/next" - }, - "engines": { - "node": ">=16.8.0" - }, - "optionalDependencies": { - "@next/swc-darwin-arm64": "13.4.19", - "@next/swc-darwin-x64": "13.4.19", - "@next/swc-linux-arm64-gnu": "13.4.19", - "@next/swc-linux-arm64-musl": "13.4.19", - "@next/swc-linux-x64-gnu": "13.4.19", - "@next/swc-linux-x64-musl": "13.4.19", - "@next/swc-win32-arm64-msvc": "13.4.19", - "@next/swc-win32-ia32-msvc": "13.4.19", - "@next/swc-win32-x64-msvc": "13.4.19" - }, - "peerDependencies": { - "@opentelemetry/api": "^1.1.0", - "react": "^18.2.0", - "react-dom": "^18.2.0", - "sass": "^1.3.0" - }, - "peerDependenciesMeta": { - "@opentelemetry/api": { - "optional": true - }, - "sass": { - "optional": true - } - } - }, - "node_modules/next/node_modules/postcss": { - "version": "8.4.14", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.4", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/node-releases": { - 
"version": "2.0.13", - "license": "MIT" - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-hash": { - "version": "3.0.0", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/object-inspect": { - "version": "1.12.3", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.assign": { - "version": "4.1.4", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.entries": { - "version": "1.1.7", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.fromentries": { - "version": "2.0.7", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.groupby": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1" - } - }, - "node_modules/object.hasown": { - "version": "1.1.3", - "license": "MIT", - "dependencies": { - "define-properties": "^1.2.0", - 
"es-abstract": "^1.22.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/object.values": { - "version": "1.1.7", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/once": { - "version": "1.4.0", - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/optionator": { - "version": "0.9.3", - "license": "MIT", - "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parent-module": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "license": "MIT" - }, - "node_modules/path-type": { - "version": "4.0.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - 
"node_modules/picocolors": { - "version": "1.0.0", - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "2.3.1", - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pify": { - "version": "2.3.0", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pirates": { - "version": "4.0.6", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, - "node_modules/postcss": { - "version": "8.4.29", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "peer": true, - "dependencies": { - "nanoid": "^3.3.6", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-import": { - "version": "15.1.0", - "license": "MIT", - "dependencies": { - "postcss-value-parser": "^4.0.0", - "read-cache": "^1.0.0", - "resolve": "^1.1.7" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "postcss": "^8.0.0" - } - }, - "node_modules/postcss-js": { - "version": "4.0.1", - "license": "MIT", - "dependencies": { - "camelcase-css": "^2.0.1" - }, - "engines": { - "node": "^12 || ^14 || >= 16" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "postcss": "^8.4.21" - } - }, - "node_modules/postcss-load-config": { - "version": "4.0.1", - "license": "MIT", - "dependencies": { - "lilconfig": "^2.0.5", - "yaml": "^2.1.1" - }, - "engines": { - "node": ">= 14" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "postcss": ">=8.0.9", - "ts-node": ">=9.0.0" - }, - "peerDependenciesMeta": { - 
"postcss": { - "optional": true - }, - "ts-node": { - "optional": true - } - } - }, - "node_modules/postcss-nested": { - "version": "6.0.1", - "license": "MIT", - "dependencies": { - "postcss-selector-parser": "^6.0.11" - }, - "engines": { - "node": ">=12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - "peerDependencies": { - "postcss": "^8.2.14" - } - }, - "node_modules/postcss-selector-parser": { - "version": "6.0.13", - "license": "MIT", - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "license": "MIT" - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "license": "MIT", - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prettier": { - "version": "3.2.5", - "dev": true, - "license": "MIT", - "peer": true, - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/prettier-plugin-tailwindcss": { - "version": "0.5.13", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=14.21.3" - }, - "peerDependencies": { - "@ianvs/prettier-plugin-sort-imports": "*", - "@prettier/plugin-pug": "*", - "@shopify/prettier-plugin-liquid": "*", - "@trivago/prettier-plugin-sort-imports": "*", - "@zackad/prettier-plugin-twig-melody": "*", - "prettier": "^3.0", - "prettier-plugin-astro": "*", - "prettier-plugin-css-order": "*", - "prettier-plugin-import-sort": "*", - "prettier-plugin-jsdoc": "*", - "prettier-plugin-marko": "*", - "prettier-plugin-organize-attributes": "*", - "prettier-plugin-organize-imports": "*", - "prettier-plugin-sort-imports": "*", - "prettier-plugin-style-order": "*", - "prettier-plugin-svelte": "*" - }, - "peerDependenciesMeta": { - "@ianvs/prettier-plugin-sort-imports": { - "optional": true - }, - "@prettier/plugin-pug": { - 
"optional": true - }, - "@shopify/prettier-plugin-liquid": { - "optional": true - }, - "@trivago/prettier-plugin-sort-imports": { - "optional": true - }, - "@zackad/prettier-plugin-twig-melody": { - "optional": true - }, - "prettier-plugin-astro": { - "optional": true - }, - "prettier-plugin-css-order": { - "optional": true - }, - "prettier-plugin-import-sort": { - "optional": true - }, - "prettier-plugin-jsdoc": { - "optional": true - }, - "prettier-plugin-marko": { - "optional": true - }, - "prettier-plugin-organize-attributes": { - "optional": true - }, - "prettier-plugin-organize-imports": { - "optional": true - }, - "prettier-plugin-sort-imports": { - "optional": true - }, - "prettier-plugin-style-order": { - "optional": true - }, - "prettier-plugin-svelte": { - "optional": true - } - } - }, - "node_modules/prop-types": { - "version": "15.8.1", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.4.0", - "object-assign": "^4.1.1", - "react-is": "^16.13.1" - } - }, - "node_modules/punycode": { - "version": "2.3.0", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/react": { - "version": "18.2.0", - "license": "MIT", - "peer": true, - "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-dom": { - "version": "18.2.0", - "license": "MIT", - "peer": true, - "dependencies": { - "loose-envify": "^1.1.0", - "scheduler": "^0.23.0" - }, - "peerDependencies": { - "react": "^18.2.0" - } - }, - "node_modules/react-is": { - "version": "16.13.1", - "license": "MIT" - }, - "node_modules/read-cache": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - 
"pify": "^2.3.0" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.4", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "globalthis": "^1.0.3", - "which-builtin-type": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.14.0", - "license": "MIT" - }, - "node_modules/regexp.prototype.flags": { - "version": "1.5.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve": { - "version": "1.22.4", - "license": "MIT", - "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/resolve-pkg-maps": { - "version": "1.0.0", - "license": "MIT", - "funding": { - "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - 
"funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/safe-array-concat": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "has-symbols": "^1.0.3", - "isarray": "^2.0.5" - }, - "engines": { - "node": ">=0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safe-regex-test": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/scheduler": { - "version": "0.23.0", - "license": "MIT", - "dependencies": { - "loose-envify": "^1.1.0" - } - }, - "node_modules/semver": { - "version": "6.3.1", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/source-map-js": { - "version": "1.0.2", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/streamsearch": { - "version": "1.1.0", - "engines": { - "node": ">=10.0.0" - } 
- }, - "node_modules/string.prototype.matchall": { - "version": "4.0.9", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "regexp.prototype.flags": "^1.5.0", - "side-channel": "^1.0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trim": { - "version": "1.2.7", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-bom": { - "version": "3.0.0", - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/styled-jsx": { - "version": "5.1.1", - "license": "MIT", - "dependencies": { - "client-only": "0.0.1" - }, - "engines": { - "node": ">= 12.0.0" - }, - "peerDependencies": { - "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" - }, - "peerDependenciesMeta": { 
- "@babel/core": { - "optional": true - }, - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/sucrase": { - "version": "3.34.0", - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.2", - "commander": "^4.0.0", - "glob": "7.1.6", - "lines-and-columns": "^1.1.6", - "mz": "^2.7.0", - "pirates": "^4.0.1", - "ts-interface-checker": "^0.1.9" - }, - "bin": { - "sucrase": "bin/sucrase", - "sucrase-node": "bin/sucrase-node" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/sucrase/node_modules/glob": { - "version": "7.1.6", - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/tailwindcss": { - "version": "3.3.3", - "license": "MIT", - "peer": true, - "dependencies": { - "@alloc/quick-lru": "^5.2.0", - "arg": "^5.0.2", - "chokidar": "^3.5.3", - "didyoumean": "^1.2.2", - "dlv": "^1.1.3", - "fast-glob": "^3.2.12", - "glob-parent": "^6.0.2", - "is-glob": "^4.0.3", - "jiti": "^1.18.2", - "lilconfig": "^2.1.0", - "micromatch": "^4.0.5", - "normalize-path": "^3.0.0", - "object-hash": "^3.0.0", - "picocolors": "^1.0.0", - "postcss": "^8.4.23", - "postcss-import": "^15.1.0", - "postcss-js": "^4.0.1", - "postcss-load-config": "^4.0.1", - "postcss-nested": "^6.0.1", - "postcss-selector-parser": "^6.0.11", - "resolve": "^1.22.2", - "sucrase": "^3.32.0" - }, - "bin": { - "tailwind": "lib/cli.js", - "tailwindcss": 
"lib/cli.js" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/tapable": { - "version": "2.2.1", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/text-table": { - "version": "0.2.0", - "license": "MIT" - }, - "node_modules/thenify": { - "version": "3.3.1", - "license": "MIT", - "dependencies": { - "any-promise": "^1.0.0" - } - }, - "node_modules/thenify-all": { - "version": "1.6.0", - "license": "MIT", - "dependencies": { - "thenify": ">= 3.1.0 < 4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/ts-api-utils": { - "version": "1.0.2", - "license": "MIT", - "engines": { - "node": ">=16.13.0" - }, - "peerDependencies": { - "typescript": ">=4.2.0" - } - }, - "node_modules/ts-interface-checker": { - "version": "0.1.13", - "license": "Apache-2.0" - }, - "node_modules/tsconfig-paths": { - "version": "3.14.2", - "license": "MIT", - "dependencies": { - "@types/json5": "^0.0.29", - "json5": "^1.0.2", - "minimist": "^1.2.6", - "strip-bom": "^3.0.0" - } - }, - "node_modules/tslib": { - "version": "2.6.2", - "license": "0BSD" - }, - "node_modules/type-check": { - "version": "0.4.0", - "license": "MIT", - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.20.2", - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typed-array-buffer": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - 
"for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-byte-offset": { - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typed-array-length": { - "version": "1.0.4", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/typescript": { - "version": "5.2.2", - "license": "Apache-2.0", - "peer": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/unbox-primitive": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.0.11", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - 
"node_modules/util-deprecate": { - "version": "1.0.2", - "license": "MIT" - }, - "node_modules/watchpack": { - "version": "2.4.0", - "license": "MIT", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/which": { - "version": "2.0.2", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/which-boxed-primitive": { - "version": "1.0.2", - "license": "MIT", - "dependencies": { - "is-bigint": "^1.0.1", - "is-boolean-object": "^1.1.0", - "is-number-object": "^1.0.4", - "is-string": "^1.0.5", - "is-symbol": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-builtin-type": { - "version": "1.1.3", - "license": "MIT", - "dependencies": { - "function.prototype.name": "^1.1.5", - "has-tostringtag": "^1.0.0", - "is-async-function": "^2.0.0", - "is-date-object": "^1.0.5", - "is-finalizationregistry": "^1.0.2", - "is-generator-function": "^1.0.10", - "is-regex": "^1.1.4", - "is-weakref": "^1.0.2", - "isarray": "^2.0.5", - "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-collection": { - "version": "1.0.1", - "license": "MIT", - "dependencies": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-typed-array": { - "version": "1.1.11", - "license": "MIT", - "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "license": "ISC" - }, - "node_modules/yallist": { - "version": "4.0.0", - "license": "ISC" - }, - "node_modules/yaml": { - "version": "2.3.2", - "license": "ISC", - "engines": { - "node": ">= 14" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zod": { - "version": "3.21.4", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - } - } -} diff --git a/site/package.json b/site/package.json deleted file mode 100644 index 4fcacc8..0000000 --- a/site/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "burrow", - "version": "0.1.0", - "private": true, - "scripts": { - "dev": "next dev", - "build": "next build", - "start": "next start", - "lint": "next lint" - }, - "dependencies": { - "@fortawesome/fontawesome-free": "^6.4.2", - "@fortawesome/fontawesome-svg-core": "^6.4.2", - "@fortawesome/free-brands-svg-icons": "^6.4.2", - "@fortawesome/free-solid-svg-icons": "^6.4.2", - "@fortawesome/react-fontawesome": "^0.2.0", - "@headlessui/react": "^1.7.17", - "@headlessui/tailwindcss": "^0.2.0", - "@types/node": "20.5.8", - "@types/react": "18.2.21", - "@types/react-dom": "18.2.7", - "autoprefixer": "10.4.15", - "eslint": "8.48.0", - "eslint-config-next": "13.4.19", - "next": "13.4.19", - "postcss": "8.4.29", - "react": "18.2.0", - "react-dom": "18.2.0", - "tailwindcss": "3.3.3", - "typescript": "5.2.2" - }, - "devDependencies": { - "prettier": "^3.0.3", - "prettier-plugin-tailwindcss": "^0.5.4" - } -} diff --git a/site/pages/_app.tsx b/site/pages/_app.tsx deleted file mode 100644 index c0572f6..0000000 --- a/site/pages/_app.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import Layout from "@/layout/layout"; -import type { AppProps } from "next/app"; -import { config } from 
"@fortawesome/fontawesome-svg-core"; -import "@fortawesome/fontawesome-svg-core/styles.css"; -config.autoAddCss = false; -import "static/globals.css"; - -export default function App({ Component, pageProps }: AppProps) { - return ( - - - - ); -} diff --git a/site/pages/_document.tsx b/site/pages/_document.tsx deleted file mode 100644 index ce4b5e1..0000000 --- a/site/pages/_document.tsx +++ /dev/null @@ -1,13 +0,0 @@ -import { Html, Head, Main, NextScript } from "next/document"; - -export default function Document() { - return ( - - - -
- - - - ); -} diff --git a/site/pages/index.tsx b/site/pages/index.tsx deleted file mode 100644 index 20d7f1b..0000000 --- a/site/pages/index.tsx +++ /dev/null @@ -1,164 +0,0 @@ -import Head from "next/head"; -import { Menu, Transition } from "@headlessui/react"; -import { useState, useRef, useEffect } from "react"; - -function ChevronIcon({ open }: { open: boolean }) { - return ( - - ); -} - -function ExternalLinkIcon() { - return ( - - ); -} - -function GithubIcon() { - return ( - - ); -} - -export default function Page() { - const [chevron, setChevron] = useState(false); - const menuButtonRef = useRef(null); - const toggleDropdown = () => { - setChevron(!chevron); - }; - const handleClickOutside = (event: MouseEvent) => { - if ( - menuButtonRef.current && - !menuButtonRef.current.contains(event.target as Node) - ) { - setChevron(false); - } - }; - useEffect(() => { - document.addEventListener("click", handleClickOutside); - - return () => { - document.removeEventListener("click", handleClickOutside); - }; - }, []); - return ( - <> - - Burrow - - - -
-
-

- Burrow Through{" "} - Firewalls -

-
-

- Burrow is an open source tool for burrowing through firewalls, - built by teenagers at{" "} - - - Hack Club. - - {" "} - - burrow - {" "} - is a Rust-based VPN for getting around restrictive Internet - censors. -

-
-
-
- -
- toggleDropdown()} - ref={menuButtonRef} - className="w-50 h-12 rounded-2xl bg-hackClubRed px-3 font-SpaceMono hover:scale-105 md:h-12 md:w-auto md:rounded-3xl md:text-xl 2xl:h-16 2xl:text-2xl " - > - Install for Linux - - -
- - -
- - {({ active }) => ( - - Install for Windows - - )} - - - - Install for MacOS - - -
-
-
-
- - - -
- -
- {/* Footer */} - {/* */} -
-
- - ); -} diff --git a/site/postcss.config.js b/site/postcss.config.js deleted file mode 100644 index 12a703d..0000000 --- a/site/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -}; diff --git a/site/prettier.config.js b/site/prettier.config.js deleted file mode 100644 index d573118..0000000 --- a/site/prettier.config.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - plugins: ["prettier-plugin-tailwindcss"], -}; diff --git a/site/public/.well-known/apple-app-site-association b/site/public/.well-known/apple-app-site-association deleted file mode 100644 index 63262fb..0000000 --- a/site/public/.well-known/apple-app-site-association +++ /dev/null @@ -1,21 +0,0 @@ -{ - "applinks": { - "details": [ - { - "appIDs": [ - "P6PV2R9443.com.hackclub.burrow" - ], - "components": [ - { - "/": "/callback/*" - } - ] - } - ] - }, - "webcredentials": { - "apps": [ - "P6PV2R9443.com.hackclub.burrow" - ] - } -} diff --git a/site/public/hackclub.svg b/site/public/hackclub.svg deleted file mode 100644 index 38c2a68..0000000 --- a/site/public/hackclub.svg +++ /dev/null @@ -1,66 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/site/static/globals.css b/site/static/globals.css deleted file mode 100644 index b5c61c9..0000000 --- a/site/static/globals.css +++ /dev/null @@ -1,3 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; diff --git a/site/tailwind.config.ts b/site/tailwind.config.ts deleted file mode 100644 index 3df6f5a..0000000 --- a/site/tailwind.config.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Config } from "tailwindcss"; - -const config: Config = { - content: [ - "./pages/**/*.{js,ts,jsx,tsx,mdx}", - "./components/**/*.{js,ts,jsx,tsx,mdx}", - "./app/**/*.{js,ts,jsx,tsx,mdx}", - ], - theme: { - extend: { - colors: { - backgroundBlack: "#17171D", - hackClubRed: "#EC3750", - 
hackClubBlueShade: "#32323D", - hackClubBlue: "#338EDA", - burrowStroke: "#595959", - burrowHover: "#3D3D3D", - }, - fontFamily: { - SpaceMono: ["var(--font-space-mono)"], - Poppins: ["var(--font-poppins)"], - PhantomSans: ["var(--font-phantom-sans)"], - }, - }, - }, - plugins: [require("@headlessui/tailwindcss")({ prefix: "ui" })], -}; -export default config; diff --git a/site/tsconfig.json b/site/tsconfig.json deleted file mode 100644 index c714696..0000000 --- a/site/tsconfig.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "bundler", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true, - "plugins": [ - { - "name": "next" - } - ], - "paths": { - "@/*": ["./*"] - } - }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules"] -} diff --git a/tun/Cargo.toml b/tun/Cargo.toml index 1b07833..7413f65 100644 --- a/tun/Cargo.toml +++ b/tun/Cargo.toml @@ -8,7 +8,7 @@ libc = "0.2" fehler = "1.0" nix = { version = "0.26", features = ["ioctl"] } socket2 = "0.5" -tokio = { version = "1.37", default-features = false, optional = true } +tokio = { version = "1.28", features = [] } byteorder = "1.4" tracing = "0.1" log = "0.4" @@ -19,7 +19,10 @@ futures = { version = "0.3.28", optional = true } [features] serde = ["dep:serde", "dep:schemars"] -tokio = ["tokio/net", "dep:tokio", "dep:futures"] +tokio = ["tokio/net", "dep:futures"] + +[target.'cfg(feature = "tokio")'.dev-dependencies] +tokio = { features = ["rt", "macros"] } [target.'cfg(windows)'.dependencies] lazy_static = "1.4" @@ -34,7 +37,7 @@ windows = { version = "0.48", features = [ [target.'cfg(windows)'.build-dependencies] anyhow = "1.0" bindgen = "0.65" -reqwest = { version = "0.11" } +reqwest = { 
version = "0.11", features = ["native-tls"] } ssri = { version = "9.0", default-features = false } tokio = { version = "1.28", features = ["rt", "macros"] } zip = { version = "0.6", features = ["deflate"] } diff --git a/tun/build.rs b/tun/build.rs index 03ee131..8da8a40 100644 --- a/tun/build.rs +++ b/tun/build.rs @@ -26,7 +26,7 @@ async fn generate(out_dir: &std::path::Path) -> anyhow::Result<()> { println!("cargo:rerun-if-changed={}", binary_path.to_str().unwrap()); if let (Ok(..), Ok(..)) = (File::open(&bindings_path), File::open(&binary_path)) { - return Ok(()); + return Ok(()) }; let archive = download(out_dir) diff --git a/tun/src/options.rs b/tun/src/options.rs index bb364e5..e21bf5f 100644 --- a/tun/src/options.rs +++ b/tun/src/options.rs @@ -1,7 +1,5 @@ -#[cfg(all(any(target_os = "linux", target_vendor = "apple"), feature = "tokio"))] use std::io::Error; -#[cfg(all(any(target_os = "linux", target_vendor = "apple"), feature = "tokio"))] use fehler::throws; #[cfg(any(target_os = "linux", target_vendor = "apple"))] diff --git a/tun/src/tokio/mod.rs b/tun/src/tokio/mod.rs index f56f3d2..bd27109 100644 --- a/tun/src/tokio/mod.rs +++ b/tun/src/tokio/mod.rs @@ -33,7 +33,7 @@ impl TunInterface { Ok(result) => return result, Err(_would_block) => { tracing::debug!("WouldBlock"); - continue; + continue } } } diff --git a/tun/src/unix/address.rs b/tun/src/unix/address.rs deleted file mode 100644 index dc84e96..0000000 --- a/tun/src/unix/address.rs +++ /dev/null @@ -1,120 +0,0 @@ -use std::io::{Error, ErrorKind}; -use std::net::IpAddr; - -use fehler::throws; - -#[throws] -pub(crate) fn ensure_valid_ipv6_prefix(prefix_len: u8) { - if prefix_len > 128 { - Err(Error::new( - ErrorKind::InvalidInput, - "IPv6 prefix length must be between 0 and 128", - ))?; - } -} - -#[cfg_attr(not(any(test, target_vendor = "apple")), allow(dead_code))] -#[throws] -pub(crate) fn ipv6_prefix_octets(prefix_len: u8) -> [u8; 16] { - ensure_valid_ipv6_prefix(prefix_len)?; - - let mut octets = 
[0u8; 16]; - for bit in 0..prefix_len { - let idx = (bit / 8) as usize; - let offset = (bit % 8) as u8; - octets[idx] |= 0x80 >> offset; - } - - octets -} - -#[cfg_attr(not(any(test, target_vendor = "apple")), allow(dead_code))] -pub(crate) fn parse_addr_spec(spec: &str) -> Result)>, Error> { - let (addr_str, prefix) = match spec.split_once('/') { - Some((addr, prefix)) => (addr, Some(prefix)), - None => (spec, None), - }; - - let addr: IpAddr = match addr_str.parse() { - Ok(addr) => addr, - Err(_) => return Ok(None), - }; - - let prefix_len = if let Some(prefix) = prefix { - let parsed = prefix - .parse::() - .map_err(|_| Error::new(ErrorKind::InvalidInput, "Invalid prefix length"))?; - ensure_valid_ipv6_prefix(parsed)?; - Some(parsed) - } else { - None - }; - - Ok(Some((addr, prefix_len))) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; - - #[test] - fn parse_ipv4_without_prefix() { - let parsed = parse_addr_spec("192.0.2.1").expect("parse succeeds"); - assert_eq!( - parsed, - Some((IpAddr::V4(Ipv4Addr::new(192, 0, 2, 1)), None)) - ); - } - - #[test] - fn parse_ipv6_with_prefix() { - let parsed = parse_addr_spec("2001:db8::1/64").expect("parse succeeds"); - assert_eq!( - parsed, - Some(( - IpAddr::V6("2001:db8::1".parse::().unwrap()), - Some(64), - )) - ); - } - - #[test] - fn parse_invalid_addr_returns_none() { - assert_eq!(parse_addr_spec("not-an-ip").unwrap(), None); - } - - #[test] - fn parse_invalid_prefix_string_errors() { - assert!(parse_addr_spec("::1/not-a-number").is_err()); - } - - #[test] - fn parse_prefix_out_of_range_errors() { - assert!(parse_addr_spec("::1/129").is_err()); - } - - #[test] - fn ensure_valid_ipv6_prefix_accepts_bounds() { - ensure_valid_ipv6_prefix(0).expect("zero prefix is allowed"); - ensure_valid_ipv6_prefix(128).expect("max prefix is allowed"); - } - - #[test] - fn ensure_valid_ipv6_prefix_rejects_invalid() { - assert!(ensure_valid_ipv6_prefix(129).is_err()); - } - - #[test] - fn 
ipv6_prefix_octets_zero_prefix() { - assert_eq!(ipv6_prefix_octets(0).unwrap(), [0u8; 16]); - } - - #[test] - fn ipv6_prefix_octets_sets_bits_correctly() { - let mask = ipv6_prefix_octets(65).unwrap(); - assert_eq!(mask[0..8], [0xFF; 8]); - assert_eq!(mask[8], 0x80); - assert_eq!(mask[9..], [0u8; 7]); - } -} diff --git a/tun/src/unix/apple/mod.rs b/tun/src/unix/apple/mod.rs index 66a2f15..6e859ca 100644 --- a/tun/src/unix/apple/mod.rs +++ b/tun/src/unix/apple/mod.rs @@ -1,14 +1,11 @@ -use std::{ - ffi::CStr, - io::{Error, ErrorKind, IoSlice}, - mem, - net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddrV4, SocketAddrV6}, - os::fd::{AsRawFd, FromRawFd, RawFd}, -}; +use std::{io::{Error, IoSlice}, mem, net::{Ipv4Addr, SocketAddrV4}, os::fd::{AsRawFd, FromRawFd, RawFd}, ptr}; +use std::net::{IpAddr, Ipv6Addr, SocketAddrV6}; +use std::ptr::addr_of; use byteorder::{ByteOrder, NetworkEndian}; use fehler::throws; -use libc::{c_char, iovec, writev, AF_INET, AF_INET6}; +use libc::{c_char, iovec, writev, AF_INET, AF_INET6, sockaddr_in6}; +use nix::sys::socket::SockaddrIn6; use socket2::{Domain, SockAddr, Socket, Type}; use tracing::{self, instrument}; @@ -17,7 +14,6 @@ pub mod sys; use kern_control::SysControlSocket; -use super::address::{ensure_valid_ipv6_prefix, ipv6_prefix_octets, parse_addr_spec}; use super::{ifname_to_string, string_to_ifname}; use crate::TunOptions; @@ -73,11 +69,11 @@ impl TunInterface { #[throws] fn configure(&self, options: TunOptions) { - for spec in options.address { - if let Some((addr, prefix_len)) = parse_addr_spec(&spec)? 
{ + for addr in options.address{ + if let Ok(addr) = addr.parse::() { match addr { - IpAddr::V4(addr) => self.set_ipv4_addr(addr)?, - IpAddr::V6(addr) => self.add_ipv6_addr(addr, prefix_len.unwrap_or(128))?, + IpAddr::V4(addr) => {self.set_ipv4_addr(addr)?} + IpAddr::V6(addr) => {self.set_ipv6_addr(addr)?} } } } @@ -114,10 +110,6 @@ impl TunInterface { ifname_to_string(buf) } - pub(crate) fn packet_information_size(&self) -> usize { - 4 - } - #[throws] #[instrument] fn ifreq(&self) -> sys::ifreq { @@ -154,78 +146,18 @@ impl TunInterface { } #[throws] - #[instrument] - pub fn add_ipv6_addr(&self, addr: Ipv6Addr, prefix_len: u8) { - ensure_valid_ipv6_prefix(prefix_len)?; - - let mut req: sys::in6_aliasreq = unsafe { mem::zeroed() }; - req.ifra_name = string_to_ifname(&self.name()?); - req.ifra_addr = ipv6_to_sockaddr(addr); - req.ifra_prefixmask = ipv6_prefix_mask(prefix_len)?; - self.perform6(|fd| unsafe { sys::if_add_addr6(fd, &req) })?; - tracing::info!( - "ipv6_addr_added: {:?}/{} (fd: {:?})", - addr, - prefix_len, - self.as_raw_fd() - ); - } - - #[throws] - #[instrument] - pub fn remove_ipv6_addr(&self, addr: Ipv6Addr, prefix_len: u8) { - ensure_valid_ipv6_prefix(prefix_len)?; - - let mut iff = self.in6_ifreq()?; - iff.ifr_ifru.ifru_addr = ipv6_to_sockaddr(addr); - iff.ifr_ifru.ifru_prefixmask = ipv6_prefix_mask(prefix_len)?; - self.perform6(|fd| unsafe { sys::if_del_addr6(fd, &iff) })?; - tracing::info!( - "ipv6_addr_removed: {:?}/{} (fd: {:?})", - addr, - prefix_len, - self.as_raw_fd() - ); - } - - #[throws] - #[instrument] - pub fn ipv6_addrs(&self) -> Vec { - struct IfAddrs(*mut libc::ifaddrs); - - impl Drop for IfAddrs { - fn drop(&mut self) { - if !self.0.is_null() { - unsafe { libc::freeifaddrs(self.0) }; - } - } - } - - let mut ifaddrs = std::ptr::null_mut(); - unsafe { - if libc::getifaddrs(&mut ifaddrs) != 0 { - Err(Error::last_os_error())?; - } - } - - let guard = IfAddrs(ifaddrs); - let interface_name = self.name()?; - let mut cursor = guard.0; - let 
mut result = Vec::new(); - - while let Some(ifa) = unsafe { cursor.as_ref() } { - if !ifa.ifa_addr.is_null() - && unsafe { (*ifa.ifa_addr).sa_family as i32 } == AF_INET6 - && unsafe { CStr::from_ptr(ifa.ifa_name) }.to_string_lossy() == interface_name - { - let sockaddr = unsafe { *(ifa.ifa_addr as *const libc::sockaddr_in6) }; - result.push(Ipv6Addr::from(in6_addr_octets(sockaddr.sin6_addr))); - } - - cursor = ifa.ifa_next; - } - - result + pub fn set_ipv6_addr(&self, addr: Ipv6Addr) { + // let addr = SockAddr::from(SocketAddrV6::new(addr, 0, 0, 0)); + // println!("addr: {:?}", addr); + // let mut iff = self.in6_ifreq()?; + // let sto = addr.as_storage(); + // let ifadddr_ptr: *const sockaddr_in6 = addr_of!(sto).cast(); + // iff.ifr_ifru.ifru_addr = unsafe { *ifadddr_ptr }; + // println!("ifru addr set"); + // println!("{:?}", sys::SIOCSIFADDR_IN6); + // self.perform6(|fd| unsafe { sys::if_set_addr6(fd, &iff) })?; + // tracing::info!("ipv6_addr_set"); + tracing::warn!("Setting IPV6 address on MacOS CLI mode is not supported yet."); } #[throws] @@ -294,6 +226,7 @@ impl TunInterface { #[throws] #[instrument] pub fn send(&self, buf: &[u8]) -> usize { + use std::io::ErrorKind; let proto = match buf[0] >> 4 { 6 => Ok(AF_INET6), 4 => Ok(AF_INET), @@ -315,19 +248,3 @@ impl TunInterface { .map_err(|_| Error::new(ErrorKind::Other, "Conversion error"))? 
} } - -#[inline] -fn in6_addr_octets(addr: libc::in6_addr) -> [u8; 16] { - addr.s6_addr -} - -fn ipv6_to_sockaddr(addr: Ipv6Addr) -> libc::sockaddr_in6 { - let sockaddr = SockAddr::from(SocketAddrV6::new(addr, 0, 0, 0)); - unsafe { *(sockaddr.as_ptr() as *const libc::sockaddr_in6) } -} - -#[throws] -fn ipv6_prefix_mask(prefix_len: u8) -> libc::sockaddr_in6 { - let octets = ipv6_prefix_octets(prefix_len)?; - ipv6_to_sockaddr(Ipv6Addr::from(octets)) -} diff --git a/tun/src/unix/apple/sys.rs b/tun/src/unix/apple/sys.rs index 282ee34..d48d6ee 100644 --- a/tun/src/unix/apple/sys.rs +++ b/tun/src/unix/apple/sys.rs @@ -2,11 +2,20 @@ use std::mem; use libc::{c_char, c_int, c_short, c_uint, c_ulong, sockaddr, sockaddr_in6, time_t}; pub use libc::{ - c_void, sockaddr_ctl, sockaddr_in, socklen_t, AF_SYSTEM, AF_SYS_CONTROL, IFNAMSIZ, + c_void, + sockaddr_ctl, + sockaddr_in, + socklen_t, + AF_SYSTEM, + AF_SYS_CONTROL, + IFNAMSIZ, SYSPROTO_CONTROL, }; use nix::{ - ioctl_read_bad, ioctl_readwrite, ioctl_write_ptr_bad, request_code_readwrite, + ioctl_read_bad, + ioctl_readwrite, + ioctl_write_ptr_bad, + request_code_readwrite, request_code_write, }; @@ -68,7 +77,7 @@ pub struct ifreq { #[repr(C)] #[derive(Copy, Clone, Debug)] -pub struct in6_addrlifetime { +pub struct in6_addrlifetime{ pub ia6t_expire: time_t, pub ia6t_preferred: time_t, pub ia6t_vltime: u32, @@ -148,7 +157,6 @@ pub struct icmp6_ifstat { pub union ifr_ifru6 { pub ifru_addr: sockaddr_in6, pub ifru_dstaddr: sockaddr_in6, - pub ifru_prefixmask: sockaddr_in6, pub ifru_flags: c_int, pub ifru_flags6: c_int, pub ifru_metric: c_int, @@ -157,7 +165,7 @@ pub union ifr_ifru6 { pub ifru_lifetime: in6_addrlifetime, // ifru_lifetime pub ifru_stat: in6_ifstat, pub ifru_icmp6stat: icmp6_ifstat, - pub ifru_scope_id: [u32; SCOPE6_ID_MAX], + pub ifru_scope_id: [u32; SCOPE6_ID_MAX] } #[repr(C)] @@ -166,21 +174,8 @@ pub struct in6_ifreq { pub ifr_ifru: ifr_ifru6, } -#[repr(C)] -#[derive(Copy, Clone, Debug)] -pub struct in6_aliasreq { 
- pub ifra_name: [c_char; IFNAMSIZ], - pub ifra_addr: sockaddr_in6, - pub ifra_dstaddr: sockaddr_in6, - pub ifra_prefixmask: sockaddr_in6, - pub ifra_lifetime: in6_addrlifetime, - pub ifra_flags: c_int, -} - pub const SIOCSIFADDR: c_ulong = request_code_write!(b'i', 12, mem::size_of::()); pub const SIOCSIFADDR_IN6: c_ulong = request_code_write!(b'i', 12, mem::size_of::()); -pub const SIOCAIFADDR_IN6: c_ulong = request_code_write!(b'i', 30, mem::size_of::()); -pub const SIOCDIFADDR_IN6: c_ulong = request_code_write!(b'i', 25, mem::size_of::()); pub const SIOCGIFMTU: c_ulong = request_code_readwrite!(b'i', 51, mem::size_of::()); pub const SIOCSIFMTU: c_ulong = request_code_write!(b'i', 52, mem::size_of::()); pub const SIOCGIFNETMASK: c_ulong = request_code_readwrite!(b'i', 37, mem::size_of::()); @@ -203,7 +198,6 @@ ioctl_read_bad!(if_get_addr, libc::SIOCGIFADDR, ifreq); ioctl_read_bad!(if_get_mtu, SIOCGIFMTU, ifreq); ioctl_read_bad!(if_get_netmask, SIOCGIFNETMASK, ifreq); ioctl_write_ptr_bad!(if_set_addr, SIOCSIFADDR, ifreq); -ioctl_write_ptr_bad!(if_add_addr6, SIOCAIFADDR_IN6, in6_aliasreq); -ioctl_write_ptr_bad!(if_del_addr6, SIOCDIFADDR_IN6, in6_ifreq); +ioctl_write_ptr_bad!(if_set_addr6, SIOCSIFADDR_IN6, in6_ifreq); ioctl_write_ptr_bad!(if_set_mtu, SIOCSIFMTU, ifreq); ioctl_write_ptr_bad!(if_set_netmask, SIOCSIFNETMASK, ifreq); diff --git a/tun/src/unix/linux/mod.rs b/tun/src/unix/linux/mod.rs index 9fc963a..60d6341 100644 --- a/tun/src/unix/linux/mod.rs +++ b/tun/src/unix/linux/mod.rs @@ -1,7 +1,6 @@ use std::{ - ffi::CStr, fs::OpenOptions, - io::Error, + io::{Error, Write}, mem, net::{Ipv4Addr, Ipv6Addr, SocketAddrV4}, os::{ @@ -11,11 +10,10 @@ use std::{ }; use fehler::throws; -use libc::{in6_ifreq, AF_INET6}; +use libc::in6_ifreq; use socket2::{Domain, SockAddr, Socket, Type}; use tracing::{info, instrument}; -use super::address::ensure_valid_ipv6_prefix; use super::{ifname_to_string, string_to_ifname}; use crate::TunOptions; @@ -73,21 +71,6 @@ impl 
TunInterface { ifname_to_string(iff.ifr_name) } - pub(crate) fn packet_information_size(&self) -> usize { - let mut iff = unsafe { mem::zeroed::() }; - match unsafe { sys::tun_get_iff(self.socket.as_raw_fd(), &mut iff) } { - Ok(_) => { - let flags = unsafe { iff.ifr_ifru.ifru_flags }; - if flags & libc::IFF_NO_PI as i16 != 0 { - 0 - } else { - 4 - } - } - Err(_) => 4, - } - } - #[throws] #[instrument] fn ifreq(&self) -> sys::ifreq { @@ -157,76 +140,11 @@ impl TunInterface { #[throws] #[instrument] - pub fn add_ipv6_addr(&self, addr: Ipv6Addr, prefix_len: u8) { - ensure_valid_ipv6_prefix(prefix_len)?; - + pub fn set_ipv6_addr(&self, addr: Ipv6Addr) { let mut iff = self.in6_ifreq()?; iff.ifr6_addr.s6_addr = addr.octets(); - iff.ifr6_prefixlen = prefix_len.into(); - self.perform6(|fd| unsafe { sys::if_add_addr6(fd, &iff) })?; - info!( - "ipv6_addr_added: {:?}/{} (fd: {:?})", - addr, - prefix_len, - self.as_raw_fd() - ) - } - - #[throws] - #[instrument] - pub fn remove_ipv6_addr(&self, addr: Ipv6Addr, prefix_len: u8) { - ensure_valid_ipv6_prefix(prefix_len)?; - - let mut iff = self.in6_ifreq()?; - iff.ifr6_addr.s6_addr = addr.octets(); - iff.ifr6_prefixlen = prefix_len.into(); - self.perform6(|fd| unsafe { sys::if_del_addr6(fd, &iff) })?; - info!( - "ipv6_addr_removed: {:?}/{} (fd: {:?})", - addr, - prefix_len, - self.as_raw_fd() - ) - } - - #[throws] - #[instrument] - pub fn ipv6_addrs(&self) -> Vec { - struct IfAddrs(*mut libc::ifaddrs); - - impl Drop for IfAddrs { - fn drop(&mut self) { - if !self.0.is_null() { - unsafe { libc::freeifaddrs(self.0) }; - } - } - } - - let mut ifaddrs = std::ptr::null_mut(); - unsafe { - if libc::getifaddrs(&mut ifaddrs) != 0 { - Err(Error::last_os_error())?; - } - } - - let guard = IfAddrs(ifaddrs); - let interface_name = self.name()?; - let mut cursor = guard.0; - let mut result = Vec::new(); - - while let Some(ifa) = unsafe { cursor.as_ref() } { - if !ifa.ifa_addr.is_null() - && unsafe { (*ifa.ifa_addr).sa_family as i32 } == 
AF_INET6 - && unsafe { CStr::from_ptr(ifa.ifa_name) }.to_string_lossy() == interface_name - { - let sockaddr = unsafe { *(ifa.ifa_addr as *const libc::sockaddr_in6) }; - result.push(Ipv6Addr::from(sockaddr.sin6_addr.s6_addr)); - } - - cursor = ifa.ifa_next; - } - - result + self.perform6(|fd| unsafe { sys::if_set_addr6(fd, &iff) })?; + info!("ipv6_addr_set: {:?} (fd: {:?})", addr, self.as_raw_fd()) } #[throws] @@ -298,16 +216,6 @@ impl TunInterface { #[throws] #[instrument] pub fn send(&self, buf: &[u8]) -> usize { - let len = unsafe { - libc::write( - self.as_raw_fd(), - buf.as_ptr().cast::(), - buf.len(), - ) - }; - if len < 0 { - Err(Error::last_os_error())?; - } - len as usize + self.socket.send(buf)? } } diff --git a/tun/src/unix/linux/sys.rs b/tun/src/unix/linux/sys.rs index cba5554..e12c8ec 100644 --- a/tun/src/unix/linux/sys.rs +++ b/tun/src/unix/linux/sys.rs @@ -1,6 +1,6 @@ use std::mem::size_of; -pub use libc::{ifreq, sockaddr_in}; +pub use libc::{ifreq, sockaddr, sockaddr_in, sockaddr_in6}; use nix::{ioctl_read_bad, ioctl_write_ptr_bad, request_code_read, request_code_write}; ioctl_write_ptr_bad!( @@ -20,8 +20,7 @@ ioctl_read_bad!(if_get_mtu, libc::SIOCGIFMTU, libc::ifreq); ioctl_read_bad!(if_get_netmask, libc::SIOCGIFNETMASK, libc::ifreq); ioctl_write_ptr_bad!(if_set_addr, libc::SIOCSIFADDR, libc::ifreq); -ioctl_write_ptr_bad!(if_add_addr6, libc::SIOCSIFADDR, libc::in6_ifreq); -ioctl_write_ptr_bad!(if_del_addr6, libc::SIOCDIFADDR, libc::in6_ifreq); +ioctl_write_ptr_bad!(if_set_addr6, libc::SIOCSIFADDR, libc::in6_ifreq); ioctl_write_ptr_bad!(if_set_brdaddr, libc::SIOCSIFBRDADDR, libc::ifreq); ioctl_write_ptr_bad!(if_set_mtu, libc::SIOCSIFMTU, libc::ifreq); ioctl_write_ptr_bad!(if_set_netmask, libc::SIOCSIFNETMASK, libc::ifreq); diff --git a/tun/src/unix/mod.rs b/tun/src/unix/mod.rs index ad25667..ae0b77a 100644 --- a/tun/src/unix/mod.rs +++ b/tun/src/unix/mod.rs @@ -6,7 +6,6 @@ use std::{ use tracing::instrument; -mod address; mod queue; 
#[cfg(target_vendor = "apple")] @@ -48,26 +47,12 @@ impl TunInterface { #[throws] #[instrument] pub fn recv(&self, buf: &mut [u8]) -> usize { - let packet_information_size = self.packet_information_size(); - let mut tmp_buf = [MaybeUninit::uninit(); 1504]; - let len = unsafe { - libc::read( - self.as_raw_fd(), - tmp_buf.as_mut_ptr().cast::(), - tmp_buf.len(), - ) - }; - if len < 0 { - Err(Error::last_os_error())?; - } - let len = len as usize; - if len < packet_information_size { - return 0; - } - - let result_buf = unsafe { assume_init(&tmp_buf[packet_information_size..len]) }; - buf[..len - packet_information_size].copy_from_slice(result_buf); - len - packet_information_size + // Use IoVec to read directly into target buffer + let mut tmp_buf = [MaybeUninit::uninit(); 1500]; + let len = self.socket.recv(&mut tmp_buf)?; + let result_buf = unsafe { assume_init(&tmp_buf[4..len]) }; + buf[..len - 4].copy_from_slice(result_buf); + len - 4 } #[throws] diff --git a/tun/tests/configure.rs b/tun/tests/configure.rs index bfa56ef..e7e2c6d 100644 --- a/tun/tests/configure.rs +++ b/tun/tests/configure.rs @@ -3,33 +3,17 @@ use std::{io::Error, net::Ipv4Addr}; use fehler::throws; use tun::TunInterface; -fn open_tun() -> Result, Error> { - match TunInterface::new() { - Ok(tun) => Ok(Some(tun)), - Err(err) - if err.kind() == std::io::ErrorKind::PermissionDenied - || matches!(err.raw_os_error(), Some(1 | 13)) => - { - eprintln!("skipping tun test without tunnel privileges: {err}"); - Ok(None) - } - Err(err) => Err(err), - } -} - #[test] #[throws] fn test_create() { - let _ = open_tun()?; + TunInterface::new()?; } #[test] #[throws] #[cfg(not(any(target_os = "windows", target_vendor = "apple")))] fn test_set_get_broadcast_addr() { - let Some(tun) = open_tun()? 
else { - return Ok(()); - }; + let tun = TunInterface::new()?; let addr = Ipv4Addr::new(10, 0, 0, 1); tun.set_ipv4_addr(addr)?; @@ -44,9 +28,7 @@ fn test_set_get_broadcast_addr() { #[throws] #[cfg(not(target_os = "windows"))] fn test_set_get_ipv4() { - let Some(tun) = open_tun()? else { - return Ok(()); - }; + let tun = TunInterface::new()?; let addr = Ipv4Addr::new(10, 0, 0, 1); tun.set_ipv4_addr(addr)?; @@ -61,12 +43,10 @@ fn test_set_get_ipv4() { fn test_set_get_ipv6() { use std::net::Ipv6Addr; - let Some(tun) = open_tun()? else { - return Ok(()); - }; + let tun = TunInterface::new()?; let addr = Ipv6Addr::new(1, 1, 1, 1, 1, 1, 1, 1); - tun.add_ipv6_addr(addr, 128)?; + tun.set_ipv6_addr(addr)?; // let result = tun.ipv6_addr()?; // assert_eq!(addr, result); @@ -76,9 +56,7 @@ fn test_set_get_ipv6() { #[throws] #[cfg(not(target_os = "windows"))] fn test_set_get_mtu() { - let Some(interf) = open_tun()? else { - return Ok(()); - }; + let interf = TunInterface::new()?; interf.set_mtu(500)?; @@ -89,9 +67,7 @@ fn test_set_get_mtu() { #[throws] #[cfg(not(target_os = "windows"))] fn test_set_get_netmask() { - let Some(interf) = open_tun()? 
else { - return Ok(()); - }; + let interf = TunInterface::new()?; let netmask = Ipv4Addr::new(255, 0, 0, 0); let addr = Ipv4Addr::new(192, 168, 1, 1); diff --git a/tun/tests/packets.rs b/tun/tests/packets.rs index b9607b3..80c078b 100644 --- a/tun/tests/packets.rs +++ b/tun/tests/packets.rs @@ -1,5 +1,5 @@ -use std::net::Ipv6Addr; use std::{io::Error, net::Ipv4Addr}; +use std::net::Ipv6Addr; use fehler::throws; use tun::TunInterface; @@ -44,5 +44,5 @@ fn set_ipv6() { println!("tun name: {:?}", tun.name()?); let targ_addr: Ipv6Addr = "::1".parse().unwrap(); println!("v6 addr: {:?}", targ_addr); - tun.add_ipv6_addr(targ_addr, 128)?; -} + tun.set_ipv6_addr(targ_addr)?; +} \ No newline at end of file diff --git a/tun/tests/tokio.rs b/tun/tests/tokio.rs index 3b89777..f7cb273 100644 --- a/tun/tests/tokio.rs +++ b/tun/tests/tokio.rs @@ -1,27 +1,9 @@ -#[cfg(all(feature = "tokio", not(target_os = "windows")))] use std::net::Ipv4Addr; -#[cfg(all(feature = "tokio", not(target_os = "windows")))] -fn open_tun() -> Option { - match tun::TunInterface::new() { - Ok(tun) => Some(tun), - Err(err) - if err.kind() == std::io::ErrorKind::PermissionDenied - || matches!(err.raw_os_error(), Some(1 | 13)) => - { - eprintln!("skipping tokio tun test without tunnel privileges: {err}"); - None - } - Err(err) => panic!("failed to create tun interface: {err}"), - } -} - #[tokio::test] #[cfg(all(feature = "tokio", not(target_os = "windows")))] async fn test_create() { - let Some(tun) = open_tun() else { - return; - }; + let tun = tun::TunInterface::new().unwrap(); let _ = tun::tokio::TunInterface::new(tun).unwrap(); } @@ -29,9 +11,7 @@ async fn test_create() { #[ignore = "requires interactivity"] #[cfg(all(feature = "tokio", not(target_os = "windows")))] async fn test_write() { - let Some(tun) = open_tun() else { - return; - }; + let tun = tun::TunInterface::new().unwrap(); tun.set_ipv4_addr(Ipv4Addr::from([192, 168, 1, 10])) .unwrap(); let async_tun = 
tun::tokio::TunInterface::new(tun).unwrap();