diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..5592118f --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[alias] +xtask = "run -p xtask --" diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 725fe21a..f031fb7e 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -10,7 +10,6 @@ body: Before submitting, please search existing issues to avoid duplicates. If a similar issue exists, comment there instead of opening a new one. To help us resolve the issue efficiently, please provide the necessary details below. - - type: textarea id: bug-description attributes: @@ -19,7 +18,6 @@ body: placeholder: Provide a clear and concise description of the problem, including what you expected to happen and what actually occurred. validations: required: true - - type: textarea id: environment attributes: diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index f5101ed8..b88054bd 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -9,7 +9,6 @@ body: A clear and concise description of what the problem is. validations: required: true - - type: textarea attributes: label: Describe the solution you'd like @@ -17,7 +16,6 @@ body: A clear and concise description of what you'd like to happen. validations: required: true - - type: textarea attributes: label: Additional context diff --git a/.github/actions/rust-toolchain-yq/action.yml b/.github/actions/rust-toolchain-yq/action.yml new file mode 100644 index 00000000..b3197f9c --- /dev/null +++ b/.github/actions/rust-toolchain-yq/action.yml @@ -0,0 +1,82 @@ +name: "Rust toolchain (from rust-toolchain.toml)" +description: "Installs yq, reads rust-toolchain.toml, and installs that Rust toolchain via dtolnay/rust-toolchain." 
+inputs: + targets: + description: "Comma-separated Rust target triples to install" + required: false + default: "" + components: + description: "Comma-separated Rust components to install" + required: false + default: "" +outputs: + toolchain: + description: "Resolved toolchain channel" + value: ${{ steps.get_toolchain.outputs.toolchain }} +runs: + using: "composite" + steps: + - name: Install yq (mikefarah) + shell: bash + run: | + set -euo pipefail + + version="v4.50.1" + + os="${RUNNER_OS}" # Linux|macOS|Windows + arch="${RUNNER_ARCH}" # X64|ARM64 + + case "${os}" in + Linux) platform="linux"; ext="" ;; + macOS) platform="darwin"; ext="" ;; + Windows) platform="windows"; ext=".exe" ;; + *) echo "Unsupported runner OS: ${os}" >&2; exit 1 ;; + esac + + case "${arch}" in + X64) cpu="amd64" ;; + ARM64) cpu="arm64" ;; + *) echo "Unsupported runner arch: ${arch}" >&2; exit 1 ;; + esac + + url="https://github.com/mikefarah/yq/releases/download/${version}/yq_${platform}_${cpu}${ext}" + + install_dir="${RUNNER_TEMP}/yq" + mkdir -p "${install_dir}" + bin="${install_dir}/yq${ext}" + + echo "Downloading ${url}" >&2 + + if [[ "${os}" == "Windows" ]]; then + powershell -NoProfile -Command "Invoke-WebRequest -Uri '${url}' -OutFile '${bin}'" + else + curl -fsSL "${url}" -o "${bin}" + fi + + if [[ "${os}" != "Windows" ]]; then + chmod +x "${bin}" + fi + + echo "${install_dir}" >> "${GITHUB_PATH}" + + "${bin}" --version + - name: Get toolchain from rust-toolchain.toml + id: get_toolchain + shell: bash + run: | + ext="" + if [[ "${RUNNER_OS}" == "Windows" ]]; then + ext=".exe" + fi + toolchain="$(${RUNNER_TEMP}/yq/yq${ext} -r '.toolchain.channel' rust-toolchain.toml)" + if [[ -z "${toolchain}" || "${toolchain}" == "null" ]]; then + echo "Could not determine toolchain from rust-toolchain.toml" >&2 + exit 1 + fi + echo "toolchain=${toolchain}" >> "${GITHUB_OUTPUT}" + - name: Install Rust toolchain + uses: 
dtolnay/rust-toolchain@f7ccc83f9ed1e5b9c81d8a67d7ad1a747e22a561 + with: + toolchain: ${{ steps.get_toolchain.outputs.toolchain }} + targets: ${{ inputs.targets }} + components: ${{ inputs.components }} diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fde8a59c..8b477e80 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,24 +10,22 @@ updates: update-types: - patch - minor + cooldown: + default-days: 7 - package-ecosystem: github-actions directory: "/" schedule: interval: weekly - - package-ecosystem: npm - directory: "/vscode" - schedule: - interval: weekly groups: version: applies-to: version-updates update-types: - patch - minor - ignore: - - dependency-name: "@types/vscode" - - package-ecosystem: docker - directory: "/" + cooldown: + default-days: 7 + - package-ecosystem: npm + directory: "/vscode" schedule: interval: weekly groups: @@ -36,8 +34,10 @@ updates: update-types: - patch - minor - - package-ecosystem: npm - directory: "/scripts" + cooldown: + default-days: 7 + - package-ecosystem: docker + directory: "/" schedule: interval: weekly groups: @@ -46,4 +46,5 @@ updates: update-types: - patch - minor - - major + cooldown: + default-days: 7 diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yml similarity index 53% rename from .github/workflows/build.yaml rename to .github/workflows/build.yml index a5b9d061..f2868964 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yml @@ -1,41 +1,41 @@ name: Build RustOwl - on: push: branches: ["main"] - pull_request: - types: ["labeled"] workflow_dispatch: workflow_call: outputs: run_id: description: Run ID of this workflow value: ${{ github.run_id }} - jobs: rustowl: - if: github.event.action != 'labeled' || github.event.label.name == 'do-build-check' strategy: matrix: - os: - - ubuntu-24.04 - - ubuntu-24.04-arm - - macos-15 - - macos-13 - - windows-2022 - - windows-11-arm - + include: + - os: ubuntu-24.04 + target: x86_64-unknown-linux-gnu + - 
os: ubuntu-24.04-arm + target: aarch64-unknown-linux-gnu + - os: macos-15 + target: aarch64-apple-darwin + - os: macos-15-intel + target: x86_64-apple-darwin + - os: windows-2022 + target: x86_64-pc-windows-msvc + - os: windows-11-arm + target: aarch64-pc-windows-msvc runs-on: ${{ matrix.os }} permissions: contents: write defaults: run: shell: bash - steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false # Using fat LTO causes failure to link on Windows ARM - name: Set build profile run: | @@ -44,56 +44,47 @@ jobs: else echo "build_profile=release" >> $GITHUB_ENV fi - # uname on Windows on ARM returns "x86_64" - name: Set ARCH flag for Windows on ARM if: matrix.os == 'windows-11-arm' run: echo "TOOLCHAIN_ARCH=aarch64" >> $GITHUB_ENV - - name: setup env run: | - host_tuple="$(./scripts/build/toolchain eval 'echo $HOST_TUPLE')" - echo "host_tuple=$host_tuple" >> $GITHUB_ENV - toolchain="$(./scripts/build/toolchain eval 'echo $RUSTOWL_TOOLCHAIN')" + toolchain="$(cargo xtask toolchain sh -lc 'echo $RUSTOWL_TOOLCHAIN')" echo "toolchain=$toolchain" >> $GITHUB_ENV - ([[ "$host_tuple" == *msvc* ]] && echo "exec_ext=.exe" || echo "exec_ext=") >> $GITHUB_ENV - ([[ "$host_tuple" == *windows* ]] && echo "is_windows=true" || echo "is_windows=false") >> $GITHUB_ENV - ([[ "$host_tuple" == *linux* ]] && echo "is_linux=true" || echo "is_linux=false") >> $GITHUB_ENV - + ([[ "${{ matrix.target }}" == *msvc* ]] && echo "exec_ext=.exe" || echo "exec_ext=") >> $GITHUB_ENV + ([[ "${{ matrix.target }}" == *windows* ]] && echo "is_windows=true" || echo "is_windows=false") >> $GITHUB_ENV + ([[ "${{ matrix.target }}" == *linux* ]] && echo "is_linux=true" || echo "is_linux=false") >> $GITHUB_ENV - name: Install zig if: ${{ env.is_linux == 'true' }} - uses: mlugg/setup-zig@v2 + uses: mlugg/setup-zig@e7d1537c378b83b8049f65dda471d87a2f7b2df2 # v2.2.0 with: version: 0.13.0 - - name: 
Build run: | if [[ "${{ env.is_linux }}" == "true" ]]; then - ./scripts/build/toolchain cargo install --locked cargo-zigbuild - ./scripts/build/toolchain cargo zigbuild --target ${{ env.host_tuple }}.2.17 --profile=${{ env.build_profile }} + cargo xtask toolchain cargo install --locked cargo-zigbuild + cargo xtask toolchain cargo zigbuild --target ${{ matrix.target }}.2.17 --profile=${{ env.build_profile }} else - ./scripts/build/toolchain cargo build --target ${{ env.host_tuple }} --profile=${{ env.build_profile }} + cargo xtask toolchain cargo build --target ${{ matrix.target }} --profile=${{ env.build_profile }} fi - - name: Check the functionality run: | - ./target/${{ env.host_tuple }}/${{ env.build_profile }}/rustowl${{ env.exec_ext }} check ./perf-tests/dummy-package - + ./target/${{ matrix.target }}/${{ env.build_profile }}/rustowl${{ env.exec_ext }} check ./perf-tests/dummy-package - name: Set archive name run: | if [[ "${{ env.is_windows }}" == "true" ]]; then - echo "archive_name=rustowl-${{ env.host_tuple }}.zip" >> $GITHUB_ENV + echo "archive_name=rustowl-${{ matrix.target }}.zip" >> $GITHUB_ENV else - echo "archive_name=rustowl-${{ env.host_tuple }}.tar.gz" >> $GITHUB_ENV + echo "archive_name=rustowl-${{ matrix.target }}.tar.gz" >> $GITHUB_ENV fi - - name: Setup archive artifacts run: | rm -rf rustowl && mkdir -p rustowl/sysroot/${{ env.toolchain }}/bin - cp target/${{ env.host_tuple }}/${{ env.build_profile }}/rustowl${{ env.exec_ext }} ./rustowl/ - cp target/${{ env.host_tuple }}/${{ env.build_profile }}/rustowlc${{ env.exec_ext }} ./rustowl/sysroot/${{ env.toolchain }}/bin + cp target/${{ matrix.target }}/${{ env.build_profile }}/rustowl${{ env.exec_ext }} ./rustowl/ + cp target/${{ matrix.target }}/${{ env.build_profile }}/rustowlc${{ env.exec_ext }} ./rustowl/sysroot/${{ env.toolchain }}/bin cp README.md ./rustowl cp LICENSE ./rustowl @@ -112,44 +103,38 @@ jobs: cd .. 
fi - cp ./rustowl/rustowl${{ env.exec_ext }} ./rustowl-${{ env.host_tuple }}${{ env.exec_ext }} - + cp ./rustowl/rustowl${{ env.exec_ext }} ./rustowl-${{ matrix.target }}${{ env.exec_ext }} - name: Upload - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: - name: rustowl-runtime-${{ env.host_tuple }} + name: rustowl-runtime-${{ matrix.target }} path: | - rustowl-${{ env.host_tuple }}${{ env.exec_ext }} + rustowl-${{ matrix.target }}${{ env.exec_ext }} ${{ env.archive_name }} - vscode: - if: github.event.action != 'labeled' || github.event.label.name == 'do-build-check' runs-on: ubuntu-latest permissions: contents: write - steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: 20 - - name: Setup PNPM And Install dependencies - uses: pnpm/action-setup@v4 + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 with: package_json_file: ./vscode/package.json run_install: | - cwd: ./vscode - - name: Create VSIX run: pnpm build working-directory: ./vscode - - name: Upload - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: rustowl-vscode path: vscode/**/*.vsix diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 8b469284..fdd7814c 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -1,8 +1,6 @@ name: Generate Changelog - on: workflow_dispatch: - jobs: changelogen: runs-on: ubuntu-latest @@ -10,16 +8,15 @@ jobs: contents: write pull-requests: write steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 - + 
persist-credentials: false - run: | docker pull quay.io/git-chglog/git-chglog:latest docker run -v "$PWD":/workdir quay.io/git-chglog/git-chglog --tag-filter-pattern '^v\d+\.\d+\.\d+$' -o CHANGELOG.md - - name: Create Pull Request - uses: peter-evans/create-pull-request@v8 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0 with: add-paths: | CHANGELOG.md diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index fa5fb984..3e3cd47a 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -1,90 +1,145 @@ name: Basic Checks - on: pull_request: branches: ["main"] workflow_dispatch: workflow_call: - +permissions: + contents: read env: CARGO_TERM_COLOR: always - RUSTC_BOOTSTRAP: 1 - +defaults: + run: + shell: bash jobs: - check: - name: Format & Lint - runs-on: ubuntu-latest + lint: + name: Lint (${{ matrix.target }}) + strategy: + matrix: + include: + - os: ubuntu-24.04 + target: x86_64-unknown-linux-gnu + - os: ubuntu-24.04-arm + target: aarch64-unknown-linux-gnu + - os: macos-15 + target: aarch64-apple-darwin + - os: macos-15-intel + target: x86_64-apple-darwin + - os: windows-2022 + target: x86_64-pc-windows-msvc + - os: windows-11-arm + target: aarch64-pc-windows-msvc + runs-on: ${{ matrix.os }} steps: - name: Checkout - uses: actions/checkout@v6 - - - name: Get Rust version - run: | - echo RUSTUP_TOOLCHAIN=$(cat ./scripts/build/channel) >> $GITHUB_ENV - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@stable + uses: ./.github/actions/rust-toolchain-yq with: - toolchain: ${{ env.RUSTUP_TOOLCHAIN }} - components: clippy,rustfmt,llvm-tools,rust-src,rustc-dev - + targets: ${{ matrix.target }} + components: clippy,llvm-tools,rust-src,rustc-dev - name: Cache dependencies - uses: Swatinem/rust-cache@v2 + uses: 
Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 with: save-if: ${{ github.ref == 'refs/heads/main' }} - - - name: Check formatting - run: cargo fmt --check - - name: Run clippy - run: cargo clippy --all-targets --all-features -- -D warnings - - test: - name: Build & Test + run: cargo clippy --all-targets --all-features --workspace -- -D warnings + fmt: + name: Check Formatting runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - name: Install Rust toolchain + uses: ./.github/actions/rust-toolchain-yq + with: + components: rustfmt + - name: Check formatting + run: cargo fmt --check --all + test: + name: Build & Test (${{ matrix.target }}) + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: ubuntu-24.04 + target: x86_64-unknown-linux-gnu + - os: ubuntu-24.04-arm + target: aarch64-unknown-linux-gnu + - os: macos-15 + target: aarch64-apple-darwin + - os: macos-15-intel + target: x86_64-apple-darwin + - os: windows-2022 + target: x86_64-pc-windows-msvc + - os: windows-11-arm + target: aarch64-pc-windows-msvc + steps: + - name: Checkout + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - name: Install cargo-nextest + uses: taiki-e/install-action@3522286d40783523f9c7880e33f785905b4c20d0 # v2.66.1 + with: + tool: cargo-nextest - name: Cache dependencies - uses: Swatinem/rust-cache@v2 - + uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 + - name: Install runtime deps + run: cargo xtask toolchain echo "Successfully installed runtime dependencies" + - name: Run tests with nextest + run: cargo xtask toolchain cargo nextest run --no-fail-fast + - name: Run doc tests + run: cargo xtask toolchain cargo test --doc - name: Build release - run: ./scripts/build/toolchain cargo build --release - + if: matrix.os != 
'windows-11-arm' + run: cargo xtask toolchain cargo build --release + - name: Build Release (Windows ARM) + if: matrix.os == 'windows-11-arm' + run: cargo xtask toolchain cargo build --profile arm-windows-release - name: Install binary - run: ./scripts/build/toolchain cargo install --path . - + if: matrix.os != 'windows-11-arm' + run: cargo xtask toolchain cargo install --path crates/rustowl + - name: Install binary (Windows ARM) + if: matrix.os == 'windows-11-arm' + run: cargo xtask toolchain cargo install --path crates/rustowl --profile arm-windows-release - name: Test rustowl check run: rustowl check ./perf-tests/dummy-package - vscode: name: VS Code Extension Checks runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: 20 - - name: Setup PNPM And Install dependencies - uses: pnpm/action-setup@v4 + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 with: package_json_file: ./vscode/package.json run_install: | - cwd: ./vscode - - name: Check formatting run: pnpm prettier -c src working-directory: ./vscode - - name: Lint and type check run: pnpm lint && pnpm check-types working-directory: ./vscode - - name: Run tests run: xvfb-run -a pnpm run test working-directory: ./vscode + cargo-deny: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - uses: EmbarkStudios/cargo-deny-action@3fd3802e88374d3fe9159b834c7714ec57d6c979 # v2.0.15 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 00000000..f1e8e7ce --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL Advanced" +on: + push: + branches: ["main"]
+ pull_request: + branches: ["main"] + schedule: + - cron: '20 20 * * 5' +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + runs-on: 'ubuntu-latest' + permissions: + security-events: write + actions: read + contents: read + strategy: + fail-fast: false + matrix: + include: + - language: actions + build-mode: none + - language: javascript-typescript + build-mode: none + - language: rust + build-mode: none + steps: + - name: Checkout repository + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - name: Initialize CodeQL + uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + queries: security-extended,security-and-quality + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/commitlint.yml b/.github/workflows/commitlint.yml deleted file mode 100644 index d3705149..00000000 --- a/.github/workflows/commitlint.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Commitlint - -on: - push: - branches: - - main - pull_request: - -permissions: - contents: read - -jobs: - commitlint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6 - with: - fetch-depth: 0 - - name: Setup node - uses: actions/setup-node@v6 - with: - node-version: lts/* - - - name: Setup PNPM And Install dependencies - uses: pnpm/action-setup@v4 - with: - package_json_file: ./scripts/package.json - run_install: | - - cwd: ./scripts - - - name: Validate current commit (last commit) with commitlint - working-directory: ./scripts - if: github.event_name == 'push' - run: pnpm commitlint --last --verbose - - - name: Validate PR commits with commitlint - working-directory: ./scripts - if: github.event_name == 'pull_request' - run: pnpm commitlint --from ${{ 
github.event.pull_request.base.sha }} --to ${{ github.event.pull_request.head.sha }} --verbose diff --git a/.github/workflows/committed.yml b/.github/workflows/committed.yml new file mode 100644 index 00000000..244ff5f3 --- /dev/null +++ b/.github/workflows/committed.yml @@ -0,0 +1,18 @@ +name: Commitlint +on: + push: + branches: + - main + pull_request: +permissions: + contents: read +jobs: + commitlint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 0 + persist-credentials: false + - name: Lint Commits + uses: crate-ci/committed@dc6f20ddd899fe6d6f0402807884c0a4b3176b53 # v1.1.10 diff --git a/.github/workflows/docker-checks.yml b/.github/workflows/docker-checks.yml index 336bb58c..06c85700 100644 --- a/.github/workflows/docker-checks.yml +++ b/.github/workflows/docker-checks.yml @@ -1,5 +1,4 @@ name: Docker Checks - on: pull_request: branches: ["main"] @@ -12,32 +11,33 @@ on: - "Dockerfile" - ".github/workflows/docker-checks.yml" workflow_dispatch: - +permissions: + contents: read jobs: dockerfile-lint: name: Dockerfile Lint runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Run hadolint - uses: hadolint/hadolint-action@v3.3.0 + uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0 with: dockerfile: Dockerfile - docker-build: name: Docker Build Test runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Build Docker image - uses: docker/build-push-action@v6 + uses: 
docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 with: context: . push: false @@ -45,7 +45,6 @@ jobs: tags: rustowl:test cache-from: type=gha cache-to: type=gha,mode=max,retention-days=7 - - name: Test Docker image run: | docker run --rm rustowl:test --version diff --git a/.github/workflows/neovim-checks.yml b/.github/workflows/neovim-checks.yml index 3371635c..f88672b6 100644 --- a/.github/workflows/neovim-checks.yml +++ b/.github/workflows/neovim-checks.yml @@ -1,5 +1,4 @@ name: NeoVim Checks - on: pull_request: paths: @@ -10,7 +9,7 @@ on: - selene.toml - vim.yml - nvim-tests/**/* - - scripts/run_nvim_tests.sh + - crates/xtask/**/* - .github/workflows/neovim-checks.yml push: branches: @@ -23,57 +22,56 @@ on: - selene.toml - vim.yml - nvim-tests/**/* - - scripts/run_nvim_tests.sh + - crates/xtask/**/* - .github/workflows/neovim-checks.yml - +permissions: + contents: read env: CARGO_TERM_COLOR: always - jobs: test: name: Run Tests runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Setup Neovim - uses: rhysd/action-setup-vim@v1 + uses: rhysd/action-setup-vim@19e3dd31a84dbc2c5445d65e9b363f616cab96c1 # v1.6.0 with: neovim: true version: v0.11.2 - - name: Setup RustOwl run: | - ./scripts/build/toolchain cargo build --release - ./scripts/build/toolchain cargo install --path . 
- + cargo xtask toolchain cargo build --release + cargo xtask toolchain cargo install --path crates/rustowl - name: Run Tests - run: ./scripts/run_nvim_tests.sh - + run: cargo xtask nvim-tests style: name: Check Styling Using Stylua runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Run Stylua - uses: JohnnyMorganz/stylua-action@v4 + uses: JohnnyMorganz/stylua-action@479972f01e665acfcba96ada452c36608bdbbb5e # v4.1.0 with: token: ${{ secrets.GITHUB_TOKEN }} version: latest args: --check . - lint: name: Lint Code Using Selene runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Lint Lua code with Selene - uses: YoloWingPixie/selene-lua-linter-action@v1 + uses: YoloWingPixie/selene-lua-linter-action@24ecf180fd5bb4d3b40b4296de61b32179cb79d1 # v1 with: config-path: "selene.toml" working-directory: "." 
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yml similarity index 63% rename from .github/workflows/release.yaml rename to .github/workflows/release.yml index 7cef950e..c0d74a51 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yml @@ -1,20 +1,15 @@ name: Release RustOwl - on: push: tags: - v* - permissions: - actions: read - contents: write - + contents: read jobs: check: uses: ./.github/workflows/checks.yml build: uses: ./.github/workflows/build.yml - meta: name: Check Version runs-on: ubuntu-latest @@ -27,37 +22,47 @@ - name: Check pre-release id: pre-release run: | - if [[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + if [[ "$GITHUB_REF_NAME" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then echo "pre_release=false" >> $GITHUB_OUTPUT else echo "pre_release=true" >> $GITHUB_OUTPUT fi - crates-io-release: name: Create Crates.io release runs-on: ubuntu-latest needs: ["meta"] + environment: release + permissions: + id-token: write # Required for OIDC token exchange steps: - - uses: actions/checkout@v6 - - name: Release crates.io + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - uses: rust-lang/crates-io-auth-action@b7e9a28eded4986ec6b1fa40eeee8f8f165559ec # v1.0.3 + id: auth + - name: Publish to Crates.io + run: cargo publish if: needs.meta.outputs.pre_release != 'true' - run: | - echo '${{ secrets.CRATES_IO_API_TOKEN }}' | cargo login - cargo publish - + env: + CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }} vscode-release: name: Create Vscode Release runs-on: ubuntu-latest needs: ["meta"] + environment: release steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: 20 +
cache: "pnpm" + cache-dependency-path: "vscode/pnpm-lock.yaml" - name: Setup PNPM And Install dependencies if: needs.meta.outputs.pre_release != 'true' - uses: pnpm/action-setup@v4 + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 with: package_json_file: ./vscode/package.json run_install: | @@ -69,20 +74,24 @@ jobs: working-directory: ./vscode env: VSCE_PAT: ${{ secrets.VSCE_PAT }} - vscodium-release: name: Create Vscodium Release runs-on: ubuntu-latest needs: ["meta"] + environment: release steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: 20 + cache: "pnpm" + cache-dependency-path: "vscode/pnpm-lock.yaml" - name: Setup PNPM And Install dependencies if: needs.meta.outputs.pre_release != 'true' - uses: pnpm/action-setup@v4 + uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0 with: package_json_file: ./vscode/package.json run_install: | @@ -94,27 +103,27 @@ jobs: working-directory: ./vscode env: OVSX_PAT: ${{ secrets.OVSX_PAT }} - winget-release: name: Create Winget Release runs-on: windows-latest needs: ["meta"] steps: - - uses: vedantmgoyal9/winget-releaser@main + - uses: vedantmgoyal9/winget-releaser@4ffc7888bffd451b357355dc214d43bb9f23917e # v2 if: needs.meta.outputs.pre_release != 'true' with: identifier: Cordx56.Rustowl token: ${{ secrets.WINGET_TOKEN }} - aur-release: name: Create AUR Release runs-on: ubuntu-latest needs: ["meta"] steps: - name: Checkout - uses: actions/checkout@v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: AUR Release - uses: KSXGitHub/github-actions-deploy-aur@v4.1.1 + uses: KSXGitHub/github-actions-deploy-aur@2ac5a4c1d7035885d46b10e3193393be8460b6f1 # v4.1.1 if: 
needs.meta.outputs.pre_release != 'true' with: pkgname: rustowl @@ -130,7 +139,7 @@ jobs: AUR_EMAIL: ${{ secrets.AUR_EMAIL }} AUR_SSH_PRIVATE_KEY: ${{ secrets.AUR_SSH_PRIVATE_KEY }} - name: AUR Release (Bin) - uses: KSXGitHub/github-actions-deploy-aur@v4.1.1 + uses: KSXGitHub/github-actions-deploy-aur@2ac5a4c1d7035885d46b10e3193393be8460b6f1 # v4.1.1 if: needs.meta.outputs.pre_release != 'true' with: pkgname: rustowl-bin @@ -145,39 +154,42 @@ jobs: AUR_USERNAME: ${{ secrets.AUR_USERNAME }} AUR_EMAIL: ${{ secrets.AUR_EMAIL }} AUR_SSH_PRIVATE_KEY: ${{ secrets.AUR_SSH_PRIVATE_KEY }} - github-release: name: Create A GitHub Release runs-on: ubuntu-latest needs: ["meta"] + permissions: + contents: write steps: - - uses: actions/checkout@v6 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: fetch-depth: 0 + persist-credentials: false - name: Setup Node.js - uses: actions/setup-node@v6 + uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0 with: node-version: 20 - name: Generate Release Notes run: | - npx changelogithub@latest --contributors --output release.md + npx changelogithub@latest --output release.md env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Download All Artifacts - uses: actions/download-artifact@v7 + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 with: path: artifacts pattern: rustowl-* merge-multiple: true github-token: ${{ secrets.GITHUB_TOKEN }} - name: Release - uses: softprops/action-gh-release@v2 + uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0 with: files: artifacts/**/* draft: true body_path: release.md prerelease: ${{ needs.meta.outputs.pre_release == 'true' }} - + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} docker-release: name: Release To GitHub Container Registry runs-on: ubuntu-latest @@ -185,26 +197,23 @@ jobs: permissions: contents: read packages: write - steps: - name: Checkout repository - uses: 
actions/checkout@v6 - + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Login to GitHub Container Registry - uses: docker/login-action@v3 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 - name: Build and push Docker image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 with: context: . push: true diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml index a9506aa7..83c0b185 100644 --- a/.github/workflows/security.yml +++ b/.github/workflows/security.yml @@ -1,16 +1,15 @@ name: Security & Memory Safety - on: pull_request: branches: ["main"] push: branches: ["main"] workflow_dispatch: - +permissions: + contents: read env: CARGO_TERM_COLOR: always RUSTC_BOOTSTRAP: 1 - jobs: security-checks: name: Security & Memory Safety Analysis @@ -27,35 +26,18 @@ jobs: runs-on: ${{ matrix.os }} steps: - name: Checkout repository - uses: actions/checkout@v6 - - - name: Install Rust toolchain (from rust-toolchain.toml) - uses: actions-rust-lang/setup-rust-toolchain@v1 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: + persist-credentials: false + - name: Install Rust toolchain + uses: ./.github/actions/rust-toolchain-yq + with: + targets: ${{ matrix.target }} components: miri,rust-src,llvm-tools-preview,rustc-dev - # Automatically reads from rust-toolchain.toml - cache: false - - - name: Install system dependencies (Linux) - if: matrix.runner_os == 'Linux' - run: | - sudo 
apt-get update - sudo apt-get install -y valgrind - - - name: Make scripts executable (Unix) - if: runner.os != 'Windows' - run: chmod +x scripts/*.sh - - name: Run comprehensive security checks - shell: bash run: | - # The security script will auto-detect CI environment and install missing tools - # Exit with proper code to fail CI if security tests fail - if ! ./scripts/security.sh; then - echo "::error::Security tests failed" - exit 1 - fi - + # cargo-deny is run in checks.yml; keep security.yml focused. + cargo xtask security --no-deny - name: Create security summary and cleanup if: failure() shell: bash @@ -63,13 +45,13 @@ jobs: # Only create summary and cleanup on failure echo "Security tests failed, creating summary..." - # The security script should have created its own summary - if [ -f "security-logs/security_summary_*.md" ]; then - echo "Security script summary found:" + # The security command should have created its own summary. + if compgen -G "security-logs/security_summary_*.md" > /dev/null; then + echo "Security summary found:" ls -la security-logs/security_summary_*.md echo "::error::Security test failures detected. Check the summary for details." 
else - echo "Warning: Security script summary not found, creating fallback summary" + echo "Warning: security summary not found, creating fallback summary" mkdir -p security-logs echo "# Security Testing Summary (Failure)" > security-logs/failure-summary.txt echo "Generated: $(date)" >> security-logs/failure-summary.txt @@ -84,7 +66,6 @@ jobs: ls -la security-logs/ echo "Total log directory size: $(du -sh security-logs 2>/dev/null | cut -f1 || echo 'N/A')" fi - - name: Cleanup build artifacts (on success) if: success() shell: bash @@ -95,10 +76,9 @@ jobs: echo "Removing security logs (tests passed)" rm -rf security-logs/ fi - - name: Upload security artifacts (on failure only) if: failure() - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: security-logs-${{ matrix.os }}-${{ github.run_id }} path: | diff --git a/.github/workflows/spelling.yml b/.github/workflows/spelling.yml index 02b629d8..ad194e62 100644 --- a/.github/workflows/spelling.yml +++ b/.github/workflows/spelling.yml @@ -1,23 +1,21 @@ name: Spelling - permissions: contents: read - on: pull_request: push: branches: - main - env: CLICOLOR: 1 - jobs: spelling: name: Spell Check with Typos runs-on: ubuntu-latest steps: - name: Checkout Actions Repository - uses: actions/checkout@v6 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false - name: Spell Check Repo - uses: crate-ci/typos@v1.42.0 + uses: crate-ci/typos@bb4666ad77b539a6b4ce4eda7ebb6de553704021 # v1.42.0 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index af7fe0e8..f7dfe9db 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,18 +1,15 @@ name: Mark stale issues and pull requests - on: schedule: - cron: "0 7 * * *" - jobs: stale: runs-on: ubuntu-latest permissions: issues: write pull-requests: write - steps: - - uses: actions/stale@v10 + - uses: 
actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1 with: repo-token: ${{ secrets.GITHUB_TOKEN }} stale-issue-message: "🤖 Bot: **Issue** has not seen activity in **30** days and will therefore be marked as stale. It will be closed in 7 days if no further response is found." diff --git a/.github/workflows/validate-pr-title.yml b/.github/workflows/validate-pr-title.yml index 252c44c0..f6b90066 100644 --- a/.github/workflows/validate-pr-title.yml +++ b/.github/workflows/validate-pr-title.yml @@ -1,7 +1,7 @@ name: "Validate Pull Request Title" on: - pull_request_target: + pull_request: types: - opened - edited @@ -13,7 +13,8 @@ jobs: runs-on: ubuntu-latest permissions: pull-requests: read + statuses: write steps: - - uses: amannn/action-semantic-pull-request@v6 + - uses: amannn/action-semantic-pull-request@48f256284bd46cdaab1048c3721360e808335d50 # v6.1.1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml new file mode 100644 index 00000000..502e1355 --- /dev/null +++ b/.github/workflows/zizmor.yml @@ -0,0 +1,19 @@ +name: Zizmor Analysis +on: + push: + branches: ["main"] + pull_request: + branches: ["**"] +permissions: {} +jobs: + zizmor: + runs-on: ubuntu-latest + permissions: + security-events: write + steps: + - name: Checkout repository + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + - name: Run zizmor 🌈 + uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0 diff --git a/.github/zizmor.yml b/.github/zizmor.yml new file mode 100644 index 00000000..841b40fc --- /dev/null +++ b/.github/zizmor.yml @@ -0,0 +1,17 @@ +rules: + cache-poisoning: + ignore: + - release.yml + use-trusted-publishing: + ignore: + - release.yml + dangerous-triggers: + ignore: + - validate-pr-title.yml + - coverage.yml + template-injection: + ignore: + - build.yml + github-env: + ignore: + - action.yml diff --git 
a/.rust-version-stable b/.rust-version-stable new file mode 100644 index 00000000..7f229af9 --- /dev/null +++ b/.rust-version-stable @@ -0,0 +1 @@ +1.92.0 diff --git a/.typos.toml b/.typos.toml index 19a0e983..892f63c4 100644 --- a/.typos.toml +++ b/.typos.toml @@ -1,5 +1,6 @@ [default.extend-words] enew = "enew" +Ba = "Ba" [files] extend-exclude = ["CHANGELOG.md"] diff --git a/CHANGELOG.md b/CHANGELOG.md index d053a99f..265be035 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -114,7 +114,7 @@ ### 🐞 Bug Fixes - support gsed (macOS) -- version.sh removed and use ./scripts/bump.sh +- version.sh removed and use `cargo xtask bump` - specify pkg-fmt for binstall - restore current newest version diff --git a/Cargo.lock b/Cargo.lock index 85de9d40..770a1fc0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8,46 +8,20 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] -[[package]] -name = "alloca" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7d05ea6aea7e9e64d25b9156ba2fee3fdd659e34e41063cd2fc7cd020d7f4" -dependencies = [ - "cc", -] - -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - [[package]] name = "anstream" -version = "0.6.20" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", @@ -60,9 +34,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.11" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" @@ -75,24 +49,30 @@ dependencies = [ [[package]] name = "anstyle-query" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.10" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + [[package]] name = "arbitrary" version = "1.4.2" @@ -102,45 +82,17 @@ dependencies = [ "derive_arbitrary", ] -[[package]] -name = "async-trait" 
-version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "auto_impl" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - [[package]] name = "aws-lc-rs" -version = "1.14.0" +version = "1.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b8ff6c09cd57b16da53641caa860168b88c172a5ee163b0288d3d6eea12786" +checksum = "6a88aab2464f1f25453baa7a07c84c5b7684e274054ba06817f382357f77a288" dependencies = [ "aws-lc-sys", "zeroize", @@ -148,11 +100,10 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.31.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e44d16778acaf6a9ec9899b92cebd65580b83f685446bf2e1f5d3d732f99dcd" +checksum = "b45afffdee1e7c9126814751f88dddc747f41d91da16c9551a0f1e8a11e788a1" dependencies = [ - "bindgen", "cc", "cmake", "dunce", @@ -165,84 +116,47 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" -[[package]] -name = "bindgen" -version = "0.72.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" -dependencies = [ - "bitflags 2.9.3", - "cexpr", - "clang-sys", - "itertools", - "log", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "syn", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" -version = "2.9.3" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] -name = "block-buffer" -version = "0.10.4" +name = "borrow-or-share" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] +checksum = "dc0b364ead1874514c8c2855ab558056ebfeb775653e7ae45ff72f28f8f3166c" [[package]] name = "bumpalo" -version = "3.19.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" - -[[package]] -name = "bzip2" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bea8dcd42434048e4f7a304411d9273a411f647446c1234a65ce0554923f4cff" 
-dependencies = [ - "libbz2-rs-sys", -] +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" [[package]] name = "camino" -version = "1.1.12" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0b03af37dad7a14518b7691d81acb0f8222604ad3d1b02f6b4bed5188c0cd5" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" dependencies = [ - "serde", + "serde_core", ] [[package]] name = "cargo-platform" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8abf5d501fd757c2d2ee78d0cc40f606e92e3a63544420316565556ed28485e2" +checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" dependencies = [ "serde", + "serde_core", ] [[package]] @@ -256,88 +170,38 @@ dependencies = [ "semver", "serde", "serde_json", - "thiserror", + "thiserror 2.0.17", ] -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" -version = "1.2.34" +version = "1.2.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc" +checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", ] [[package]] -name = "cexpr" -version = "0.6.0" +name = "cesu8" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cfg-if" -version = "1.0.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" - -[[package]] -name = "ciborium" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" -dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" - -[[package]] -name = "ciborium-ll" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" -dependencies = [ - "ciborium-io", - "half", -] - -[[package]] -name = "cipher" -version = "0.4.4" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] -name = "clang-sys" -version = "1.8.1" +name = "cfg_aliases" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "clap" @@ -349,6 +213,16 @@ dependencies = [ "clap_derive", ] +[[package]] +name = "clap-verbosity-flag" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d92b1fab272fe943881b77cc6e920d6543e5b1bfadbd5ed81c7c5a755742394" +dependencies = [ + "clap", 
+ "tracing-core", +] + [[package]] name = "clap_builder" version = "4.5.54" @@ -359,6 +233,7 @@ dependencies = [ "anstyle", "clap_lex", "strsim", + "terminal_size", ] [[package]] @@ -394,9 +269,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.5" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "clap_mangen" @@ -410,9 +285,9 @@ dependencies = [ [[package]] name = "cmake" -version = "0.1.54" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" dependencies = [ "cc", ] @@ -424,19 +299,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] -name = "colored" -version = "3.0.0" +name = "combine" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "windows-sys 0.59.0", + "bytes", + "memchr", ] [[package]] -name = "constant_time_eq" -version = "0.3.1" +name = "condtype" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" +checksum = "baf0a07a401f374238ab8e2f11a104d2851bf9ce711ec69804834de8af45c7af" + +[[package]] +name = "console" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"03e45a4a8926227e4197636ba97a9fc9b00477e9f4bd711395687c5f0734bec4" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "unicode-width", + "windows-sys 0.61.2", +] [[package]] name = "core-foundation" @@ -464,30 +353,6 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - [[package]] name = "crc32fast" version = "1.5.0" @@ -497,41 +362,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d883447757bb0ee46f233e9dc22eb84d93a9508c9b868687b274fc431d886bf" -dependencies = [ - "alloca", - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "itertools", - "num-traits", - "oorandom", - "page_size", - "plotters", - "rayon", - "regex", - "serde", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed943f81ea2faa8dcecbbfa50164acf95d555afec96a27871663b300e387b2e4" -dependencies = [ - "cast", - "itertools", -] - [[package]] name = "crossbeam-deque" 
version = "0.8.6" @@ -557,29 +387,14 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - [[package]] name = "dashmap" -version = "5.5.3" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" dependencies = [ "cfg-if", + "crossbeam-utils", "hashbrown 0.14.5", "lock_api", "once_cell", @@ -587,25 +402,21 @@ dependencies = [ ] [[package]] -name = "deflate64" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" - -[[package]] -name = "deranged" -version = "0.5.4" +name = "derive_arbitrary" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ - "powerfmt", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "derive_arbitrary" -version = "1.4.2" +name = "displaydoc" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", @@ -613,21 +424,24 @@ dependencies = [ ] [[package]] -name = "digest" -version = "0.10.7" +name = "divan" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +checksum = "a405457ec78b8fe08b0e32b4a3570ab5dff6dd16eb9e76a5ee0a9d9cbd898933" dependencies = [ - "block-buffer", - "crypto-common", - "subtle", + "cfg-if", + "clap", + "condtype", + "divan-macros", + "libc", + "regex-lite", ] [[package]] -name = "displaydoc" -version = "0.2.5" +name = "divan-macros" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +checksum = "9556bc800956545d6420a640173e5ba7dfa82f38d3ea5a167eb555bc69ac3323" dependencies = [ "proc-macro2", "quote", @@ -640,12 +454,36 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" +[[package]] +name = "ecow" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78e4f79b296fbaab6ce2e22d52cb4c7f010fe0ebe7a32e34fa25885fd797bd02" +dependencies = [ + "serde", +] + [[package]] name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "encoding_rs" +version = 
"0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "equivalent" version = "1.0.2" @@ -654,12 +492,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -668,6 +506,17 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "filedescriptor" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d" +dependencies = [ + "libc", + "thiserror 1.0.69", + "winapi", +] + [[package]] name = "filetime" version = "0.2.26" @@ -680,6 +529,12 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41" + [[package]] name = "flate2" version = "1.1.5" @@ -691,12 +546,28 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "fluent-uri" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" +dependencies = [ + "borrow-or-share", + "ref-cast", +] + [[package]] name = "fnv" version = "1.0.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "form_urlencoded" version = "1.2.2" @@ -790,49 +661,47 @@ dependencies = [ ] [[package]] -name = "generic-array" -version = "0.14.7" +name = "gag" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +checksum = "a713bee13966e9fbffdf7193af71d54a6b35a0bb34997cd6c9519ebeb5005972" dependencies = [ - "typenum", - "version_check", + "filedescriptor", + "tempfile", ] [[package]] name = "getrandom" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" dependencies = [ "cfg-if", + "js-sys", "libc", - "wasi 0.11.1+wasi-snapshot-preview1", + "wasi", + "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasip2", + "wasm-bindgen", ] -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - [[package]] name = "h2" -version = "0.4.12" +version = 
"0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", "bytes", @@ -847,16 +716,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "half" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" -dependencies = [ - "cfg-if", - "crunchy", -] - [[package]] name = "hashbrown" version = "0.14.5" @@ -865,9 +724,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.15.5" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "heck" @@ -881,23 +740,13 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - [[package]] name = "http" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -932,9 +781,9 @@ checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = 
"hyper" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ "atomic-waker", "bytes", @@ -962,7 +811,6 @@ dependencies = [ "hyper", "hyper-util", "rustls", - "rustls-native-certs", "rustls-pki-types", "tokio", "tokio-rustls", @@ -971,9 +819,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.16" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" dependencies = [ "base64", "bytes", @@ -997,9 +845,9 @@ dependencies = [ [[package]] name = "icu_collections" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", "potential_utf", @@ -1010,9 +858,9 @@ dependencies = [ [[package]] name = "icu_locale_core" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -1023,11 +871,10 @@ dependencies = [ [[package]] name = "icu_normalizer" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +checksum = 
"5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", @@ -1038,42 +885,38 @@ dependencies = [ [[package]] name = "icu_normalizer_data" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "2.0.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ - "displaydoc", "icu_collections", "icu_locale_core", "icu_properties_data", "icu_provider", - "potential_utf", "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "2.0.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", "icu_locale_core", - "stable_deref_trait", - "tinystr", "writeable", "yoke", "zerofrom", @@ -1093,70 +936,152 @@ dependencies = [ ] [[package]] -name = "idna_adapter" -version = "1.2.1" +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "rayon", + "serde", + "serde_core", +] + +[[package]] +name = "indicatif" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88" +dependencies = [ + "console", + "portable-atomic", + "rayon", + "unicode-segmentation", + "unicode-width", + "unit-prefix", + "web-time", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is-docker" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" +dependencies = [ + "once_cell", +] + +[[package]] +name = "is-wsl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" +dependencies = [ + "is-docker", + "once_cell", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itoa" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] -name = "indexmap" -version = "2.11.0" +name = "jiff" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9" +checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" dependencies = [ - "equivalent", - "hashbrown 0.15.5", + "jiff-static", + "jiff-tzdb-platform", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", + "windows-sys 0.61.2", ] [[package]] -name = "inout" -version = "0.1.4" +name = "jiff-static" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" dependencies = [ - "generic-array", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "ipnet" -version = "2.11.0" +name = "jiff-tzdb" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +checksum = "68971ebff725b9e2ca27a601c5eb38a4c5d64422c4cbab0c535f248087eda5c2" [[package]] -name = "iri-string" -version = "0.7.8" +name = "jiff-tzdb-platform" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +checksum = 
"875a5a69ac2bab1a891711cf5eccbec1ce0341ea805560dcd90b7a2e925132e8" dependencies = [ - "memchr", - "serde", + "jiff-tzdb", ] [[package]] -name = "is_terminal_polyfill" -version = "1.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" - -[[package]] -name = "itertools" -version = "0.13.0" +name = "jni" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" dependencies = [ - "either", + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", ] [[package]] -name = "itoa" -version = "1.0.15" +name = "jni-sys" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" @@ -1164,58 +1089,48 @@ version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.3.4", "libc", ] [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8" dependencies = [ "once_cell", "wasm-bindgen", ] [[package]] -name = "libbz2-rs-sys" -version = "0.2.2" +name = "lazy_static" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.178" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" - -[[package]] -name = "libloading" -version = "0.8.8" +version = "0.2.180" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" -dependencies = [ - "cfg-if", - "windows-targets 0.53.3", -] +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" [[package]] name = "libredox" -version = "0.1.9" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ - "bitflags 2.9.3", + "bitflags", "libc", - "redox_syscall", + "redox_syscall 0.7.0", ] [[package]] name = "libz-rs-sys" -version = "0.5.1" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "172a788537a2221661b480fee8dc5f96c580eb34fa88764d3205dc356c7e4221" +checksum = "c10501e7805cee23da17c7790e59df2870c0d4043ec6d03f67d31e2b53e77415" dependencies = [ "zlib-rs", ] @@ -1228,17 +1143,16 @@ checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lock_api" -version = "0.4.13" +version = "0.4.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] @@ -1249,39 +1163,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] -name = "lsp-types" -version = "0.94.1" +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "ls-types" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" +checksum = "7a7deb98ef9daaa7500324351a5bab7c80c644cfb86b4be0c4433b582af93510" dependencies = [ - "bitflags 1.3.2", + "bitflags", + "fluent-uri", + "percent-encoding", "serde", "serde_json", - "serde_repr", - "url", ] [[package]] -name = "lzma-rust2" -version = "0.15.4" +name = "matchers" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48172246aa7c3ea28e423295dd1ca2589a24617cc4e588bb8cfe177cb2c54d95" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ - "crc", - "sha2", + "regex-automata", ] [[package]] name = "memchr" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] -name = "minimal-lexical" -version = "0.2.1" +name = "mime" +version = "0.3.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "miniz_oxide" @@ -1295,38 +1214,22 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.4" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ "libc", - "wasi 0.11.1+wasi-snapshot-preview1", - "windows-sys 0.59.0", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", + "wasi", + "windows-sys 0.61.2", ] [[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-traits" -version = "0.2.19" +name = "nu-ansi-term" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "autocfg", + "windows-sys 0.61.2", ] [[package]] @@ -1339,15 +1242,6 @@ dependencies = [ "libc", ] -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" -dependencies = [ - "libc", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -1356,54 +1250,45 @@ checksum = 
"42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "once_cell_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" [[package]] -name = "oorandom" -version = "11.1.5" +name = "open" +version = "5.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" +checksum = "43bb73a7fa3799b198970490a51174027ba0d4ec504b03cd08caf513d40024bc" +dependencies = [ + "is-wsl", + "libc", + "pathdiff", +] [[package]] name = "openssl-probe" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" - -[[package]] -name = "page_size" -version = "0.6.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" -dependencies = [ - "libc", - "winapi", -] +checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391" [[package]] name = "parking_lot_core" -version = "0.9.11" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.5.18", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] -name = "pbkdf2" -version = "0.12.2" +name = "pathdiff" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest", - "hmac", -] +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "percent-encoding" @@ -1411,26 +1296,6 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" -[[package]] -name = "pin-project" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "pin-project-lite" version = "0.2.16" @@ -1444,94 +1309,118 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] -name = "pkg-config" -version = "0.3.32" +name = "portable-atomic" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" [[package]] -name = "plotters" -version = "0.3.7" +name = "portable-atomic-util" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - 
"web-sys", + "portable-atomic", ] [[package]] -name = "plotters-backend" -version = "0.3.7" +name = "potential_utf" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] [[package]] -name = "plotters-svg" -version = "0.3.7" +name = "ppv-lite86" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "plotters-backend", + "zerocopy", ] [[package]] -name = "potential_utf" -version = "0.1.2" +name = "proc-macro2" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" dependencies = [ - "zerovec", + "unicode-ident", ] [[package]] -name = "powerfmt" +name = "process_alive" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppmd-rust" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c834641d8ad1b348c9ee86dec3b9840d805acd5f24daa5f90c788951a52ff59b" +checksum = "d882026f051f810bece627b431b5097f6c7aef71cfd1f2c35dd350085fe5157a" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] [[package]] -name = "prettyplease" -version = "0.2.37" +name = "quinn" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ - "proc-macro2", - "syn", + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.17", + "tokio", + "tracing", + "web-time", ] [[package]] -name = "proc-macro2" -version = "1.0.101" +name = "quinn-proto" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ - "unicode-ident", + "aws-lc-rs", + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.17", + "tinyvec", + "tracing", + "web-time", ] [[package]] -name = "process_alive" -version = "0.2.0" +name = "quinn-udp" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d882026f051f810bece627b431b5097f6c7aef71cfd1f2c35dd350085fe5157a" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" dependencies = [ + "cfg_aliases", "libc", - "windows-sys 0.61.2", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.40" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" dependencies = [ "proc-macro2", ] @@ -1542,6 +1431,35 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "rand" +version 
= "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", +] + [[package]] name = "rayon" version = "1.11.0" @@ -1564,11 +1482,40 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.17" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_syscall" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" +dependencies = [ + "bitflags", +] + +[[package]] +name = "ref-cast" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" dependencies = [ - "bitflags 2.9.3", + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] 
[[package]] @@ -1585,29 +1532,36 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "722166aa0d7438abbaa4d5cc2c649dac844e8c56d82fb3d33e9c34b5cd268fc6" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] +[[package]] +name = "regex-lite" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d942b98df5e658f56f20d592c7f868833fe38115e65c33003d8cd224b0155da" + [[package]] name = "regex-syntax" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" -version = "0.12.28" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +checksum = "04e9018c9d814e5f30cc16a0f03271aeab3571e609612d9fe78c1aa8d11c2f62" dependencies = [ "base64", "bytes", + "encoding_rs", "futures-core", "h2", "http", @@ -1618,18 +1572,17 @@ dependencies = [ "hyper-util", "js-sys", "log", + "mime", "percent-encoding", "pin-project-lite", + "quinn", "rustls", - "rustls-native-certs", "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", + "rustls-platform-verifier", "sync_wrapper", "tokio", "tokio-rustls", - "tower 0.5.2", + "tower", "tower-http", "tower-service", "url", @@ -1646,7 +1599,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.16", + "getrandom 0.2.17", "libc", "untrusted", "windows-sys 0.52.0", @@ -1670,7 +1623,7 @@ version = "1.1.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" dependencies = [ - "bitflags 2.9.3", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -1693,9 +1646,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.8.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ "openssl-probe", "rustls-pki-types", @@ -1705,18 +1658,46 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.12.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" dependencies = [ + "web-time", "zeroize", ] +[[package]] +name = "rustls-platform-verifier" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework", + "security-framework-sys", + "webpki-root-certs", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" -version = "0.103.6" +version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ "aws-lc-rs", "ring", @@ -1728,30 +1709,39 @@ dependencies = [ name = "rustowl" version = "1.0.0-rc.1" dependencies = [ + "anyhow", "cargo_metadata", "clap", + "clap-verbosity-flag", "clap_complete", "clap_complete_nushell", "clap_mangen", - "criterion", + "divan", + "ecow", "flate2", - "log", + "foldhash", + "gag", + "indexmap", + "indicatif", + "jiff", + "memchr", "num_cpus", "process_alive", + "rand", "rayon", "regex", "reqwest", - "rustls", "serde", "serde_json", - "simple_logger", "tar", "tempfile", "tikv-jemalloc-sys", "tikv-jemallocator", "tokio", "tokio-util", - "tower-lsp", + "tower-lsp-server", + "tracing", + "tracing-subscriber", "uuid", "zip", ] @@ -1762,12 +1752,6 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" -[[package]] -name = "ryu" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" - [[package]] name = "same-file" version = "1.0.6" @@ -1779,11 +1763,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -1794,11 +1778,11 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "security-framework" -version = "3.3.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ - "bitflags 2.9.3", + "bitflags", "core-foundation 0.10.1", "core-foundation-sys", "libc", @@ -1807,9 +1791,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -1817,11 +1801,12 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] @@ -1868,48 +1853,12 @@ dependencies = [ ] [[package]] -name = "serde_repr" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = 
"0.10.9" +name = "sharded-slab" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ - "cfg-if", - "cpufeatures", - "digest", + "lazy_static", ] [[package]] @@ -1920,30 +1869,19 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.6" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] [[package]] name = "simd-adler32" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" - -[[package]] -name = "simple_logger" -version = "5.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291bee647ce7310b0ea721bfd7e0525517b4468eb7c7e15eb8bd774343179702" -dependencies = [ - "colored", - "log", - "time", - "windows-sys 0.61.2", -] +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" [[package]] name = "slab" @@ -1959,19 +1897,19 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "socket2" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "stable_deref_trait" 
-version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "strsim" @@ -1987,9 +1925,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.106" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies = [ "proc-macro2", "quote", @@ -2022,7 +1960,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.3", + "bitflags", "core-foundation 0.9.4", "system-configuration-sys", ] @@ -2055,26 +1993,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand", - "getrandom 0.3.3", + "getrandom 0.3.4", "once_cell", "rustix", "windows-sys 0.61.2", ] +[[package]] +name = "terminal_size" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" +dependencies = [ + "rustix", + "windows-sys 0.60.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + [[package]] name = "thiserror" -version = "2.0.16" +version = "2.0.17" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.17", ] [[package]] name = "thiserror-impl" -version = "2.0.16" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", @@ -2082,78 +2039,70 @@ dependencies = [ ] [[package]] -name = "tikv-jemalloc-sys" -version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +name = "thiserror-impl" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ - "cc", - "libc", + "proc-macro2", + "quote", + "syn", ] [[package]] -name = "tikv-jemallocator" -version = "0.6.1" +name = "thread_local" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ - "libc", - "tikv-jemalloc-sys", + "cfg-if", ] [[package]] -name = "time" -version = "0.3.44" +name = "tikv-jemalloc-sys" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" dependencies = [ - "deranged", - "itoa", + 
"cc", "libc", - "num-conv", - "num_threads", - "powerfmt", - "serde", - "time-core", - "time-macros", ] [[package]] -name = "time-core" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" - -[[package]] -name = "time-macros" -version = "0.2.24" +name = "tikv-jemallocator" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" dependencies = [ - "num-conv", - "time-core", + "libc", + "tikv-jemalloc-sys", ] [[package]] name = "tinystr" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", ] [[package]] -name = "tinytemplate" -version = "1.2.1" +name = "tinyvec" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ - "serde", - "serde_json", + "tinyvec_macros", ] +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.49.0" @@ -2183,9 +2132,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", "tokio", @@ -2204,20 +2153,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tower-layer", - "tower-service", -] - [[package]] name = "tower" version = "0.5.2" @@ -2239,14 +2174,14 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.9.3", + "bitflags", "bytes", "futures-util", "http", "http-body", "iri-string", "pin-project-lite", - "tower 0.5.2", + "tower", "tower-layer", "tower-service", ] @@ -2258,39 +2193,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] -name = "tower-lsp" -version = "0.20.0" +name = "tower-lsp-server" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" +checksum = "2f0e711655c89181a6bc6a2cc348131fcd9680085f5b06b6af13427a393a6e72" dependencies = [ - "async-trait", - "auto_impl", "bytes", "dashmap", "futures", "httparse", - "lsp-types", + "ls-types", "memchr", "serde", "serde_json", "tokio", "tokio-util", - "tower 0.4.13", - "tower-lsp-macros", + "tower", "tracing", ] -[[package]] -name = "tower-lsp-macros" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "tower-service" version = "0.3.3" @@ -2299,9 +2220,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -2310,9 +2231,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", @@ -2321,11 +2242,41 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ + "matchers", + 
"nu-ansi-term", "once_cell", + "regex-automata", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -2335,16 +2286,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] -name = "typenum" -version = "1.18.0" +name = "unicode-ident" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] -name = "unicode-ident" -version = "1.0.18" +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-width" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + +[[package]] +name = "unit-prefix" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3" [[package]] name = "untrusted" @@ -2354,9 +2317,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.7" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", @@ -2382,16 +2345,17 @@ version = "1.19.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.3.4", "js-sys", + "rand", "wasm-bindgen", ] [[package]] -name = "version_check" -version = "0.9.5" +name = "valuable" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] name = "walkdir" @@ -2419,45 +2383,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = 
"0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", "js-sys", @@ -2468,9 +2419,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2478,36 +2429,55 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", ] +[[package]] +name = "web-time" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2526,11 +2496,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -2539,12 +2509,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - [[package]] name = "windows-link" version = "0.2.1" @@ -2553,47 +2517,47 @@ checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-registry" -version = "0.5.3" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.1.3", + "windows-link", "windows-result", "windows-strings", ] 
[[package]] name = "windows-result" -version = "0.3.4" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link 0.1.3", + "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.2" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link 0.1.3", + "windows-link", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.42.2", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] @@ -2604,7 +2568,7 @@ version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.3", + "windows-targets 0.53.5", ] [[package]] @@ -2613,7 +2577,22 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies 
= [ - "windows-link 0.2.1", + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -2634,21 +2613,27 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.3" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link 0.1.3", - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -2657,9 +2642,15 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" @@ -2669,9 +2660,15 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" @@ -2681,9 +2678,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -2693,9 +2690,15 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum 
= "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" @@ -2705,9 +2708,15 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" @@ -2717,9 +2726,15 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" @@ -2729,9 +2744,15 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" @@ -2741,42 +2762,56 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.3", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "xattr" -version = "1.5.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" dependencies = [ "libc", "rustix", ] +[[package]] +name = "xtask" +version = "0.0.0" +dependencies = [ + "anyhow", + "clap", + "flate2", + "jiff", + "open", + "regex", + 
"reqwest", + "serde", + "serde_json", + "tar", + "tempfile", + "tokio", +] + [[package]] name = "yoke" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -2784,9 +2819,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", @@ -2795,51 +2830,57 @@ dependencies = [ ] [[package]] -name = "zerofrom" -version = "0.1.6" +name = "zerocopy" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" dependencies = [ - "zerofrom-derive", + "zerocopy-derive", ] [[package]] -name = "zerofrom-derive" -version = "0.1.6" +name = "zerocopy-derive" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" dependencies = [ "proc-macro2", "quote", "syn", - "synstructure", ] [[package]] -name = "zeroize" -version = "1.8.1" +name = "zerofrom" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = 
"50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ - "zeroize_derive", + "zerofrom-derive", ] [[package]] -name = "zeroize_derive" -version = "1.4.2" +name = "zerofrom-derive" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", "syn", + "synstructure", ] +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + [[package]] name = "zerotrie" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" dependencies = [ "displaydoc", "yoke", @@ -2848,9 +2889,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -2859,9 +2900,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", @@ -2874,76 +2915,34 @@ version = "7.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bdd8a47718a4ee5fe78e07667cd36f3de80e7c2bfe727c7074245ffc7303c037" dependencies = [ - "aes", "arbitrary", - "bzip2", - "constant_time_eq", "crc32fast", - "deflate64", "flate2", - "generic-array", - "getrandom 0.3.3", - "hmac", "indexmap", - "lzma-rust2", "memchr", - "pbkdf2", - "ppmd-rust", - "sha1", - "time", - "zeroize", "zopfli", - "zstd", ] [[package]] name = "zlib-rs" -version = "0.5.1" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "626bd9fa9734751fc50d6060752170984d7053f5a39061f524cda68023d4db8a" +checksum = "40990edd51aae2c2b6907af74ffb635029d5788228222c4bb811e9351c0caad3" [[package]] name = "zmij" -version = "1.0.2" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4a4e8e9dc5c62d159f04fcdbe07f4c3fb710415aab4754bf11505501e3251d" +checksum = "ac93432f5b761b22864c774aac244fa5c0fd877678a4c37ebf6cf42208f9c9ec" [[package]] name = "zopfli" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edfc5ee405f504cd4984ecc6f14d02d55cfda60fa4b689434ef4102aae150cd7" +checksum = "f05cd8797d63865425ff89b5c4a48804f35ba0ce8d125800027ad6017d2b5249" dependencies = [ "bumpalo", "crc32fast", "log", "simd-adler32", ] - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.15+zstd.1.5.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/Cargo.toml b/Cargo.toml index aadfeb00..f4c7116c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,58 +1,52 @@ -[package] -name = "rustowl" -version = "1.0.0-rc.1" -authors = ["cordx56 "] +[workspace] +resolver = "3" +members = ["crates/*"] +default-members = ["crates/rustowl"] +exclude = ["perf-tests/dummy-package"] + +[workspace.package] edition = "2024" -description = "Visualize Ownership and Lifetimes in Rust" documentation = "https://github.com/cordx56/rustowl/blob/main/README.md" -readme = "README.md" repository = "https://github.com/cordx56/rustowl" license = "MPL-2.0" -keywords = ["lifetime", "lsp", "ownership", "visualization"] -categories = ["development-tools", "visualization"] - -[package.metadata.rust-analyzer] -rustc_private = true - -[package.metadata.binstall] -pkg-url = "{ repo }/releases/download/v{ version }/rustowl-{ target }{ archive-suffix }" -pkg-fmt = "tgz" -disabled-strategies = ["quick-install", "compile"] - -[package.metadata.binstall.overrides.x86_64-pc-windows-msvc] -pkg-fmt = "zip" - -[package.metadata.binstall.overrides.aarch64-pc-windows-msvc] -pkg-fmt = "zip" - -[[bench]] -harness = false -name = "rustowl_bench_simple" -[dependencies] +[workspace.dependencies] +anyhow = "1" cargo_metadata = "0.23" -clap = { version = "4", features = ["cargo", "derive"] } +clap = { version = "4", features = ["derive"] } clap_complete = "4" clap_complete_nushell = "4" -flate2 = "1" -log = "0.4" +clap_mangen = "0.2" +clap-verbosity-flag = { version = "3", default-features = false, features = [ + "tracing" +] } +divan = "0.1" +ecow = { version = "0.2", features = ["serde"] } +flate2 = { version = "1", default-features = false, features = ["zlib-rs"] } +foldhash = "0.2.0" +gag = "1" +indexmap = { version = "2", 
features = ["rayon", "serde"] } +indicatif = { version = "0.18", features = ["improved_unicode", "rayon"] } +jiff = "0.2" +memchr = "2" num_cpus = "1" process_alive = "0.2" +rand = "0.9" rayon = "1" -reqwest = { version = "0.12", default-features = false, features = [ - "http2", - "rustls-tls-native-roots-no-provider", +regex = "1" +reqwest = { version = "0.13", features = [ "socks", - "system-proxy", -] } -rustls = { version = "0.23.36", default-features = false, features = [ - "aws_lc_rs", ] } serde = { version = "1", features = ["derive"] } serde_json = "1" -simple_logger = { version = "5", features = ["stderr"] } -tar = "0.4.44" +tar = "0.4" tempfile = "3" +tikv-jemalloc-sys = { version = "0.6", features = [ + "override_allocator_on_supported_platforms" +] } +tikv-jemallocator = { version = "0.6", features = [ + "override_allocator_on_supported_platforms" +] } tokio = { version = "1", features = [ "fs", "io-std", @@ -64,26 +58,12 @@ tokio = { version = "1", features = [ "sync", "time", ] } -tokio-util = "0.7" -tower-lsp = "0.20" -uuid = { version = "1", features = ["v4"] } - -[dev-dependencies] -criterion = { version = "0.8", features = ["html_reports"] } - -[build-dependencies] -clap = { version = "4", features = ["derive"] } -clap_complete = "4" -clap_complete_nushell = "4" -clap_mangen = "0.2" -regex = "1" - -[target.'cfg(not(target_env = "msvc"))'.dependencies] -tikv-jemalloc-sys = "0.6" -tikv-jemallocator = "0.6" - -[target.'cfg(target_os = "windows")'.dependencies] -zip = "7.0.0" +tokio-util = { version = "0.7", features = ["io-util"] } +tower-lsp-server = "0.23" +tracing = "0.1.44" +tracing-subscriber = { version = "0.3.22", features = ["env-filter"] } +uuid = { version = "1", features = ["fast-rng", "v4"] } +zip = { version = "7", default-features = false, features = ["deflate"] } [profile.release] opt-level = 3 @@ -92,8 +72,8 @@ lto = "fat" codegen-units = 1 [profile.release.package."*"] -strip = "symbols" opt-level = 3 +strip = "symbols" 
[profile.arm-windows-release] inherits = "release" diff --git a/Dockerfile b/Dockerfile index 3258f780..a85a0ed9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,19 @@ -FROM debian:bookworm-slim AS chef +FROM rust:slim-bookworm AS chef WORKDIR /app RUN apt-get update && \ apt-get install -y --no-install-recommends build-essential=12.9 ca-certificates=20230311+deb12u1 curl=7.88.1-10+deb12u14 && \ rm -rf /var/lib/apt/lists/* -COPY scripts/ scripts/ -RUN ./scripts/build/toolchain cargo install cargo-chef --locked +COPY . . +RUN cargo xtask toolchain cargo install cargo-chef --locked FROM chef AS planner -COPY . . -RUN ./scripts/build/toolchain cargo chef prepare --recipe-path recipe.json +RUN cargo xtask toolchain cargo chef prepare --recipe-path recipe.json FROM chef AS builder COPY --from=planner /app/recipe.json recipe.json -RUN ./scripts/build/toolchain cargo chef cook --release --recipe-path recipe.json -COPY . . -RUN ./scripts/build/toolchain cargo build --release +RUN cargo xtask toolchain cargo chef cook --release --recipe-path recipe.json && \ + cargo xtask toolchain cargo build --release # final image FROM debian:bookworm-slim diff --git a/README.md b/README.md index d7e80df6..d40be39f 100644 --- a/README.md +++ b/README.md @@ -108,7 +108,7 @@ Here we describe how to start using RustOwl with VS Code. - You can install `cargo` using `rustup` from [this link](https://rustup.rs/). - Visual Studio Code (VS Code) installed -We tested this guide on macOS Sequoia 15.3.2 on arm64 architecture with VS Code 1.99.3 and `cargo` 1.89.0. +We tested this guide on macOS Sequoia 15.3.2 on arm64 architecture with VS Code 1.99.3 and `cargo` 1.92.0. 
### VS Code diff --git a/aur/PKGBUILD b/aur/PKGBUILD index ce1e062b..14320fff 100644 --- a/aur/PKGBUILD +++ b/aur/PKGBUILD @@ -17,7 +17,7 @@ sha256sums=('fa120643aeb48061eb32a7c993dabff88aa4e9d0b32f8ab0f3289b3fb2cf5744') prepare() { cd rustowl-${pkgver} export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 rustup component add rust-src rustc-dev llvm-tools cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')" } @@ -26,7 +26,7 @@ build() { cd rustowl-${pkgver} export CARGO_TARGET_DIR=target export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 export RUSTOWL_RUNTIME_DIRS=/opt/rustowl cargo build --frozen --release --all-features --target $(rustc --print=host-tuple) } @@ -34,7 +34,7 @@ build() { check() { cd rustowl-${pkgver} export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 cargo test --frozen --all-features } diff --git a/aur/PKGBUILD-GIT b/aur/PKGBUILD-GIT index 0163a8d8..17c01d55 100644 --- a/aur/PKGBUILD-GIT +++ b/aur/PKGBUILD-GIT @@ -21,7 +21,7 @@ pkgver() { prepare() { cd "$srcdir/rustowl" export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 rustup component add rust-src rustc-dev llvm-tools cargo fetch --locked --target "$(rustc -vV | sed -n 's/host: //p')" } @@ -30,7 +30,7 @@ build() { cd "$srcdir/rustowl" export CARGO_TARGET_DIR=target export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 export RUSTOWL_RUNTIME_DIRS=/opt/rustowl cargo build --frozen --release --all-features --target $(rustc --print=host-tuple) } @@ -38,7 +38,7 @@ build() { check() { cd "$srcdir/rustowl" export RUSTC_BOOTSTRAP=1 - export RUSTUP_TOOLCHAIN=1.89.0 + export RUSTUP_TOOLCHAIN=1.92.0 cargo test --frozen --all-features } diff --git a/benches/rustowl_bench_simple.rs b/benches/rustowl_bench_simple.rs deleted file mode 100644 index 0ddd0d64..00000000 --- 
a/benches/rustowl_bench_simple.rs +++ /dev/null @@ -1,87 +0,0 @@ -use criterion::{Criterion, criterion_group, criterion_main}; -use std::hint::black_box; -use std::process::Command; -use std::time::Duration; - -fn bench_rustowl_check(c: &mut Criterion) { - let dummy_package = "./perf-tests/dummy-package"; - - let mut group = c.benchmark_group("rustowl_check"); - group - .sample_size(20) - .measurement_time(Duration::from_secs(300)) - .warm_up_time(Duration::from_secs(5)); - - // Ensure rustowl binary is built - let output = Command::new("cargo") - .args(["build", "--release", "--bin", "rustowl"]) - .output() - .expect("Failed to build rustowl"); - - if !output.status.success() { - panic!( - "Failed to build rustowl: {}", - String::from_utf8_lossy(&output.stderr) - ); - } - - let binary_path = "./target/release/rustowl"; - - group.bench_function("default", |b| { - b.iter(|| { - let output = Command::new(binary_path) - .args(["check", dummy_package]) - .output() - .expect("Failed to run rustowl check"); - black_box(output.status.success()); - }) - }); - - group.bench_function("all_targets", |b| { - b.iter(|| { - let output = Command::new(binary_path) - .args(["check", dummy_package, "--all-targets"]) - .output() - .expect("Failed to run rustowl check with all targets"); - black_box(output.status.success()); - }) - }); - - group.bench_function("all_features", |b| { - b.iter(|| { - let output = Command::new(binary_path) - .args(["check", dummy_package, "--all-features"]) - .output() - .expect("Failed to run rustowl check with all features"); - black_box(output.status.success()); - }) - }); - - group.finish(); -} - -fn bench_rustowl_comprehensive(c: &mut Criterion) { - let dummy_package = "./perf-tests/dummy-package"; - let binary_path = "./target/release/rustowl"; - - let mut group = c.benchmark_group("rustowl_comprehensive"); - group - .sample_size(20) - .measurement_time(Duration::from_secs(200)) - .warm_up_time(Duration::from_secs(5)); - - 
group.bench_function("comprehensive", |b| { - b.iter(|| { - let output = Command::new(binary_path) - .args(["check", dummy_package, "--all-targets", "--all-features"]) - .output() - .expect("Failed to run comprehensive rustowl check"); - black_box(output.status.success()); - }) - }); - - group.finish(); -} - -criterion_group!(benches, bench_rustowl_check, bench_rustowl_comprehensive); -criterion_main!(benches); diff --git a/committed.toml b/committed.toml new file mode 100644 index 00000000..a9bfae49 --- /dev/null +++ b/committed.toml @@ -0,0 +1,2 @@ +style = "conventional" +ignore_author_re = "(dependabot|renovate)" diff --git a/crates/rustowl/Cargo.toml b/crates/rustowl/Cargo.toml new file mode 100644 index 00000000..0b85385c --- /dev/null +++ b/crates/rustowl/Cargo.toml @@ -0,0 +1,101 @@ +[package] +name = "rustowl" +version = "1.0.0-rc.1" +edition.workspace = true +description = "Visualize Ownership and Lifetimes in Rust" +documentation.workspace = true +readme = "../../README.md" +repository.workspace = true +license.workspace = true +keywords = ["lifetime", "lsp", "ownership", "visualization"] +categories = ["development-tools", "visualization"] + +[package.metadata.rust-analyzer] +rustc_private = true + +[package.metadata.binstall] +pkg-url = "{ repo }/releases/download/v{ version }/rustowl-{ target }{ archive-suffix }" +pkg-fmt = "tgz" +disabled-strategies = ["quick-install", "compile"] + +[package.metadata.binstall.overrides.x86_64-pc-windows-msvc] +pkg-fmt = "zip" + +[package.metadata.binstall.overrides.aarch64-pc-windows-msvc] +pkg-fmt = "zip" + +[[bench]] +harness = false +name = "rustowl_bench_simple" + +[[bench]] +harness = false +name = "line_col_bench" + +[[bench]] +harness = false +name = "cargo_output_parse_bench" + +[[bench]] +harness = false +name = "decos_bench" + +[dependencies] +anyhow.workspace = true +cargo_metadata.workspace = true +clap = { workspace = true, features = ["cargo"] } +clap_complete.workspace = true 
+clap_complete_nushell.workspace = true +clap-verbosity-flag.workspace = true +ecow.workspace = true +flate2.workspace = true +foldhash.workspace = true +indexmap.workspace = true +indicatif.workspace = true +memchr.workspace = true +num_cpus.workspace = true +process_alive.workspace = true +rayon.workspace = true +reqwest.workspace = true +serde.workspace = true +serde_json.workspace = true +tar.workspace = true +tempfile.workspace = true +tokio.workspace = true +tokio-util.workspace = true +tower-lsp-server.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true +uuid.workspace = true + +[dev-dependencies] +divan.workspace = true +gag.workspace = true +rand.workspace = true + +[build-dependencies] +clap.workspace = true +clap_complete.workspace = true +clap_complete_nushell.workspace = true +clap_mangen.workspace = true +clap-verbosity-flag.workspace = true +jiff.workspace = true +regex.workspace = true + +[target.'cfg(any(target_os = "linux", target_os = "macos"))'.dependencies] +tikv-jemalloc-sys.workspace = true +tikv-jemallocator.workspace = true + +[target.'cfg(target_os = "windows")'.dependencies] +zip.workspace = true + +[features] +default = ["jemalloc"] + +# Use jemalloc as the global allocator on linux/macos. +# Disable with `--no-default-features` (useful for Valgrind). +jemalloc = [] + +# Bench-only helpers used by `cargo bench` targets. +# Off by default to avoid exposing internal APIs. 
+bench = [] diff --git a/crates/rustowl/benches/cargo_output_parse_bench.rs b/crates/rustowl/benches/cargo_output_parse_bench.rs new file mode 100644 index 00000000..bbd9857a --- /dev/null +++ b/crates/rustowl/benches/cargo_output_parse_bench.rs @@ -0,0 +1,135 @@ +use divan::{AllocProfiler, Bencher, black_box}; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +use tikv_jemallocator::Jemalloc; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::new(Jemalloc); + +#[cfg(any(target_os = "windows", miri))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::system(); + +fn main() { + divan::main(); +} + +// Small but representative cargo message examples. +const COMPILER_ARTIFACT: &str = r#"{"reason":"compiler-artifact","package_id":"foo 0.1.0 (path+file:///tmp/foo)","target":{"kind":["lib"],"crate_types":["lib"],"name":"foo","src_path":"/tmp/foo/src/lib.rs","edition":"2021"}}"#; + +// `Workspace` is a transparent newtype around an IndexMap; a minimal value is `{}`. +const WORKSPACE: &str = r#"{}"#; + +// Cargo emits many JSON messages that we ignore; they still contain a `reason` field. 
+const OTHER_CARGO_MESSAGE: &str = r#"{"reason":"build-script-executed","package_id":"bar 0.1.0 (path+file:///tmp/bar)","linked_libs":[],"linked_paths":[]}"#; + +#[derive(serde::Deserialize, Clone, Debug)] +struct CargoCheckMessageTarget { + name: String, +} + +#[derive(serde::Deserialize, Clone, Debug)] +#[serde(tag = "reason", rename_all = "kebab-case")] +enum CargoCheckMessage { + CompilerArtifact { + target: CargoCheckMessageTarget, + }, + #[allow(unused)] + BuildFinished {}, +} + +#[derive(serde::Deserialize, Clone, Debug)] +#[serde(transparent)] +struct Workspace(#[allow(dead_code)] std::collections::BTreeMap); + +fn baseline_parse_line(line: &str) -> (usize, bool) { + let mut artifacts = 0usize; + let mut saw_workspace = false; + + if let Ok(CargoCheckMessage::CompilerArtifact { target }) = + serde_json::from_str::(line) + { + black_box(&target.name); + artifacts += 1; + } + if let Ok(_ws) = serde_json::from_str::(line) { + saw_workspace = true; + } + + (artifacts, saw_workspace) +} + +fn optimized_parse_line(buf: &[u8]) -> (usize, bool) { + let mut artifacts = 0usize; + let mut saw_workspace = false; + + let artifact_marker = b"\"reason\":\"compiler-artifact\""; + let reason_marker = b"\"reason\":"; + + if memchr::memmem::find(buf, artifact_marker).is_some() { + if let Ok(CargoCheckMessage::CompilerArtifact { target }) = + serde_json::from_slice::(buf) + { + black_box(&target.name); + artifacts += 1; + } + return (artifacts, false); + } + + if memchr::memmem::find(buf, reason_marker).is_some() { + return (0, false); + } + + if serde_json::from_slice::(buf).is_ok() { + saw_workspace = true; + } + + (artifacts, saw_workspace) +} + +fn make_lines(count: usize) -> Vec { + let mut lines = Vec::with_capacity(count); + for i in 0..count { + if i % 10 == 0 { + lines.push(WORKSPACE.to_string()); + } else if i % 3 == 0 { + lines.push(COMPILER_ARTIFACT.to_string()); + } else { + lines.push(OTHER_CARGO_MESSAGE.to_string()); + } + } + lines +} + 
+#[divan::bench(sample_count = 30)] +fn parse_baseline(bencher: Bencher) { + let lines = make_lines(5_000); + bencher.bench(|| { + let mut artifacts = 0usize; + let mut workspaces = 0usize; + for line in &lines { + let (a, w) = baseline_parse_line(line); + artifacts += a; + workspaces += usize::from(w); + } + black_box((artifacts, workspaces)); + }); +} + +#[divan::bench(sample_count = 30)] +fn parse_optimized(bencher: Bencher) { + let lines = make_lines(5_000); + let bytes: Vec> = lines.iter().map(|s| s.as_bytes().to_vec()).collect(); + + bencher.bench(|| { + let mut artifacts = 0usize; + let mut workspaces = 0usize; + for buf in &bytes { + let (a, w) = optimized_parse_line(buf); + artifacts += a; + workspaces += usize::from(w); + } + black_box((artifacts, workspaces)); + }); +} diff --git a/crates/rustowl/benches/decos_bench.rs b/crates/rustowl/benches/decos_bench.rs new file mode 100644 index 00000000..6ffd5ea7 --- /dev/null +++ b/crates/rustowl/benches/decos_bench.rs @@ -0,0 +1,173 @@ +use divan::{AllocProfiler, Bencher, black_box}; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +use tikv_jemallocator::Jemalloc; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::new(Jemalloc); + +#[cfg(any(target_os = "windows", miri))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::system(); + +#[cfg(not(feature = "bench"))] +fn main() { + eprintln!("`decos_bench` requires `--features bench`"); +} + +#[cfg(feature = "bench")] +fn main() { + divan::main(); +} + +// Benchmarks repeated cursor decoration queries after a one-time analysis preload. 
+// +// Run with: +// `cargo bench --bench decos_bench --features bench` +#[cfg(feature = "bench")] +#[divan::bench(sample_count = 20)] +fn cursor_decos_hot_path(bencher: Bencher) { + use rustowl::lsp::backend::Backend; + use rustowl::lsp::decoration::CursorRequest; + use std::path::Path; + use tower_lsp_server::LanguageServer; + use tower_lsp_server::ls_types::{Position, TextDocumentIdentifier, Uri}; + + const DUMMY_PACKAGE: &str = "./perf-tests/dummy-package"; + const TARGET_FILE: &str = "./perf-tests/dummy-package/src/lib.rs"; + + let target_path = std::fs::canonicalize(TARGET_FILE).expect("canonicalize TARGET_FILE"); + let target_uri: Uri = format!("file:///{}", target_path.display()) + .parse() + .expect("valid file URI"); + + let sysroot = std::process::Command::new("rustc") + .args(["--print", "sysroot"]) + .output() + .expect("run rustc") + .stdout; + let sysroot = String::from_utf8_lossy(&sysroot).trim().to_string(); + unsafe { + std::env::set_var("RUSTOWL_SYSROOT", sysroot); + } + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("failed to build tokio runtime"); + + let (service, _) = tower_lsp_server::LspService::build(Backend::new(1)).finish(); + let backend = service.inner(); + + let ok = rt.block_on(async { + backend + .load_analyzed_state_for_bench(Path::new(DUMMY_PACKAGE), false, false) + .await + }); + assert!(ok, "analysis preload failed; dummy package not analyzed"); + + // Seed the open-doc cache similar to a real LSP client. 
+ rt.block_on(async { + backend + .did_open(tower_lsp_server::ls_types::DidOpenTextDocumentParams { + text_document: tower_lsp_server::ls_types::TextDocumentItem { + uri: target_uri.clone(), + language_id: "rust".to_string(), + version: 1, + text: tokio::fs::read_to_string(&target_path) + .await + .expect("read target file"), + }, + }) + .await; + }); + + let req = CursorRequest { + document: TextDocumentIdentifier { uri: target_uri }, + // Point somewhere on a local variable (`files`). + position: Position { + line: 73, + character: 16, + }, + }; + + // Sanity check: make sure we actually produce decorations. + let warmup = rt + .block_on(async { backend.cursor(req.clone()).await }) + .expect("cursor request failed"); + assert!( + !warmup.decorations.is_empty(), + "cursor warmup produced no decorations" + ); + + bencher.bench(|| { + let decorations = rt.block_on(async { backend.cursor(req.clone()).await }); + black_box(decorations.is_ok()); + }); +} + +#[cfg(feature = "bench")] +#[divan::bench(sample_count = 20)] +fn cursor_decos_disk_fallback(bencher: Bencher) { + use rustowl::lsp::backend::Backend; + use rustowl::lsp::decoration::CursorRequest; + use std::path::Path; + use tower_lsp_server::ls_types::{Position, TextDocumentIdentifier, Uri}; + + const DUMMY_PACKAGE: &str = "./perf-tests/dummy-package"; + const TARGET_FILE: &str = "./perf-tests/dummy-package/src/lib.rs"; + + let target_path = std::fs::canonicalize(TARGET_FILE).expect("canonicalize TARGET_FILE"); + let target_uri: Uri = format!("file:///{}", target_path.display()) + .parse() + .expect("valid file URI"); + + let sysroot = std::process::Command::new("rustc") + .args(["--print", "sysroot"]) + .output() + .expect("run rustc") + .stdout; + let sysroot = String::from_utf8_lossy(&sysroot).trim().to_string(); + unsafe { + std::env::set_var("RUSTOWL_SYSROOT", sysroot); + } + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("failed to build tokio runtime"); + + 
let (service, _) = tower_lsp_server::LspService::build(Backend::new(1)).finish(); + let backend = service.inner(); + + let ok = rt.block_on(async { + backend + .load_analyzed_state_for_bench(Path::new(DUMMY_PACKAGE), false, false) + .await + }); + assert!(ok, "analysis preload failed; dummy package not analyzed"); + + // Intentionally do NOT call `did_open`; `cursor` must read from disk. + let req = CursorRequest { + document: TextDocumentIdentifier { uri: target_uri }, + // Point somewhere on a local variable (`files`). + position: Position { + line: 73, + character: 16, + }, + }; + + let warmup = rt + .block_on(async { backend.cursor(req.clone()).await }) + .expect("cursor request failed"); + assert!( + !warmup.decorations.is_empty(), + "cursor warmup produced no decorations" + ); + + bencher.bench(|| { + let decorations = rt.block_on(async { backend.cursor(req.clone()).await }); + black_box(decorations.is_ok()); + }); +} diff --git a/crates/rustowl/benches/line_col_bench.rs b/crates/rustowl/benches/line_col_bench.rs new file mode 100644 index 00000000..9c606064 --- /dev/null +++ b/crates/rustowl/benches/line_col_bench.rs @@ -0,0 +1,110 @@ +use divan::{AllocProfiler, Bencher, black_box}; +use rand::rngs::SmallRng; +use rand::{Rng, SeedableRng}; +use rustowl::models::Loc; +use rustowl::utils::{NormalizedByteCharIndex, index_to_line_char, line_char_to_index}; +use std::cell::RefCell; +use std::sync::Arc; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +use tikv_jemallocator::Jemalloc; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::new(Jemalloc); + +#[cfg(any(target_os = "windows", miri))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::system(); + +fn main() { + divan::main(); +} + +thread_local! 
{ + static SOURCE: RefCell, u32)>> = const { RefCell::new(None) }; + static RNG: RefCell = RefCell::new(SmallRng::seed_from_u64(42)); +} + +fn get_or_init_source() -> (Arc, u32) { + SOURCE.with(|cell| { + let mut borrowed = cell.borrow_mut(); + if borrowed.is_none() { + let mut rng = SmallRng::seed_from_u64(42); + let mut source = String::new(); + for i in 0..10_000u32 { + let len = (i % 40 + 5) as usize; + for _ in 0..len { + let v: u8 = rng.random::(); + source.push(char::from(b'a' + (v % 26))); + } + if i % 17 == 0 { + source.push('\r'); + } + source.push('\n'); + if i % 1111 == 0 { + source.push('🦀'); + } + } + let total = source.chars().filter(|&c| c != '\r').count() as u32; + *borrowed = Some((Arc::::from(source), total)); + } + borrowed.as_ref().unwrap().clone() + }) +} + +#[divan::bench_group(name = "line_col_conversion")] +mod line_col_conversion { + use super::*; + + #[divan::bench] + fn index_to_line_char_bench(bencher: Bencher) { + bencher + .with_inputs(get_or_init_source) + .bench_values(|(source, total)| { + let idx = RNG.with(|rng| Loc(rng.borrow_mut().random_range(0..total))); + let (l, c) = index_to_line_char(&source, idx); + black_box((l, c)); + }); + } + + #[divan::bench] + fn line_char_to_index_bench(bencher: Bencher) { + bencher + .with_inputs(|| get_or_init_source().0) + .bench_values(|source| { + let line = RNG.with(|rng| rng.borrow_mut().random_range(0..10_000u32)); + let idx = line_char_to_index(&source, line, 0); + black_box(idx); + }); + } + + #[divan::bench] + fn loc_from_byte_pos_uncached(bencher: Bencher) { + bencher + .with_inputs(get_or_init_source) + .bench_values(|(source, total)| { + // Pick a random logical char index and approximate as byte position. + // This is a microbench; we mainly care about relative overhead. 
+ let pos = RNG.with(|rng| rng.borrow_mut().random_range(0..total)); + let loc = rustowl::models::Loc::new(&source, pos, 0); + black_box(loc); + }); + } + + #[divan::bench] + fn loc_from_byte_pos_cached(bencher: Bencher) { + bencher + .with_inputs(|| { + let (source, total) = get_or_init_source(); + let index = NormalizedByteCharIndex::new(&source); + (source, total, index) + }) + .bench_values(|(source, total, index)| { + // Keep the index reused across iterations. + let pos = RNG.with(|rng| rng.borrow_mut().random_range(0..total)); + let loc = index.loc_from_byte_pos(pos, 0); + black_box((source, loc)); + }); + } +} diff --git a/crates/rustowl/benches/rustowl_bench_simple.rs b/crates/rustowl/benches/rustowl_bench_simple.rs new file mode 100644 index 00000000..7be8a33f --- /dev/null +++ b/crates/rustowl/benches/rustowl_bench_simple.rs @@ -0,0 +1,109 @@ +use divan::{AllocProfiler, Bencher, black_box}; +use std::process::Command; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +use tikv_jemallocator::Jemalloc; + +#[cfg(all(any(target_os = "linux", target_os = "macos"), not(miri)))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::new(Jemalloc); + +#[cfg(any(target_os = "windows", miri))] +#[global_allocator] +static ALLOC: AllocProfiler = AllocProfiler::system(); + +fn main() { + // Ensure rustowl binary is built before running benchmarks + let output = Command::new("cargo") + .args(["build", "--release", "--bin", "rustowl"]) + .output() + .expect("Failed to build rustowl"); + + if !output.status.success() { + panic!( + "Failed to build rustowl: {}", + String::from_utf8_lossy(&output.stderr) + ); + } + + divan::main(); +} + +const DUMMY_PACKAGE: &str = "./perf-tests/dummy-package"; + +fn rustowl_bin_path() -> std::path::PathBuf { + // `cargo bench -p rustowl` runs the bench binary with CWD set + // to `crates/rustowl`, but `cargo build -p rustowl` writes the binary + // to the workspace root `target/`. 
+ let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let candidates = [ + manifest_dir.join("../../target/release/rustowl"), + manifest_dir.join("../../target/release/rustowl.exe"), + manifest_dir.join("target/release/rustowl"), + manifest_dir.join("target/release/rustowl.exe"), + ]; + + for path in candidates { + if path.is_file() { + return path; + } + } + + // Fall back to whatever is on PATH; this keeps the benchmark usable + // even if run outside the workspace layout. + std::path::PathBuf::from("rustowl") +} + +#[divan::bench_group(name = "rustowl_check", sample_count = 20)] +mod rustowl_check { + use super::*; + + #[divan::bench] + fn default(bencher: Bencher) { + bencher.bench(|| { + let output = Command::new(rustowl_bin_path()) + .args(["check", DUMMY_PACKAGE]) + .output() + .expect("Failed to run rustowl check"); + black_box(output.status.success()); + }); + } + + #[divan::bench] + fn all_targets(bencher: Bencher) { + bencher.bench(|| { + let output = Command::new(rustowl_bin_path()) + .args(["check", DUMMY_PACKAGE, "--all-targets"]) + .output() + .expect("Failed to run rustowl check with all targets"); + black_box(output.status.success()); + }); + } + + #[divan::bench] + fn all_features(bencher: Bencher) { + bencher.bench(|| { + let output = Command::new(rustowl_bin_path()) + .args(["check", DUMMY_PACKAGE, "--all-features"]) + .output() + .expect("Failed to run rustowl check with all features"); + black_box(output.status.success()); + }); + } +} + +#[divan::bench_group(name = "rustowl_comprehensive", sample_count = 20)] +mod rustowl_comprehensive { + use super::*; + + #[divan::bench] + fn comprehensive(bencher: Bencher) { + bencher.bench(|| { + let output = Command::new(rustowl_bin_path()) + .args(["check", DUMMY_PACKAGE, "--all-targets", "--all-features"]) + .output() + .expect("Failed to run comprehensive rustowl check"); + black_box(output.status.success()); + }); + } +} diff --git a/build.rs b/crates/rustowl/build.rs 
similarity index 50% rename from build.rs rename to crates/rustowl/build.rs index b7b7fe99..fb4408c1 100644 --- a/build.rs +++ b/crates/rustowl/build.rs @@ -22,6 +22,25 @@ fn main() -> Result<(), Error> { let host_tuple = get_host_tuple(); println!("cargo::rustc-env=HOST_TUPLE={host_tuple}"); + // Git information for detailed version output + // Always set these env vars (empty string if not found, handled at runtime) + println!( + "cargo::rustc-env=GIT_TAG={}", + get_git_tag().unwrap_or_default() + ); + println!( + "cargo::rustc-env=GIT_COMMIT_HASH={}", + get_git_commit_hash().unwrap_or_default() + ); + println!( + "cargo::rustc-env=BUILD_TIME={}", + get_build_time().unwrap_or_default() + ); + println!( + "cargo::rustc-env=RUSTC_VERSION={}", + get_rustc_version().unwrap_or_default() + ); + #[cfg(target_os = "macos")] { println!("cargo::rustc-link-arg-bin=rustowlc=-Wl,-rpath,@executable_path/../lib"); @@ -63,9 +82,21 @@ fn get_toolchain() -> String { } else if let Ok(v) = env::var("TOOLCHAIN_CHANNEL") { format!("{v}-{}", get_host_tuple()) } else { - let v = std::fs::read_to_string("./scripts/build/channel") + // Fallback: parse channel from rust-toolchain.toml. 
+ let v = std::fs::read_to_string("./rust-toolchain.toml") .expect("there are no toolchain specifier"); - format!("{}-{}", v.trim(), get_host_tuple()) + let channel = v + .lines() + .find_map(|line| { + let line = line.trim(); + let rest = line.strip_prefix("channel")?.trim_start(); + let rest = rest.strip_prefix('=')?.trim(); + rest.strip_prefix('"') + .and_then(|s| s.strip_suffix('"')) + .map(|s| s.to_string()) + }) + .expect("failed to parse toolchain channel"); + format!("{}-{}", channel.trim(), get_host_tuple()) } } fn get_channel() -> String { @@ -87,3 +118,52 @@ fn get_host_tuple() -> String { .map(|v| String::from_utf8(v.stdout).unwrap().trim().to_string()) .expect("failed to obtain host-tuple") } + +fn get_git_tag() -> Option { + Command::new("git") + .args(["describe", "--tags", "--abbrev=0"]) + .output() + .ok() + .filter(|output| output.status.success()) + .and_then(|output| { + String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string()) + }) + .filter(|s| !s.is_empty()) +} + +fn get_git_commit_hash() -> Option { + Command::new("git") + .args(["rev-parse", "--short", "HEAD"]) + .output() + .ok() + .filter(|output| output.status.success()) + .and_then(|output| { + String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string()) + }) + .filter(|s| !s.is_empty()) +} + +fn get_build_time() -> Option { + use jiff::{Unit, Zoned}; + + let now = Zoned::now().in_tz("UTC").ok()?.round(Unit::Second).ok()?; + Some(now.strftime("%Y-%m-%d %H:%M:%S UTC").to_string()) +} + +fn get_rustc_version() -> Option { + Command::new(env::var("RUSTC").unwrap_or("rustc".to_string())) + .args(["--version"]) + .output() + .ok() + .filter(|output| output.status.success()) + .and_then(|output| { + String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string()) + }) + .filter(|s| !s.is_empty()) +} diff --git a/src/bin/core/analyze.rs b/crates/rustowl/src/bin/core/analyze.rs similarity index 58% rename from src/bin/core/analyze.rs rename to 
crates/rustowl/src/bin/core/analyze.rs index 877174d2..57876dad 100644 --- a/src/bin/core/analyze.rs +++ b/crates/rustowl/src/bin/core/analyze.rs @@ -1,18 +1,21 @@ mod polonius_analyzer; +mod shared; mod transform; +#[cfg(test)] +mod transform_tests; + use super::cache; +use ecow::{EcoString, EcoVec}; use rustc_borrowck::consumers::{ - ConsumerOptions, PoloniusInput, PoloniusOutput, get_body_with_borrowck_facts, + BodyWithBorrowckFacts, ConsumerOptions, PoloniusInput, PoloniusOutput, + get_bodies_with_borrowck_facts, }; use rustc_hir::def_id::{LOCAL_CRATE, LocalDefId}; -use rustc_middle::{ - mir::{BasicBlock, Local}, - ty::TyCtxt, -}; -use rustc_span::Span; -use rustowl::models::*; -use std::collections::HashMap; +use rustc_middle::{mir::Local, ty::TyCtxt}; +use rustowl::models::FoldIndexMap as HashMap; +use rustowl::models::range_vec_from_vec; +use rustowl::models::{DeclVec, FnLocal, Function, MirBasicBlock, MirDecl, Range}; use std::future::Future; use std::pin::Pin; @@ -27,25 +30,16 @@ pub struct AnalyzeResult { } pub enum MirAnalyzerInitResult { - Cached(AnalyzeResult), + Cached(Box), Analyzer(MirAnalyzeFuture), } -fn range_from_span(source: &str, span: Span, offset: u32) -> Option { - let from = Loc::new(source, span.lo().0, offset); - let until = Loc::new(source, span.hi().0, offset); - Range::new(from, until) -} -fn sort_locs(v: &mut [(BasicBlock, usize)]) { - v.sort_by(|a, b| a.0.cmp(&b.0).then(a.1.cmp(&b.1))); -} - pub struct MirAnalyzer { file_name: String, local_decls: HashMap, user_vars: HashMap, input: PoloniusInput, - basic_blocks: Vec, + basic_blocks: EcoVec, fn_id: LocalDefId, file_hash: String, mir_hash: String, @@ -56,10 +50,22 @@ pub struct MirAnalyzer { drop_range: HashMap>, } impl MirAnalyzer { - /// initialize analyzer - pub fn init(tcx: TyCtxt<'_>, fn_id: LocalDefId) -> MirAnalyzerInitResult { - let mut facts = - get_body_with_borrowck_facts(tcx, fn_id, ConsumerOptions::PoloniusInputFacts); + /// initialize analyzer for the function and all 
nested bodies (closures, async blocks) + pub fn batch_init<'tcx>(tcx: TyCtxt<'tcx>, fn_id: LocalDefId) -> Vec { + let bodies = + get_bodies_with_borrowck_facts(tcx, fn_id, ConsumerOptions::PoloniusInputFacts); + + bodies + .into_iter() + .map(|(def_id, facts)| Self::init_one(tcx, def_id, facts)) + .collect() + } + + fn init_one<'tcx>( + tcx: TyCtxt<'tcx>, + fn_id: LocalDefId, + mut facts: BodyWithBorrowckFacts<'tcx>, + ) -> MirAnalyzerInitResult { let input = *facts.input_facts.take().unwrap(); let location_table = facts.location_table.take().unwrap(); @@ -75,7 +81,7 @@ impl MirAnalyzer { let path = file_name.to_path(rustc_span::FileNameDisplayPreference::Local); let source = std::fs::read_to_string(path).unwrap(); let file_name = path.to_string_lossy().to_string(); - log::info!("facts of {fn_id:?} prepared; start analyze of {fn_id:?}"); + tracing::info!("facts of {fn_id:?} prepared; start analyze of {fn_id:?}"); // collect local declared vars // this must be done in local thread @@ -100,15 +106,15 @@ impl MirAnalyzer { *cache = cache::get_cache(&tcx.crate_name(LOCAL_CRATE).to_string()); } if let Some(cache) = cache.as_mut() - && let Some(analyzed) = cache.get_cache(&file_hash, &mir_hash) + && let Some(analyzed) = cache.get_cache(&file_hash, &mir_hash, Some(&file_name)) { - log::info!("MIR cache hit: {fn_id:?}"); - return MirAnalyzerInitResult::Cached(AnalyzeResult { + tracing::info!("MIR cache hit: {fn_id:?}"); + return MirAnalyzerInitResult::Cached(Box::new(AnalyzeResult { file_name, file_hash, mir_hash, - analyzed: analyzed.clone(), - }); + analyzed, + })); } drop(cache); @@ -131,11 +137,11 @@ impl MirAnalyzer { let borrow_data = transform::BorrowMap::new(&facts.borrow_set); let analyzer = Box::pin(async move { - log::info!("start re-computing borrow check with dump: true"); + tracing::info!("start re-computing borrow check with dump: true"); // compute accurate region, which may eliminate invalid region let output_datafrog = PoloniusOutput::compute(&input, 
polonius_engine::Algorithm::DatafrogOpt, true); - log::info!("borrow check finished"); + tracing::info!("borrow check finished"); let accurate_live = polonius_analyzer::get_accurate_live( &output_datafrog, @@ -181,50 +187,52 @@ impl MirAnalyzer { /// collect declared variables in MIR body /// final step of analysis - fn collect_decls(&self) -> Vec { + fn collect_decls(&self) -> DeclVec { let user_vars = &self.user_vars; let lives = &self.accurate_live; let must_live_at = &self.must_live; let drop_range = &self.drop_range; - self.local_decls - .iter() - .map(|(local, ty)| { - let ty = ty.clone(); - let must_live_at = must_live_at.get(local).cloned().unwrap_or(Vec::new()); - let lives = lives.get(local).cloned().unwrap_or(Vec::new()); - let shared_borrow = self.shared_live.get(local).cloned().unwrap_or(Vec::new()); - let mutable_borrow = self.mutable_live.get(local).cloned().unwrap_or(Vec::new()); - let drop = self.is_drop(*local); - let drop_range = drop_range.get(local).cloned().unwrap_or(Vec::new()); - let fn_local = FnLocal::new(local.as_u32(), self.fn_id.local_def_index.as_u32()); - if let Some((span, name)) = user_vars.get(local).cloned() { - MirDecl::User { - local: fn_local, - name, - span, - ty, - lives, - shared_borrow, - mutable_borrow, - must_live_at, - drop, - drop_range, - } - } else { - MirDecl::Other { - local: fn_local, - ty, - lives, - shared_borrow, - mutable_borrow, - drop, - drop_range, - must_live_at, - } + let mut result = DeclVec::with_capacity(self.local_decls.len()); + + for (local, ty) in &self.local_decls { + let ty: EcoString = ty.as_str().into(); + let must_live_at = must_live_at.get(local).cloned().unwrap_or_default(); + let lives = lives.get(local).cloned().unwrap_or_default(); + let shared_borrow = self.shared_live.get(local).cloned().unwrap_or_default(); + let mutable_borrow = self.mutable_live.get(local).cloned().unwrap_or_default(); + let drop = self.is_drop(*local); + let drop_range = 
drop_range.get(local).cloned().unwrap_or_default(); + + let fn_local = FnLocal::new(local.as_u32(), self.fn_id.local_def_index.as_u32()); + let decl = if let Some((span, name)) = user_vars.get(local).cloned() { + MirDecl::User { + local: fn_local, + name: EcoString::from(name.as_str()), + span, + ty, + lives: range_vec_from_vec(lives), + shared_borrow: range_vec_from_vec(shared_borrow), + mutable_borrow: range_vec_from_vec(mutable_borrow), + must_live_at: range_vec_from_vec(must_live_at), + drop, + drop_range: range_vec_from_vec(drop_range), } - }) - .collect() + } else { + MirDecl::Other { + local: fn_local, + ty, + lives: range_vec_from_vec(lives), + shared_borrow: range_vec_from_vec(shared_borrow), + mutable_borrow: range_vec_from_vec(mutable_borrow), + drop, + drop_range: range_vec_from_vec(drop_range), + must_live_at: range_vec_from_vec(must_live_at), + } + }; + result.push(decl); + } + result } fn is_drop(&self, local: Local) -> bool { @@ -253,3 +261,35 @@ impl MirAnalyzer { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn analyze_result_is_send_sync() { + fn assert_send_sync() {} + assert_send_sync::(); + } + + #[test] + fn analyze_result_passes_through_cached_variant() { + let result = AnalyzeResult { + file_name: "file.rs".to_string(), + file_hash: "h1".to_string(), + mir_hash: "h2".to_string(), + analyzed: Function { + fn_id: 123, + basic_blocks: EcoVec::new(), + decls: DeclVec::new(), + }, + }; + + let init = MirAnalyzerInitResult::Cached(Box::new(result)); + let MirAnalyzerInitResult::Cached(boxed) = init else { + panic!("expected Cached"); + }; + + assert_eq!(boxed.analyzed.fn_id, 123); + } +} diff --git a/src/bin/core/analyze/polonius_analyzer.rs b/crates/rustowl/src/bin/core/analyze/polonius_analyzer.rs similarity index 64% rename from src/bin/core/analyze/polonius_analyzer.rs rename to crates/rustowl/src/bin/core/analyze/polonius_analyzer.rs index c9490a76..efbb037d 100644 --- a/src/bin/core/analyze/polonius_analyzer.rs +++ 
b/crates/rustowl/src/bin/core/analyze/polonius_analyzer.rs @@ -3,8 +3,9 @@ use rayon::prelude::*; use rustc_borrowck::consumers::{PoloniusLocationTable, PoloniusOutput}; use rustc_index::Idx; use rustc_middle::mir::Local; -use rustowl::{models::*, utils}; -use std::collections::{HashMap, HashSet}; +use rustowl::models::{FoldIndexMap as HashMap, FoldIndexSet as HashSet}; +use rustowl::models::{MirBasicBlock, Range}; +use rustowl::utils; pub fn get_accurate_live( datafrog: &PoloniusOutput, @@ -29,8 +30,8 @@ pub fn get_borrow_live( basic_blocks: &[MirBasicBlock], ) -> (HashMap>, HashMap>) { let output = datafrog; - let mut shared_borrows = HashMap::new(); - let mut mutable_borrows = HashMap::new(); + let mut shared_borrows = HashMap::default(); + let mut mutable_borrows = HashMap::default(); for (location_idx, borrow_idc) in output.loan_live_at.iter() { let location = location_table.to_rich_location(*location_idx); for borrow_idx in borrow_idc { @@ -86,18 +87,18 @@ pub fn get_must_live( basic_blocks: &[MirBasicBlock], ) -> HashMap> { // obtain a map that region -> region contained locations - let mut region_locations = HashMap::new(); + let mut region_locations = HashMap::default(); for (location_idx, region_idc) in datafrog.origin_live_on_entry.iter() { for region_idx in region_idc { region_locations .entry(*region_idx) - .or_insert_with(HashSet::new) + .or_insert_with(HashSet::default) .insert(*location_idx); } } // obtain a map that borrow index -> local - let mut borrow_local = HashMap::new(); + let mut borrow_local = HashMap::default(); for (local, borrow_idc) in borrow_map.local_map().iter() { for borrow_idx in borrow_idc { borrow_local.insert(*borrow_idx, *local); @@ -105,31 +106,31 @@ pub fn get_must_live( } // check all regions' subset that must be satisfied - let mut subsets = HashMap::new(); + let mut subsets = HashMap::default(); for (_, subset) in datafrog.subset.iter() { for (sup, subs) in subset.iter() { subsets .entry(*sup) - 
.or_insert_with(HashSet::new) + .or_insert_with(HashSet::default) .extend(subs.iter().copied()); } } // obtain a map that region -> locations // a region must contains the locations - let mut region_must_locations = HashMap::new(); + let mut region_must_locations = HashMap::default(); for (sup, subs) in subsets.iter() { for sub in subs { if let Some(locs) = region_locations.get(sub) { region_must_locations .entry(*sup) - .or_insert_with(HashSet::new) + .or_insert_with(HashSet::default) .extend(locs.iter().copied()); } } } // obtain a map that local -> locations // a local must lives in the locations - let mut local_must_locations = HashMap::new(); + let mut local_must_locations = HashMap::default(); for (_location, region_borrows) in datafrog.origin_contains_loan_at.iter() { for (region, borrows) in region_borrows.iter() { for borrow in borrows { @@ -138,7 +139,7 @@ pub fn get_must_live( { local_must_locations .entry(*local) - .or_insert_with(HashSet::new) + .or_insert_with(HashSet::default) .extend(locs.iter().copied()); } } @@ -180,26 +181,69 @@ pub fn get_range( location_table: &PoloniusLocationTable, basic_blocks: &[MirBasicBlock], ) -> HashMap> { - let mut local_locs = HashMap::new(); + use rustc_borrowck::consumers::RichLocation; + use rustc_middle::mir::BasicBlock; + + #[derive(Default)] + struct LocalLive { + starts: Vec<(BasicBlock, usize)>, + mids: Vec<(BasicBlock, usize)>, + } + + // Collect start/mid locations per local without building an intermediate RichLocation Vec + let mut locals_live: HashMap = HashMap::default(); for (loc_idx, locals) in live_on_entry { - let location = location_table.to_rich_location(loc_idx.index().into()); + let rich = location_table.to_rich_location(loc_idx.index().into()); for local in locals { - local_locs - .entry(local.index()) - .or_insert_with(Vec::new) - .push(location); + let entry = locals_live + .entry(local.index().try_into().unwrap()) + .or_insert_with(LocalLive::default); + match rich { + RichLocation::Start(l) 
=> entry.starts.push((l.block, l.statement_index)), + RichLocation::Mid(l) => entry.mids.push((l.block, l.statement_index)), + } } } - local_locs + + fn statement_location_to_range( + basic_blocks: &[MirBasicBlock], + block: BasicBlock, + statement_index: usize, + ) -> Option { + basic_blocks.get(block.index()).and_then(|bb| { + if statement_index < bb.statements.len() { + bb.statements.get(statement_index).map(|v| v.range()) + } else { + bb.terminator.as_ref().map(|v| v.range()) + } + }) + } + + locals_live .into_par_iter() - .map(|(local, locations)| { - ( - local.into(), - utils::eliminated_ranges(super::transform::rich_locations_to_ranges( - basic_blocks, - &locations, - )), - ) + .map(|(local_idx, mut live)| { + super::shared::sort_locs(&mut live.starts); + super::shared::sort_locs(&mut live.mids); + let n = live.starts.len().min(live.mids.len()); + if n != live.starts.len() || n != live.mids.len() { + tracing::debug!( + "get_range: starts({}) != mids({}); truncating to {}", + live.starts.len(), + live.mids.len(), + n + ); + } + let mut ranges = Vec::with_capacity(n); + for i in 0..n { + if let (Some(s), Some(m)) = ( + statement_location_to_range(basic_blocks, live.starts[i].0, live.starts[i].1), + statement_location_to_range(basic_blocks, live.mids[i].0, live.mids[i].1), + ) && let Some(r) = Range::new(s.from(), m.until()) + { + ranges.push(r); + } + } + (local_idx.into(), utils::eliminated_ranges(ranges)) }) .collect() } diff --git a/crates/rustowl/src/bin/core/analyze/shared.rs b/crates/rustowl/src/bin/core/analyze/shared.rs new file mode 100644 index 00000000..30b33905 --- /dev/null +++ b/crates/rustowl/src/bin/core/analyze/shared.rs @@ -0,0 +1,68 @@ +//! Shared analysis helpers extracted from MIR analyze pipeline. 
+use rustc_middle::mir::BasicBlock; +use rustc_span::Span; +use rustowl::models::Range; +use rustowl::utils::NormalizedByteCharIndex; + +pub fn range_from_span_indexed( + index: &NormalizedByteCharIndex, + span: Span, + offset: u32, +) -> Option { + let from = index.loc_from_byte_pos(span.lo().0, offset); + let until = index.loc_from_byte_pos(span.hi().0, offset); + Range::new(from, until) +} + +/// Sort (BasicBlock, index) pairs by block then index. +pub fn sort_locs(v: &mut [(BasicBlock, usize)]) { + v.sort_by(|a, b| a.0.cmp(&b.0).then(a.1.cmp(&b.1))); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn sort_locs_sorts_by_block_then_statement_index() { + let mut locs = vec![ + (BasicBlock::from_u32(2), 1), + (BasicBlock::from_u32(1), 99), + (BasicBlock::from_u32(1), 3), + (BasicBlock::from_u32(0), 5), + (BasicBlock::from_u32(2), 0), + ]; + + sort_locs(&mut locs); + + assert_eq!( + locs, + vec![ + (BasicBlock::from_u32(0), 5), + (BasicBlock::from_u32(1), 3), + (BasicBlock::from_u32(1), 99), + (BasicBlock::from_u32(2), 0), + (BasicBlock::from_u32(2), 1), + ] + ); + } + + #[test] + fn range_from_span_indexed_handles_offset_and_unicode() { + use rustc_span::{BytePos, Span}; + + // 'aé' => byte offsets: a(0..1), é(1..3), b(3..4) + let src = "aéb"; + let index = NormalizedByteCharIndex::new(src); + + let span = Span::with_root_ctxt(BytePos(1), BytePos(3)); + let range = range_from_span_indexed(&index, span, 0).expect("valid range"); + assert_eq!(u32::from(range.from()), 1); + assert_eq!(u32::from(range.until()), 2); + + let span_with_offset = Span::with_root_ctxt(BytePos(3), BytePos(4)); + let range = range_from_span_indexed(&index, span_with_offset, 1).expect("valid range"); + assert_eq!(u32::from(range.from()), 1); + assert_eq!(u32::from(range.until()), 2); + } +} diff --git a/crates/rustowl/src/bin/core/analyze/transform.rs b/crates/rustowl/src/bin/core/analyze/transform.rs new file mode 100644 index 00000000..5dbd6297 --- /dev/null +++ 
b/crates/rustowl/src/bin/core/analyze/transform.rs @@ -0,0 +1,313 @@ +use ecow::EcoVec; +use rayon::prelude::*; +use rustc_borrowck::consumers::{BorrowIndex, BorrowSet, RichLocation}; +use rustc_hir::def_id::LocalDefId; +use rustc_middle::{ + mir::{ + BasicBlock, BasicBlocks, Body, BorrowKind, Local, Location, Operand, Rvalue, StatementKind, + TerminatorKind, VarDebugInfoContents, + }, + ty::{TyCtxt, TypeFoldable, TypeFolder}, +}; +use rustc_span::source_map::SourceMap; +use rustowl::models::{ + FnLocal, FoldIndexMap as HashMap, FoldIndexSet as HashSet, MirBasicBlock, MirRval, + MirStatement, MirTerminator, Range, StatementVec, +}; + +/// RegionEraser to erase region variables from MIR body +/// This is required to hash MIR body +struct RegionEraser<'tcx> { + tcx: TyCtxt<'tcx>, +} +impl<'tcx> TypeFolder> for RegionEraser<'tcx> { + fn cx(&self) -> TyCtxt<'tcx> { + self.tcx + } + fn fold_region( + &mut self, + _r: as rustc_type_ir::Interner>::Region, + ) -> as rustc_type_ir::Interner>::Region { + self.tcx.lifetimes.re_static + } +} + +/// Erase region variables in MIR body +/// Refer: [`RegionEraser`] +pub fn erase_region_variables<'tcx>(tcx: TyCtxt<'tcx>, body: Body<'tcx>) -> Body<'tcx> { + let mut eraser = RegionEraser { tcx }; + + body.fold_with(&mut eraser) +} + +/// collect user defined variables from debug info in MIR +pub fn collect_user_vars( + source: &str, + offset: u32, + body: &Body<'_>, +) -> HashMap { + let index = rustowl::utils::NormalizedByteCharIndex::new(source); + + let mut result = HashMap::with_capacity_and_hasher( + body.var_debug_info.len(), + foldhash::quality::RandomState::default(), + ); + for debug in &body.var_debug_info { + let VarDebugInfoContents::Place(place) = &debug.value else { + continue; + }; + + let Some(range) = + super::shared::range_from_span_indexed(&index, debug.source_info.span, offset) + else { + continue; + }; + + result.insert(place.local, (range, debug.name.as_str().to_owned())); + } + result +} + +/// Collect and 
transform [`BasicBlocks`] into our data structure [`MirBasicBlock`]s. +pub fn collect_basic_blocks( + fn_id: LocalDefId, + source: &str, + offset: u32, + basic_blocks: &BasicBlocks<'_>, + source_map: &SourceMap, +) -> EcoVec { + // Building the byte→Loc index once per file removes the previous + // `Loc::new` per-span scan hot spot. + let index = rustowl::utils::NormalizedByteCharIndex::new(source); + let fn_u32 = fn_id.local_def_index.as_u32(); + + // A small threshold helps avoid rayon overhead on tiny blocks. + const PAR_THRESHOLD: usize = 64; + + let mut result = EcoVec::with_capacity(basic_blocks.len()); + + for (_bb, bb_data) in basic_blocks.iter_enumerated() { + // `source_map` is not Send, so the visibility filter must run on the + // current thread. + let mut visible = Vec::with_capacity(bb_data.statements.len()); + for stmt in &bb_data.statements { + if stmt.source_info.span.is_visible(source_map) { + visible.push(stmt); + } + } + + let mut bb_statements = StatementVec::with_capacity(visible.len()); + if visible.len() >= PAR_THRESHOLD { + let collected_statements: Vec<_> = visible + .par_iter() + .filter_map(|statement| statement_to_mir(&index, fn_u32, offset, statement)) + .collect(); + bb_statements.extend(collected_statements); + } else { + bb_statements.extend( + visible + .iter() + .filter_map(|statement| statement_to_mir(&index, fn_u32, offset, statement)), + ); + } + + let terminator = + bb_data + .terminator + .as_ref() + .and_then(|terminator| match &terminator.kind { + TerminatorKind::Drop { place, .. } => super::shared::range_from_span_indexed( + &index, + terminator.source_info.span, + offset, + ) + .map(|range| MirTerminator::Drop { + local: FnLocal::new(place.local.as_u32(), fn_u32), + range, + }), + TerminatorKind::Call { + destination, + fn_span, + .. 
+ } => super::shared::range_from_span_indexed(&index, *fn_span, offset).map( + |fn_span| MirTerminator::Call { + destination_local: FnLocal::new(destination.local.as_u32(), fn_u32), + fn_span, + }, + ), + _ => super::shared::range_from_span_indexed( + &index, + terminator.source_info.span, + offset, + ) + .map(|range| MirTerminator::Other { range }), + }); + + result.push(MirBasicBlock { + statements: bb_statements, + terminator, + }); + } + + result +} + +fn statement_to_mir( + index: &rustowl::utils::NormalizedByteCharIndex, + fn_u32: u32, + offset: u32, + statement: &rustc_middle::mir::Statement<'_>, +) -> Option { + match &statement.kind { + StatementKind::Assign(v) => { + let (place, rval) = &**v; + let target_local_index = place.local.as_u32(); + let range_opt = + super::shared::range_from_span_indexed(index, statement.source_info.span, offset); + + let rv = match rval { + Rvalue::Use(Operand::Move(p)) => { + let local = p.local; + range_opt.map(|range| MirRval::Move { + target_local: FnLocal::new(local.as_u32(), fn_u32), + range, + }) + } + Rvalue::Ref(_region, kind, place) => { + let mutable = matches!(kind, BorrowKind::Mut { .. 
}); + let local = place.local; + let outlive = None; + range_opt.map(|range| MirRval::Borrow { + target_local: FnLocal::new(local.as_u32(), fn_u32), + range, + mutable, + outlive, + }) + } + _ => None, + }; + + range_opt.map(|range| MirStatement::Assign { + target_local: FnLocal::new(target_local_index, fn_u32), + range, + rval: rv, + }) + } + _ => super::shared::range_from_span_indexed(index, statement.source_info.span, offset) + .map(|range| MirStatement::Other { range }), + } +} + +fn statement_location_to_range( + basic_blocks: &[MirBasicBlock], + basic_block: usize, + statement: usize, +) -> Option { + basic_blocks.get(basic_block).and_then(|bb| { + if statement < bb.statements.len() { + bb.statements.get(statement).map(|v| v.range()) + } else { + bb.terminator.as_ref().map(|v| v.range()) + } + }) +} + +pub fn rich_locations_to_ranges( + basic_blocks: &[MirBasicBlock], + locations: &[RichLocation], +) -> Vec { + let mut starts: Vec<(BasicBlock, usize)> = Vec::new(); + let mut mids: Vec<(BasicBlock, usize)> = Vec::new(); + + for rich in locations { + match rich { + RichLocation::Start(l) => { + starts.push((l.block, l.statement_index)); + } + RichLocation::Mid(l) => { + mids.push((l.block, l.statement_index)); + } + } + } + + super::shared::sort_locs(&mut starts); + super::shared::sort_locs(&mut mids); + + let n = starts.len().min(mids.len()); + if n != starts.len() || n != mids.len() { + tracing::debug!( + "rich_locations_to_ranges: starts({}) != mids({}); truncating to {}", + starts.len(), + mids.len(), + n + ); + } + starts[..n] + .par_iter() + .zip(mids[..n].par_iter()) + .filter_map(|(s, m)| { + let sr = statement_location_to_range(basic_blocks, s.0.index(), s.1); + let mr = statement_location_to_range(basic_blocks, m.0.index(), m.1); + match (sr, mr) { + (Some(s), Some(m)) => Range::new(s.from(), m.until()), + _ => None, + } + }) + .collect() +} + +/// Our representation of [`rustc_borrowck::consumers::BorrowData`] +pub enum BorrowData { + Shared { + 
borrowed: Local, + #[allow(dead_code)] + assigned: Local, + }, + Mutable { + borrowed: Local, + #[allow(dead_code)] + assigned: Local, + }, +} + +/// A map type from [`BorrowIndex`] to [`BorrowData`] +pub struct BorrowMap { + location_map: Vec<(Location, BorrowData)>, + local_map: HashMap>, +} +impl BorrowMap { + /// Get [`BorrowMap`] from [`BorrowSet`] + pub fn new(borrow_set: &BorrowSet<'_>) -> Self { + let mut location_map = Vec::new(); + // BorrowIndex corresponds to Location index + for (location, data) in borrow_set.location_map().iter() { + let data = if data.kind().mutability().is_mut() { + BorrowData::Mutable { + borrowed: data.borrowed_place().local, + assigned: data.assigned_place().local, + } + } else { + BorrowData::Shared { + borrowed: data.borrowed_place().local, + assigned: data.assigned_place().local, + } + }; + location_map.push((*location, data)); + } + let local_map = borrow_set + .local_map() + .iter() + .map(|(local, borrows)| (*local, borrows.iter().copied().collect())) + .collect(); + Self { + location_map, + local_map, + } + } + pub fn get_from_borrow_index(&self, borrow: BorrowIndex) -> Option<&(Location, BorrowData)> { + self.location_map.get(borrow.index()) + } + pub fn local_map(&self) -> &HashMap> { + &self.local_map + } +} diff --git a/crates/rustowl/src/bin/core/analyze/transform_tests.rs b/crates/rustowl/src/bin/core/analyze/transform_tests.rs new file mode 100644 index 00000000..b2b3b978 --- /dev/null +++ b/crates/rustowl/src/bin/core/analyze/transform_tests.rs @@ -0,0 +1,102 @@ +use super::transform; +use rustc_borrowck::consumers::RichLocation; +use rustc_middle::mir::BasicBlock; +use rustowl::models::{MirBasicBlock, MirStatement, Range, StatementVec}; + +fn mk_range(from: u32, until: u32) -> Range { + Range::new(from.into(), until.into()).expect("valid range") +} + +#[test] +fn rich_locations_to_ranges_pairs_start_and_mid() { + let basic_blocks = vec![MirBasicBlock { + statements: StatementVec::from(vec![ + MirStatement::Other { 
+ range: mk_range(10, 11), + }, + MirStatement::Other { + range: mk_range(20, 21), + }, + ]), + terminator: None, + }]; + + let locations = vec![ + RichLocation::Start(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 0, + }), + RichLocation::Mid(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 1, + }), + ]; + + let ranges = transform::rich_locations_to_ranges(&basic_blocks, &locations); + assert_eq!(ranges.len(), 1); + assert_eq!(u32::from(ranges[0].from()), 10); + assert_eq!(u32::from(ranges[0].until()), 21); +} + +#[test] +fn rich_locations_to_ranges_truncates_mismatched_start_mid_counts() { + let basic_blocks = vec![MirBasicBlock { + statements: StatementVec::from(vec![ + MirStatement::Other { + range: mk_range(1, 2), + }, + MirStatement::Other { + range: mk_range(3, 4), + }, + ]), + terminator: None, + }]; + + let locations = vec![ + RichLocation::Start(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 0, + }), + RichLocation::Start(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 1, + }), + RichLocation::Mid(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 0, + }), + ]; + + let ranges = transform::rich_locations_to_ranges(&basic_blocks, &locations); + assert_eq!(ranges.len(), 1); + assert_eq!(u32::from(ranges[0].from()), 1); + assert_eq!(u32::from(ranges[0].until()), 2); +} + +#[test] +fn rich_locations_to_ranges_uses_terminator_range_when_statement_index_out_of_bounds() { + let basic_blocks = vec![MirBasicBlock { + statements: StatementVec::from(vec![MirStatement::Other { + range: mk_range(10, 11), + }]), + terminator: Some(rustowl::models::MirTerminator::Other { + range: mk_range(40, 50), + }), + }]; + + let locations = vec![ + RichLocation::Start(rustc_middle::mir::Location { + block: BasicBlock::from_u32(0), + statement_index: 999, + }), + RichLocation::Mid(rustc_middle::mir::Location { + 
block: BasicBlock::from_u32(0), + statement_index: 999, + }), + ]; + + let ranges = transform::rich_locations_to_ranges(&basic_blocks, &locations); + assert_eq!(ranges.len(), 1); + assert_eq!(u32::from(ranges[0].from()), 40); + assert_eq!(u32::from(ranges[0].until()), 50); +} diff --git a/crates/rustowl/src/bin/core/cache.rs b/crates/rustowl/src/bin/core/cache.rs new file mode 100644 index 00000000..54ff8b5f --- /dev/null +++ b/crates/rustowl/src/bin/core/cache.rs @@ -0,0 +1,877 @@ +use indexmap::IndexMap; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_middle::ty::TyCtxt; +use rustc_query_system::ich::StableHashingContext; +use rustc_stable_hash::{FromStableHash, SipHasher128Hash}; +use rustowl::cache::CacheConfig; +use rustowl::models::Function; +use serde::{Deserialize, Serialize}; +use std::fs::OpenOptions; +use std::io::{BufWriter, Write}; +use std::path::Path; +use std::sync::{LazyLock, Mutex}; +use std::time::{SystemTime, UNIX_EPOCH}; + +pub static CACHE: LazyLock>> = LazyLock::new(|| Mutex::new(None)); + +#[derive(Debug, Clone)] +struct StableHashString(String); +impl StableHashString { + pub fn get(self) -> String { + self.0 + } +} +impl FromStableHash for StableHashString { + type Hash = SipHasher128Hash; + fn from(hash: Self::Hash) -> Self { + let byte0 = hash.0[0] as u128; + let byte1 = hash.0[1] as u128; + let byte = (byte0 << 64) | byte1; + Self(format!("{byte:x}")) + } +} + +pub struct Hasher<'a> { + hasher: StableHasher, + hash_ctx: StableHashingContext<'a>, +} + +impl<'tcx> Hasher<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>) -> Self { + Self { + hasher: StableHasher::default(), + hash_ctx: StableHashingContext::new(tcx.sess, tcx.untracked()), + } + } + + fn finish(self) -> String { + self.hasher.finish::().get() + } + + pub fn get_hash( + tcx: TyCtxt<'tcx>, + target: impl HashStable>, + ) -> String { + let mut new = Self::new(tcx); + target.hash_stable(&mut new.hash_ctx, &mut new.hasher); + new.finish() + } +} + 
+#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct CacheEntry { + /// The cached function data + pub function: Function, + /// Timestamp when this entry was created + pub created_at: u64, + /// Timestamp when this entry was last accessed + pub last_accessed: u64, + /// Number of times this entry has been accessed + pub access_count: u32, + /// File modification time when this entry was cached + pub file_mtime: Option, + /// Size in bytes of the cached data (for memory management) + pub data_size: usize, +} + +impl CacheEntry { + pub fn new(function: Function, file_mtime: Option) -> Self { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs()) + .unwrap_or(0); + + // Estimate data size via serialization to capture heap usage + let data_size = serde_json::to_vec(&function).map(|v| v.len()).unwrap_or(0); + + Self { + function, + created_at: now, + last_accessed: now, + access_count: 1, + file_mtime, + data_size, + } + } + + /// Mark this entry as accessed and update statistics + pub fn mark_accessed(&mut self) { + self.last_accessed = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs()) + .unwrap_or(self.last_accessed); + self.access_count = self.access_count.saturating_add(1); + } +} + +/// Cache statistics for monitoring and debugging +#[derive(Default, Debug, Clone)] +pub struct CacheStats { + pub hits: u64, + pub misses: u64, + pub evictions: u64, + pub invalidations: u64, // file-change-based removals + pub total_entries: usize, + pub total_memory_bytes: usize, +} + +impl CacheStats { + pub fn hit_rate(&self) -> f64 { + let total = self.hits + self.misses; + if total == 0 { + 0.0 + } else { + self.hits as f64 / total as f64 + } + } +} + +/// Robust cache with intelligent eviction and metadata tracking +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct CacheData { + /// Cache entries with metadata + entries: IndexMap, + /// Runtime statistics (not serialized) + #[serde(skip)] + stats: CacheStats, 
+ /// Version for compatibility checking + version: u32, + /// Cache configuration (not serialized, loaded from environment) + #[serde(skip)] + config: CacheConfig, +} + +/// Current cache version for compatibility checking +const CACHE_VERSION: u32 = 2; + +impl CacheData { + pub fn with_config(config: CacheConfig) -> Self { + Self { + entries: IndexMap::with_capacity(config.max_entries.min(64)), + stats: CacheStats::default(), + version: CACHE_VERSION, + config, + } + } + + /// Create a combined cache key from file and MIR hashes + fn make_key(file_hash: &str, mir_hash: &str) -> String { + format!("{file_hash}:{mir_hash}") + } + + /// Get file modification time for validation + fn get_file_mtime(file_path: &str) -> Option { + std::fs::metadata(file_path) + .ok() + .and_then(|metadata| metadata.modified().ok()) + .and_then(|time| time.duration_since(UNIX_EPOCH).ok()) + .map(|duration| duration.as_secs()) + } + + pub fn get_cache( + &mut self, + file_hash: &str, + mir_hash: &str, + file_path: Option<&str>, + ) -> Option { + let key = Self::make_key(file_hash, mir_hash); + + if self.config.use_lru_eviction { + if let Some(mut entry) = self.entries.shift_remove(&key) { + // Validate file modification time if file path is provided and validation is enabled + if let Some(file_path) = file_path + && self.config.validate_file_mtime + && let Some(cached_mtime) = entry.file_mtime + && let Some(current_mtime) = Self::get_file_mtime(file_path) + && current_mtime > cached_mtime + { + // File has been modified since caching, invalidate this entry + tracing::debug!( + "Cache entry invalidated due to file modification: {}", + file_path + ); + self.stats.invalidations += 1; + self.update_memory_stats(); + self.stats.misses += 1; + return None; + } + + entry.mark_accessed(); + let function = entry.function.clone(); + self.entries.insert(key, entry); + self.update_memory_stats(); + + // Evict if needed after reinsertion to prevent temporary overshoot + self.maybe_evict_entries(); + 
+ self.stats.hits += 1; + return Some(function); + } + } else { + // First, determine if the entry should be invalidated without holding a mutable borrow across removal + let should_invalidate = if let Some(entry) = self.entries.get(&key) { + if let Some(file_path) = file_path + && self.config.validate_file_mtime + && let Some(cached_mtime) = entry.file_mtime + && let Some(current_mtime) = Self::get_file_mtime(file_path) + && current_mtime > cached_mtime + { + true + } else { + false + } + } else { + false + }; + + if should_invalidate { + tracing::debug!( + "Cache entry invalidated due to file modification: {:?}", + file_path + ); + self.entries.swap_remove(&key); + self.stats.invalidations += 1; + self.update_memory_stats(); + self.stats.misses += 1; + return None; + } + + // Normal hit path + if let Some(entry) = self.entries.get_mut(&key) { + entry.mark_accessed(); + self.stats.hits += 1; + return Some(entry.function.clone()); + } + } + self.stats.misses += 1; + None + } + + pub fn insert_cache_with_file_path( + &mut self, + file_hash: String, + mir_hash: String, + analyzed: Function, + file_path: Option<&str>, + ) { + let key = Self::make_key(&file_hash, &mir_hash); + + // Get file modification time if available and validation is enabled + let file_mtime = if self.config.validate_file_mtime { + file_path.and_then(Self::get_file_mtime) + } else { + None + }; + + let entry = CacheEntry::new(analyzed, file_mtime); + + // Check if we need to evict entries before inserting + self.maybe_evict_entries(); + + self.entries.insert(key, entry); + self.update_memory_stats(); + + // Evict again after insertion to prevent temporary overshoot + self.maybe_evict_entries(); + + tracing::debug!( + "Cache entry inserted. 
Total entries: {}, Memory usage: {} bytes", + self.entries.len(), + self.stats.total_memory_bytes + ); + } + + /// Update memory usage statistics + fn update_memory_stats(&mut self) { + self.stats.total_entries = self.entries.len(); + self.stats.total_memory_bytes = self.entries.values().map(|entry| entry.data_size).sum(); + } + + /// Check if eviction is needed and perform it + fn maybe_evict_entries(&mut self) { + let needs_eviction = self.entries.len() >= self.config.max_entries + || self.stats.total_memory_bytes >= self.config.max_memory_bytes; + + if needs_eviction { + self.evict_entries(); + } + } + + /// Perform intelligent cache eviction + fn evict_entries(&mut self) { + let target_entries = ((self.config.max_entries * 8) / 10).max(1); // Keep >=1 entry + let target_memory = (self.config.max_memory_bytes * 8) / 10; + + let mut evicted_count = 0; + + if self.config.use_lru_eviction { + // LRU eviction: remove least recently used entries + while (self.entries.len() > target_entries + || self.stats.total_memory_bytes > target_memory) + && !self.entries.is_empty() + { + // Find entry with oldest last_accessed time + let oldest_key = self + .entries + .iter() + .min_by_key(|(_, entry)| entry.last_accessed) + .map(|(key, _)| key); + + if let Some(key) = oldest_key { + // Clone the key only when we need to remove it + let key_to_remove = key.clone(); + if let Some(removed) = self.entries.shift_remove(&key_to_remove) { + self.stats.total_memory_bytes = self + .stats + .total_memory_bytes + .saturating_sub(removed.data_size); + evicted_count += 1; + } + } else { + break; + } + } + } else { + // FIFO eviction: remove oldest entries by insertion order + while (self.entries.len() > target_entries + || self.stats.total_memory_bytes > target_memory) + && !self.entries.is_empty() + { + if let Some((_, removed)) = self.entries.shift_remove_index(0) { + self.stats.total_memory_bytes = self + .stats + .total_memory_bytes + .saturating_sub(removed.data_size); + evicted_count 
+= 1; + } + } + } + + self.stats.evictions += evicted_count; + self.update_memory_stats(); + + if evicted_count > 0 { + tracing::info!( + "Evicted {} cache entries. Remaining: {} entries, {} bytes", + evicted_count, + self.entries.len(), + self.stats.total_memory_bytes + ); + } + } + + /// Get cache statistics for monitoring + pub fn get_stats(&self) -> &CacheStats { + &self.stats + } + + /// Check if cache version is compatible + pub fn is_compatible(&self) -> bool { + self.version == CACHE_VERSION + } +} + +/// Get cache data +/// +/// If cache is not enabled, then return None. +/// If file doesn't exist, it returns empty [`CacheData`]. +/// If cache is corrupted or incompatible, it returns a new cache. +pub fn get_cache(krate: &str) -> Option { + if let Some(cache_path) = rustowl::cache::get_cache_path() { + let cache_path = cache_path.join(format!("{krate}.json")); + + // Get configuration from environment + let config = rustowl::cache::get_cache_config(); + + // Try to read and parse the cache file + match std::fs::read_to_string(&cache_path) { + Ok(content) => { + match serde_json::from_str::(&content) { + Ok(mut cache_data) => { + // Check version compatibility + if !cache_data.is_compatible() { + tracing::warn!( + "Cache version incompatible (found: {}, expected: {}), creating new cache", + cache_data.version, + CACHE_VERSION + ); + return Some(CacheData::with_config(config)); + } + + // Restore runtime configuration and statistics + cache_data.config = config; + cache_data.stats = CacheStats::default(); + cache_data.update_memory_stats(); + + tracing::info!( + "Cache loaded: {} entries, {} bytes from {}", + cache_data.entries.len(), + cache_data.stats.total_memory_bytes, + cache_path.display() + ); + + Some(cache_data) + } + Err(e) => { + tracing::warn!( + "Failed to parse cache file ({}), creating new cache: {}", + cache_path.display(), + e + ); + Some(CacheData::with_config(config)) + } + } + } + Err(e) => { + tracing::info!( + "Cache file not found or 
unreadable ({}), creating new cache: {}", + cache_path.display(), + e + ); + Some(CacheData::with_config(config)) + } + } + } else { + tracing::debug!("Cache disabled via configuration"); + None + } +} + +/// Write cache with atomic operations and robust error handling +pub fn write_cache(krate: &str, cache: &CacheData) { + if let Some(cache_dir) = rustowl::cache::get_cache_path() { + // Ensure cache directory exists + if let Err(e) = std::fs::create_dir_all(&cache_dir) { + tracing::error!( + "Failed to create cache directory {}: {}", + cache_dir.display(), + e + ); + return; + } + + let cache_path = cache_dir.join(format!("{krate}.json")); + let temp_path = cache_dir.join(format!("{krate}.json.tmp")); + + // Serialize cache data + let serialized = match serde_json::to_string_pretty(cache) { + Ok(data) => data, + Err(e) => { + tracing::error!("Failed to serialize cache data: {e}"); + return; + } + }; + + // Write to temporary file first for atomic operation + match write_cache_file(&temp_path, &serialized) { + Ok(()) => { + // Atomically move temporary file to final location + if let Err(e) = std::fs::rename(&temp_path, &cache_path) { + tracing::error!( + "Failed to move cache file from {} to {}: {}", + temp_path.display(), + cache_path.display(), + e + ); + // Clean up temporary file + let _ = std::fs::remove_file(&temp_path); + } else { + let stats = cache.get_stats(); + tracing::info!( + "Cache saved: {} entries, {} bytes, hit rate: {:.1}%, evictions: {}, invalidations: {} to {}", + stats.total_entries, + stats.total_memory_bytes, + stats.hit_rate() * 100.0, + stats.evictions, + stats.invalidations, + cache_path.display() + ); + } + } + Err(e) => { + tracing::error!("Failed to write cache to {}: {}", temp_path.display(), e); + // Clean up temporary file + let _ = std::fs::remove_file(&temp_path); + } + } + } else { + tracing::debug!("Cache disabled, skipping write"); + } +} + +/// Write cache data to file with proper error handling +fn write_cache_file(path: 
&Path, data: &str) -> Result<(), std::io::Error> { + let file = OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(path)?; + + let mut writer = BufWriter::new(file); + writer.write_all(data.as_bytes())?; + writer.flush()?; + + // Ensure data is written to disk + writer.into_inner()?.sync_all()?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn sample_function(id: u32) -> Function { + Function { + fn_id: id, + basic_blocks: ecow::EcoVec::new(), + decls: ecow::EcoVec::new(), + } + } + + struct EnvGuard { + key: &'static str, + old_value: Option, + } + + impl EnvGuard { + fn set(key: &'static str, value: &std::ffi::OsStr) -> Self { + let old_value = std::env::var_os(key); + unsafe { + std::env::set_var(key, value); + } + Self { key, old_value } + } + } + + impl Drop for EnvGuard { + fn drop(&mut self) { + if let Some(v) = self.old_value.take() { + unsafe { + std::env::set_var(self.key, v); + } + } else { + unsafe { + std::env::remove_var(self.key); + } + } + } + } + + fn cache_dir_guard() -> (tempfile::TempDir, EnvGuard) { + let tmp = tempfile::tempdir().expect("tempdir"); + let guard = EnvGuard::set("RUSTOWL_CACHE_DIR", tmp.path().as_os_str()); + (tmp, guard) + } + + #[test] + fn cache_stats_hit_rate_is_zero_with_no_requests() { + let stats = CacheStats::default(); + assert_eq!(stats.hit_rate(), 0.0); + } + + #[test] + fn cache_stats_hit_rate_divides_hits_by_total() { + let stats = CacheStats { + hits: 2, + misses: 3, + ..CacheStats::default() + }; + assert!((stats.hit_rate() - 0.4).abs() < f64::EPSILON); + } + + #[test] + fn insert_and_get_cache_hits_update_metrics_lru() { + let config = CacheConfig { + max_entries: 32, + max_memory_bytes: usize::MAX, + use_lru_eviction: true, + validate_file_mtime: false, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + cache.insert_cache_with_file_path( + "fh".to_string(), + "mh".to_string(), + sample_function(1), + None, + ); + + let hit = 
cache.get_cache("fh", "mh", None); + assert!(hit.is_some()); + + let stats = cache.get_stats(); + assert_eq!(stats.hits, 1); + assert_eq!(stats.misses, 0); + assert_eq!(stats.total_entries, 1); + assert!(stats.total_memory_bytes > 0); + } + + #[test] + fn get_cache_miss_updates_metrics_lru() { + let config = CacheConfig { + max_entries: 32, + max_memory_bytes: usize::MAX, + use_lru_eviction: true, + validate_file_mtime: false, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + let miss = cache.get_cache("nope", "missing", None); + assert!(miss.is_none()); + + let stats = cache.get_stats(); + assert_eq!(stats.hits, 0); + assert_eq!(stats.misses, 1); + } + + #[test] + fn insert_and_get_cache_hits_update_metrics_fifo() { + let config = CacheConfig { + max_entries: 32, + max_memory_bytes: usize::MAX, + use_lru_eviction: false, + validate_file_mtime: false, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + cache.insert_cache_with_file_path( + "fh".to_string(), + "mh".to_string(), + sample_function(7), + None, + ); + + let hit = cache.get_cache("fh", "mh", None); + assert!(hit.is_some()); + + let stats = cache.get_stats(); + assert_eq!(stats.hits, 1); + assert_eq!(stats.misses, 0); + } + + #[test] + fn fifo_eviction_happens_over_entry_limit() { + let config = CacheConfig { + max_entries: 2, + max_memory_bytes: 1_000_000, + use_lru_eviction: false, + validate_file_mtime: false, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + cache.insert_cache_with_file_path( + "f1".to_string(), + "m1".to_string(), + sample_function(1), + None, + ); + cache.insert_cache_with_file_path( + "f2".to_string(), + "m2".to_string(), + sample_function(2), + None, + ); + cache.insert_cache_with_file_path( + "f3".to_string(), + "m3".to_string(), + sample_function(3), + None, + ); + + // max_entries=2 keeps at least 1 and targets 80% => 1 entry. 
+ assert!(cache.entries.len() <= 2); + assert!(!cache.entries.is_empty()); + assert!(cache.get_stats().evictions >= 1); + } + + #[test] + fn lru_eviction_happens_over_entry_limit() { + let config = CacheConfig { + max_entries: 2, + max_memory_bytes: 1_000_000, + use_lru_eviction: true, + validate_file_mtime: false, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + cache.insert_cache_with_file_path( + "f1".to_string(), + "m1".to_string(), + sample_function(1), + None, + ); + cache.insert_cache_with_file_path( + "f2".to_string(), + "m2".to_string(), + sample_function(2), + None, + ); + cache.insert_cache_with_file_path( + "f3".to_string(), + "m3".to_string(), + sample_function(3), + None, + ); + + assert!(cache.entries.len() <= 2); + assert!(!cache.entries.is_empty()); + assert!(cache.get_stats().evictions >= 1); + } + + #[test] + fn lru_get_cache_invalidates_on_newer_file_mtime() { + let config = CacheConfig { + max_entries: 32, + max_memory_bytes: usize::MAX, + use_lru_eviction: true, + validate_file_mtime: true, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + let mut file_path = std::env::temp_dir(); + file_path.push("rustowl-cache-mtime-lru.txt"); + std::fs::write(&file_path, "v1").unwrap(); + let file_path = file_path.to_string_lossy().to_string(); + + cache.insert_cache_with_file_path( + "fh".to_string(), + "mh".to_string(), + sample_function(1), + Some(&file_path), + ); + assert!(cache.get_cache("fh", "mh", Some(&file_path)).is_some()); + + // Ensure mtime moves forward even on coarse filesystems. 
+ std::thread::sleep(std::time::Duration::from_secs(1)); + std::fs::write(&file_path, "v2").unwrap(); + + let invalidated = cache.get_cache("fh", "mh", Some(&file_path)); + assert!( + invalidated.is_none(), + "expected invalidation after mtime change" + ); + + let stats = cache.get_stats(); + assert_eq!(stats.invalidations, 1); + assert_eq!(stats.misses, 1); + } + + #[test] + fn fifo_get_cache_invalidates_on_newer_file_mtime() { + let config = CacheConfig { + max_entries: 32, + max_memory_bytes: usize::MAX, + use_lru_eviction: false, + validate_file_mtime: true, + enable_compression: false, + }; + let mut cache = CacheData::with_config(config); + + let mut file_path = std::env::temp_dir(); + file_path.push("rustowl-cache-mtime-fifo.txt"); + std::fs::write(&file_path, "v1").unwrap(); + let file_path = file_path.to_string_lossy().to_string(); + + cache.insert_cache_with_file_path( + "fh".to_string(), + "mh".to_string(), + sample_function(1), + Some(&file_path), + ); + assert!(cache.get_cache("fh", "mh", Some(&file_path)).is_some()); + + std::thread::sleep(std::time::Duration::from_secs(1)); + std::fs::write(&file_path, "v2").unwrap(); + + let invalidated = cache.get_cache("fh", "mh", Some(&file_path)); + assert!( + invalidated.is_none(), + "expected invalidation after mtime change" + ); + + let stats = cache.get_stats(); + assert_eq!(stats.invalidations, 1); + assert_eq!(stats.misses, 1); + } + + #[test] + fn get_cache_returns_new_cache_on_corrupted_json() { + let (_tmp, _guard) = cache_dir_guard(); + + let krate = "corrupt"; + let cache_path = rustowl::cache::get_cache_path() + .unwrap() + .join(format!("{krate}.json")); + std::fs::write(&cache_path, "{not json").unwrap(); + + let loaded = super::get_cache(krate).expect("cache enabled"); + assert!(loaded.entries.is_empty()); + assert!(loaded.is_compatible()); + } + + #[test] + fn get_cache_returns_new_cache_on_version_mismatch() { + let (_tmp, _guard) = cache_dir_guard(); + + let krate = "version_mismatch"; + let 
cache_path = rustowl::cache::get_cache_path() + .unwrap() + .join(format!("{krate}.json")); + + let config = rustowl::cache::get_cache_config(); + let mut cache = CacheData::with_config(config); + cache.version = CACHE_VERSION.saturating_sub(1); + cache.entries.insert( + CacheData::make_key("fh", "mh"), + CacheEntry::new(sample_function(42), None), + ); + + std::fs::write(&cache_path, serde_json::to_string(&cache).unwrap()).unwrap(); + + let loaded = super::get_cache(krate).expect("cache enabled"); + assert!( + loaded.entries.is_empty(), + "expected migration to start from new cache" + ); + assert!(loaded.is_compatible()); + } + + #[test] + fn write_cache_writes_json_and_renames_atomically() { + let (_tmp, _guard) = cache_dir_guard(); + + let krate = "write_cache"; + let config = rustowl::cache::get_cache_config(); + let mut cache = CacheData::with_config(config); + cache.insert_cache_with_file_path( + "fh".to_string(), + "mh".to_string(), + sample_function(1), + None, + ); + + super::write_cache(krate, &cache); + + let cache_dir = rustowl::cache::get_cache_path().unwrap(); + let final_path = cache_dir.join(format!("{krate}.json")); + let temp_path = cache_dir.join(format!("{krate}.json.tmp")); + + assert!(final_path.is_file()); + assert!( + !temp_path.exists(), + "temp file should be renamed or removed" + ); + + let content = std::fs::read_to_string(&final_path).unwrap(); + let loaded: CacheData = serde_json::from_str(&content).unwrap(); + assert!(loaded.is_compatible()); + assert_eq!(loaded.entries.len(), 1); + } +} diff --git a/crates/rustowl/src/bin/core/mod.rs b/crates/rustowl/src/bin/core/mod.rs new file mode 100644 index 00000000..83ff68bd --- /dev/null +++ b/crates/rustowl/src/bin/core/mod.rs @@ -0,0 +1,262 @@ +mod analyze; +mod cache; + +use analyze::{AnalyzeResult, MirAnalyzer, MirAnalyzerInitResult}; +use ecow::EcoVec; +use rustc_hir::def_id::{LOCAL_CRATE, LocalDefId}; +use rustc_interface::interface; +use rustc_middle::{query::queries, ty::TyCtxt, 
util::Providers}; +use rustc_session::config; +use rustowl::models::FoldIndexMap as HashMap; +use rustowl::models::{Crate, File, Workspace}; +use std::env; +use std::sync::{LazyLock, Mutex, atomic::AtomicBool}; +use tokio::{ + runtime::{Builder, Runtime}, + task::JoinSet, +}; + +pub struct RustcCallback; +impl rustc_driver::Callbacks for RustcCallback {} + +static ATOMIC_TRUE: AtomicBool = AtomicBool::new(true); +static TASKS: LazyLock>> = + LazyLock::new(|| Mutex::new(JoinSet::new())); + +// make tokio runtime +static RUNTIME: LazyLock = LazyLock::new(|| { + let worker_threads = std::thread::available_parallelism() + .map(|n| (n.get() / 2).clamp(2, 8)) + .unwrap_or(4); + + Builder::new_multi_thread() + .enable_all() + .worker_threads(worker_threads) + .thread_stack_size(128 * 1024 * 1024) + .build() + .unwrap() +}); + +fn override_queries(_session: &rustc_session::Session, local: &mut Providers) { + local.mir_borrowck = mir_borrowck; +} + +fn mir_borrowck(tcx: TyCtxt<'_>, def_id: LocalDefId) -> queries::mir_borrowck::ProvidedValue<'_> { + tracing::info!("start borrowck of {def_id:?}"); + + let analyzers = MirAnalyzer::batch_init(tcx, def_id); + + { + let mut tasks = TASKS.lock().unwrap(); + for analyzer in analyzers { + match analyzer { + MirAnalyzerInitResult::Cached(cached) => { + handle_analyzed_result(tcx, *cached); + } + MirAnalyzerInitResult::Analyzer(analyzer) => { + tasks.spawn_on(async move { analyzer.await.analyze() }, RUNTIME.handle()); + } + } + } + + tracing::info!("there are {} tasks", tasks.len()); + while let Some(Ok(result)) = tasks.try_join_next() { + tracing::info!("one task joined"); + handle_analyzed_result(tcx, result); + } + } + + let mut providers = Providers::default(); + rustc_borrowck::provide(&mut providers); + let original_mir_borrowck = providers.mir_borrowck; + original_mir_borrowck(tcx, def_id) +} + +pub struct AnalyzerCallback; +impl rustc_driver::Callbacks for AnalyzerCallback { + fn config(&mut self, config: &mut 
interface::Config) { + config.using_internal_features = &ATOMIC_TRUE; + config.opts.unstable_opts.mir_opt_level = Some(0); + config.opts.unstable_opts.polonius = config::Polonius::Next; + config.opts.incremental = None; + config.override_queries = Some(override_queries); + config.make_codegen_backend = None; + } + fn after_expansion<'tcx>( + &mut self, + _compiler: &interface::Compiler, + tcx: TyCtxt<'tcx>, + ) -> rustc_driver::Compilation { + let result = rustc_driver::catch_fatal_errors(|| tcx.analysis(())); + + // join all tasks after all analysis finished + loop { + // First collect any tasks that have already finished + while let Some(Ok(result)) = { + let mut guard = TASKS.lock().unwrap(); + guard.try_join_next() + } { + tracing::info!("one task joined"); + handle_analyzed_result(tcx, result); + } + + // Check if all tasks are done + let has_tasks = { + let guard = TASKS.lock().unwrap(); + !guard.is_empty() + }; + if !has_tasks { + break; + } + + // Wait for at least one more task to finish + let result = { + let mut guard = TASKS.lock().unwrap(); + RUNTIME.block_on(guard.join_next()) + }; + if let Some(Ok(result)) = result { + tracing::info!("one task joined"); + handle_analyzed_result(tcx, result); + } + } + + if let Some(cache) = cache::CACHE.lock().unwrap().as_ref() { + // Log cache statistics before writing + let stats = cache.get_stats(); + tracing::info!( + "Cache statistics: {} hits, {} misses, {:.1}% hit rate, {} evictions", + stats.hits, + stats.misses, + stats.hit_rate() * 100.0, + stats.evictions + ); + cache::write_cache(&tcx.crate_name(LOCAL_CRATE).to_string(), cache); + } + + if result.is_ok() { + rustc_driver::Compilation::Continue + } else { + rustc_driver::Compilation::Stop + } + } +} + +pub fn handle_analyzed_result(tcx: TyCtxt<'_>, analyzed: AnalyzeResult) { + if let Some(cache) = cache::CACHE.lock().unwrap().as_mut() { + // Pass file name for potential file modification time validation + cache.insert_cache_with_file_path( + 
analyzed.file_hash.clone(), + analyzed.mir_hash.clone(), + analyzed.analyzed.clone(), + Some(&analyzed.file_name), + ); + } + let mut map = HashMap::with_capacity_and_hasher(1, foldhash::quality::RandomState::default()); + map.insert( + analyzed.file_name.to_owned(), + File { + items: EcoVec::from([analyzed.analyzed]), + }, + ); + let krate = Crate(map); + // get currently-compiling crate name + let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); + let mut ws_map = + HashMap::with_capacity_and_hasher(1, foldhash::quality::RandomState::default()); + ws_map.insert(crate_name.clone(), krate); + let ws = Workspace(ws_map); + + let serialized = serde_json::to_string(&ws).unwrap(); + if let Ok(output_path) = env::var("RUSTOWL_OUTPUT_PATH") { + if let Err(e) = std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(&output_path) + .and_then(|mut f| { + use std::io::Write; + writeln!(f, "{serialized}") + }) + { + tracing::warn!("failed to write RUSTOWL_OUTPUT_PATH={output_path}: {e}"); + } + } else { + println!("{serialized}"); + } +} + +pub fn run_compiler() -> i32 { + let mut args: Vec = env::args().collect(); + + // When used as `RUSTC_WORKSPACE_WRAPPER`, Cargo invokes: + // - Probes: `rustowlc - [--print ...]` + // - Real compiles: `rustowlc ... --crate-name ...` + // Cargo passes the real rustc path as argv[1], which rustc_driver does not expect. + if args.get(1).is_some_and(|a| a.contains("rustc")) { + args.remove(1); + } + + // If invoked directly as `rustowlc rustowlc ...` (single-file mode), strip the duplicated + // argv[1] so the remaining args match rustc_driver expectations. + if args.first() == args.get(1) { + args.remove(1); + } + + let mut crate_name: Option<&str> = None; + if let Some(i) = args.iter().position(|a| a == "--crate-name") { + crate_name = args.get(i + 1).map(String::as_str); + } + + // Always passthrough for rustc probes / printing. 
+ for arg in &args { + if arg == "-vV" || arg == "--version" || arg.starts_with("--print") { + return rustc_driver::catch_with_exit_code(|| { + rustc_driver::run_compiler(&args, &mut RustcCallback) + }); + } + } + + // RustOwl's single-file mode doesn't pass `--crate-name`; we still want analysis. + // Cargo uses `--crate-name ___` during target info probing. + let should_analyze = match crate_name { + Some("___") => false, + Some(_) => true, + None => true, + }; + + if should_analyze { + rustc_driver::catch_with_exit_code(|| { + rustc_driver::run_compiler(&args, &mut AnalyzerCallback); + }) + } else { + rustc_driver::catch_with_exit_code(|| rustc_driver::run_compiler(&args, &mut RustcCallback)) + } +} + +#[cfg(test)] +mod tests { + + #[test] + fn workspace_wrapper_duplicate_argv0_is_detected() { + let args = vec!["rustowlc", "rustowlc", "--help"]; + assert_eq!(args.first(), args.get(1)); + + let deduped: Vec<_> = if args.first() == args.get(1) { + args.into_iter().skip(1).collect() + } else { + args.into_iter().collect() + }; + + assert_eq!(deduped, vec!["rustowlc", "--help"]); + } + + #[test] + fn passthrough_args_are_detected() { + for arg in ["-vV", "--version", "--print=cfg", "--print", "--print=all"] { + assert!(arg == "-vV" || arg == "--version" || arg.starts_with("--print")); + } + + for arg in ["--crate-type", "lib", "-L", "dependency=/path"] { + assert!(!(arg == "-vV" || arg == "--version" || arg.starts_with("--print"))); + } + } +} diff --git a/crates/rustowl/src/bin/rustowl.rs b/crates/rustowl/src/bin/rustowl.rs new file mode 100644 index 00000000..00d97c65 --- /dev/null +++ b/crates/rustowl/src/bin/rustowl.rs @@ -0,0 +1,236 @@ +//! # RustOwl cargo-owlsp +//! +//! An LSP server for visualizing ownership and lifetimes in Rust, designed for debugging and optimization. 
+ +use clap::{CommandFactory, Parser}; +use clap_complete::generate; +use rustowl::{ + Backend, + cli::{Cli, Commands, ToolchainCommands}, + toolchain, utils, +}; +use std::env; +use tower_lsp_server::{LspService, Server}; +use tracing_subscriber::filter::LevelFilter; + +fn log_level_from_args(args: &Cli) -> LevelFilter { + args.verbosity.tracing_level_filter() +} + +#[cfg(all( + any(target_os = "linux", target_os = "macos"), + not(miri), + feature = "jemalloc" +))] +use tikv_jemallocator::Jemalloc; + +// Use jemalloc by default on linux/macos (but keep a feature-gated escape hatch +// so we can run tools like Valgrind with the system allocator). +#[cfg(all( + any(target_os = "linux", target_os = "macos"), + not(miri), + feature = "jemalloc" +))] +#[global_allocator] +static GLOBAL: Jemalloc = Jemalloc; + +/// Handles the execution of RustOwl CLI commands. +/// +/// This function processes a specific CLI command and executes the appropriate +/// subcommand. It handles all CLI operations including analysis checking, cache cleaning, +/// toolchain management, and shell completion generation. 
+/// +/// # Arguments +/// +/// * `command` - The specific command to execute +/// +/// # Returns +/// +/// This function may exit the process with appropriate exit codes: +/// - Exit code 0 on successful analysis +/// - Exit code 1 on analysis failure or toolchain setup errors +async fn handle_command(command: Commands, rustc_threads: usize) { + match command { + Commands::Check(command_options) => { + let path = command_options.path.unwrap_or_else(|| { + env::current_dir().unwrap_or_else(|_| { + tracing::error!("Failed to get current directory, using '.'"); + std::path::PathBuf::from(".") + }) + }); + + let report = Backend::check_report_with_options( + &path, + command_options.all_targets, + command_options.all_features, + rustc_threads, + ) + .await; + + if report.ok { + match report.total_targets { + Some(total) => { + eprintln!( + "rustowl check: success ({}/{}) in {:.2?}", + report.checked_targets, total, report.duration + ); + } + None => { + eprintln!("rustowl check: success in {:.2?}", report.duration); + } + } + std::process::exit(0); + } + tracing::error!("Analyze failed"); + std::process::exit(1); + } + Commands::Clean => { + if let Ok(meta) = cargo_metadata::MetadataCommand::new().exec() { + let target = meta.target_directory.join("owl"); + tokio::fs::remove_dir_all(&target).await.ok(); + } + } + Commands::Toolchain(command_options) => { + if let Some(arg) = command_options.command { + match arg { + ToolchainCommands::Install { + path, + skip_rustowl_toolchain, + } => { + let path = path.unwrap_or(toolchain::FALLBACK_RUNTIME_DIR.clone()); + if toolchain::setup_toolchain(&path, skip_rustowl_toolchain) + .await + .is_err() + { + std::process::exit(1); + } + } + ToolchainCommands::Uninstall => { + rustowl::toolchain::uninstall_toolchain().await; + } + } + } + } + Commands::Completions(command_options) => { + let shell = command_options.shell; + generate( + shell, + &mut Cli::command(), + "rustowl", + &mut std::io::stdout(), + ); + } + } +} + +/// Handles 
the case when no command is provided (version display or LSP server mode) +async fn handle_no_command(args: Cli, used_short_flag: bool, rustc_threads: usize) { + if args.version { + if used_short_flag { + println!("rustowl {}", clap::crate_version!()); + } else { + display_version(); + } + return; + } + + start_lsp_server(rustc_threads).await; +} + +/// Displays version information including git tag, commit hash, build time, etc. +fn display_version() { + println!("rustowl {}", clap::crate_version!()); + + let tag = env!("GIT_TAG"); + println!("tag:{}", if tag.is_empty() { "not found" } else { tag }); + + let commit = env!("GIT_COMMIT_HASH"); + println!( + "commit_hash:{}", + if commit.is_empty() { + "not found" + } else { + commit + } + ); + + let build_time = env!("BUILD_TIME"); + println!( + "build_time:{}", + if build_time.is_empty() { + "not found" + } else { + build_time + } + ); + + let rustc_version = env!("RUSTC_VERSION"); + if rustc_version.is_empty() { + println!("build_env:not found"); + } else { + println!("build_env:{},{}", rustc_version, env!("RUSTOWL_TOOLCHAIN")); + } +} + +/// Starts the LSP server +async fn start_lsp_server(rustc_threads: usize) { + eprintln!("RustOwl v{}", clap::crate_version!()); + eprintln!("This is an LSP server. 
You can use --help flag to show help."); + + let stdin = tokio::io::stdin(); + let stdout = tokio::io::stdout(); + + let (service, socket) = LspService::build(Backend::new(rustc_threads)) + .custom_method("rustowl/cursor", Backend::cursor) + .custom_method("rustowl/analyze", Backend::analyze) + .finish(); + + Server::new(stdin, stdout, socket).serve(service).await; +} + +#[tokio::main] +async fn main() { + let used_short_flag = std::env::args().any(|arg| arg == "-V"); + + let parsed_args = Cli::parse(); + let rustc_threads = parsed_args + .rustc_threads + .unwrap_or(utils::get_default_parallel_count()); + + rustowl::initialize_logging(log_level_from_args(&parsed_args)); + + match parsed_args.command { + Some(command) => handle_command(command, rustc_threads).await, + None => handle_no_command(parsed_args, used_short_flag, rustc_threads).await, + } +} + +#[cfg(test)] +mod tests { + use clap::Parser; + use rustowl::async_test; + + // Command handling in this binary calls `std::process::exit`, which makes it + // hard to test directly. Clap parsing is covered in `src/cli.rs`. + + #[test] + fn test_display_version_function() { + super::display_version(); + } + + #[test] + fn log_level_from_args_uses_cli_verbosity() { + let args = rustowl::cli::Cli::parse_from(["rustowl", "-vv"]); + let level = super::log_level_from_args(&args); + assert_eq!(level, args.verbosity.tracing_level_filter()); + } + + async_test!(handle_no_command_prints_version_for_long_flag, async { + let args = rustowl::cli::Cli::parse_from(["rustowl", "--version"]); + + let output = gag::BufferRedirect::stdout().unwrap(); + super::handle_no_command(args, false, 1).await; + + drop(output); + }); +} diff --git a/crates/rustowl/src/bin/rustowlc.rs b/crates/rustowl/src/bin/rustowlc.rs new file mode 100644 index 00000000..fc0d1023 --- /dev/null +++ b/crates/rustowl/src/bin/rustowlc.rs @@ -0,0 +1,45 @@ +//! # RustOwl rustowlc +//! +//! 
A compiler implementation for visualizing ownership and lifetimes in Rust, designed for debugging and optimization. + +#![feature(rustc_private)] + +pub extern crate polonius_engine; +pub extern crate rustc_borrowck; +pub extern crate rustc_data_structures; +pub extern crate rustc_driver; +pub extern crate rustc_errors; +pub extern crate rustc_hash; +pub extern crate rustc_hir; +pub extern crate rustc_index; +pub extern crate rustc_interface; +pub extern crate rustc_middle; +pub extern crate rustc_query_system; +pub extern crate rustc_session; +pub extern crate rustc_span; +pub extern crate rustc_stable_hash; +pub extern crate rustc_type_ir; + +// Cited from rustc https://github.com/rust-lang/rust/blob/73cecf3a39bfb5a57982311de238147dd1c34a1f/compiler/rustc/src/main.rs +// MIT License +#[cfg(any(target_os = "linux", target_os = "macos"))] +use tikv_jemalloc_sys as _; + +pub mod core; + +use std::process::exit; + +fn main() { + rustowl::initialize_logging(tracing_subscriber::filter::LevelFilter::INFO); + + // rayon panics without this only on Windows + #[cfg(target_os = "windows")] + { + rayon::ThreadPoolBuilder::new() + .stack_size(4 * 1024 * 1024) + .build_global() + .unwrap(); + } + + exit(core::run_compiler()) +} diff --git a/crates/rustowl/src/cache.rs b/crates/rustowl/src/cache.rs new file mode 100644 index 00000000..f8d626e5 --- /dev/null +++ b/crates/rustowl/src/cache.rs @@ -0,0 +1,312 @@ +use std::env; +use std::path::{Path, PathBuf}; +use tokio::process::Command; + +/// Configuration for cache behavior +#[derive(Clone, Debug)] +pub struct CacheConfig { + /// Maximum number of entries before eviction + pub max_entries: usize, + /// Maximum memory usage in bytes before eviction + pub max_memory_bytes: usize, + /// Enable LRU eviction policy (vs FIFO) + pub use_lru_eviction: bool, + /// Enable file modification time validation + pub validate_file_mtime: bool, + /// Enable compression for cache files + pub enable_compression: bool, +} + +impl 
Default for CacheConfig { + fn default() -> Self { + Self { + max_entries: 1000, + max_memory_bytes: 100 * 1024 * 1024, // 100MB + use_lru_eviction: true, + validate_file_mtime: true, + enable_compression: false, // Disable by default for compatibility + } + } +} + +pub fn is_cache() -> bool { + !env::var("RUSTOWL_CACHE") + .map(|v| { + let v = v.trim().to_ascii_lowercase(); + v == "false" || v == "0" + }) + .unwrap_or(false) +} + +pub fn set_cache_path(cmd: &mut Command, target_dir: impl AsRef) { + cmd.env("RUSTOWL_CACHE_DIR", target_dir.as_ref().join("cache")); +} + +pub fn get_cache_path() -> Option { + env::var("RUSTOWL_CACHE_DIR") + .ok() + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .map(PathBuf::from) +} + +/// Construct a CacheConfig starting from defaults and overriding fields from environment variables. +/// +/// The following environment variables are recognized (case-sensitive names): +/// - `RUSTOWL_CACHE_MAX_ENTRIES`: parsed as `usize` to set `max_entries`. +/// - `RUSTOWL_CACHE_MAX_MEMORY_MB`: parsed as `usize`; stored as bytes using saturating multiplication by 1024*1024. +/// - `RUSTOWL_CACHE_EVICTION`: case-insensitive; `"lru"` enables LRU eviction, `"fifo"` disables it; other values leave the default. +/// - `RUSTOWL_CACHE_VALIDATE_FILES`: case-insensitive; `"false"` or `"0"` disables file mtime validation, any other value enables it. +/// +/// Returns the assembled `CacheConfig`. 
+/// +/// # Examples +/// +/// ``` +/// use rustowl::cache::get_cache_config; +/// unsafe { std::env::set_var("RUSTOWL_CACHE_MAX_ENTRIES", "5"); } +/// let cfg = get_cache_config(); +/// assert_eq!(cfg.max_entries, 5); +/// ``` +pub fn get_cache_config() -> CacheConfig { + let mut config = CacheConfig::default(); + + // Configure max entries + if let Ok(max_entries) = env::var("RUSTOWL_CACHE_MAX_ENTRIES") + && let Ok(value) = max_entries.parse::() + { + config.max_entries = value; + } + + // Configure max memory in MB + if let Ok(max_memory_mb) = env::var("RUSTOWL_CACHE_MAX_MEMORY_MB") + && let Ok(value) = max_memory_mb.parse::() + { + config.max_memory_bytes = value.saturating_mul(1024 * 1024); + } + + // Configure eviction policy + if let Ok(policy) = env::var("RUSTOWL_CACHE_EVICTION") { + match policy.trim().to_ascii_lowercase().as_str() { + "lru" => config.use_lru_eviction = true, + "fifo" => config.use_lru_eviction = false, + _ => {} // keep default + } + } + + // Configure file validation + if let Ok(validate) = env::var("RUSTOWL_CACHE_VALIDATE_FILES") { + let v = validate.trim().to_ascii_lowercase(); + config.validate_file_mtime = !(v == "false" || v == "0"); + } + + config +} + +#[cfg(test)] +use std::sync::LazyLock; + +#[cfg(test)] +static ENV_LOCK: LazyLock> = LazyLock::new(|| std::sync::Mutex::new(())); + +#[cfg(test)] +struct EnvGuard { + key: String, + old_value: Option, + _lock: std::sync::MutexGuard<'static, ()>, +} + +#[cfg(test)] +impl EnvGuard { + fn set(key: &str, value: &str) -> Self { + let lock = ENV_LOCK.lock().unwrap(); + let old_value = env::var(key).ok(); + unsafe { + env::set_var(key, value); + } + Self { + key: key.to_owned(), + old_value, + _lock: lock, + } + } +} + +#[cfg(test)] +impl Drop for EnvGuard { + fn drop(&mut self) { + if let Some(v) = self.old_value.take() { + unsafe { + env::set_var(&self.key, v); + } + } else { + unsafe { + env::remove_var(&self.key); + } + } + } +} + +#[cfg(test)] +fn with_env(key: &str, value: &str, f: 
F)
where
    F: FnOnce(),
{
    let guard = EnvGuard::set(key, value);
    // Catch a panicking closure so the guard still restores the variable,
    // then re-raise to keep the test failure visible.
    let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(f));
    drop(guard);
    if let Err(panic) = result {
        std::panic::resume_unwind(panic);
    }
}

#[test]
fn test_cache_config_default() {
    let config = CacheConfig::default();
    assert_eq!(config.max_entries, 1000);
    assert_eq!(config.max_memory_bytes, 100 * 1024 * 1024);
    assert!(config.use_lru_eviction);
    assert!(config.validate_file_mtime);
    assert!(!config.enable_compression);
}

#[test]
fn test_is_cache_default() {
    // FIX: hold ENV_LOCK while touching the environment directly — the
    // original mutated env vars without the lock and raced with the
    // with_env/EnvGuard based tests.
    let _lock = ENV_LOCK.lock().unwrap();

    // Remove any existing cache env var for clean test
    let old_value = env::var("RUSTOWL_CACHE").ok();
    unsafe {
        env::remove_var("RUSTOWL_CACHE");
    }

    assert!(is_cache()); // Should be true by default

    // Restore old value
    if let Some(v) = old_value {
        unsafe {
            env::set_var("RUSTOWL_CACHE", v);
        }
    }
}

#[test]
fn test_is_cache_with_true_values() {
    // Only "false"/"0" disable caching; everything else (including empty) enables.
    for value in ["true", "1", "yes", ""] {
        with_env("RUSTOWL_CACHE", value, || {
            assert!(is_cache());
        });
    }
}

#[test]
fn test_get_cache_path() {
    // FIX: scope the direct env manipulation under ENV_LOCK. The lock must be
    // released before the with_env calls below, which acquire the same
    // (non-reentrant) mutex.
    {
        let _lock = ENV_LOCK.lock().unwrap();
        let old_value = env::var("RUSTOWL_CACHE_DIR").ok();
        unsafe {
            env::remove_var("RUSTOWL_CACHE_DIR");
        }
        assert!(get_cache_path().is_none());
        if let Some(v) = old_value {
            unsafe {
                env::set_var("RUSTOWL_CACHE_DIR", v);
            }
        }
    }

    for value in ["", " "] {
        with_env("RUSTOWL_CACHE_DIR", value, || {
            assert!(get_cache_path().is_none());
        });
    }

    with_env("RUSTOWL_CACHE_DIR", "/tmp/cache", || {
        assert_eq!(get_cache_path().unwrap(), PathBuf::from("/tmp/cache"));
    });

    // Surrounding whitespace is trimmed before conversion.
    with_env("RUSTOWL_CACHE_DIR", " /tmp/cache ", || {
        assert_eq!(get_cache_path().unwrap(), PathBuf::from("/tmp/cache"));
    });
}

#[test]
fn test_get_cache_config_with_env_vars() {
    // Test max entries configuration
    with_env("RUSTOWL_CACHE_MAX_ENTRIES", "500", || {
        let config = get_cache_config();
        assert_eq!(config.max_entries, 500);
    });

    // Test that invalid
values don't crash the program + with_env("RUSTOWL_CACHE_MAX_ENTRIES", "invalid", || { + let config = get_cache_config(); + // Should fall back to default when parse fails + assert_eq!(config.max_entries, 1000); + }); + // Test max memory configuration + with_env("RUSTOWL_CACHE_MAX_MEMORY_MB", "200", || { + let config = get_cache_config(); + assert_eq!(config.max_memory_bytes, 200 * 1024 * 1024); + }); + + // Test max memory with overflow protection + with_env( + "RUSTOWL_CACHE_MAX_MEMORY_MB", + &usize::MAX.to_string(), + || { + let config = get_cache_config(); + // Should use saturating_mul, so might be different from exact calculation + assert!(config.max_memory_bytes > 0); + }, + ); + + // Test eviction policy configuration + with_env("RUSTOWL_CACHE_EVICTION", "lru", || { + let config = get_cache_config(); + assert!(config.use_lru_eviction); + }); + + with_env("RUSTOWL_CACHE_EVICTION", "LRU", || { + let config = get_cache_config(); + assert!(config.use_lru_eviction); + }); + + with_env("RUSTOWL_CACHE_EVICTION", "fifo", || { + let config = get_cache_config(); + assert!(!config.use_lru_eviction); + }); + + with_env("RUSTOWL_CACHE_EVICTION", "FIFO", || { + let config = get_cache_config(); + assert!(!config.use_lru_eviction); + }); + + // Test invalid eviction policy (should keep default) + with_env("RUSTOWL_CACHE_EVICTION", "invalid", || { + let config = get_cache_config(); + assert!(config.use_lru_eviction); // default is true + }); + + // Test file validation configuration + with_env("RUSTOWL_CACHE_VALIDATE_FILES", "false", || { + let config = get_cache_config(); + assert!(!config.validate_file_mtime); + }); + + with_env("RUSTOWL_CACHE_VALIDATE_FILES", "0", || { + let config = get_cache_config(); + assert!(!config.validate_file_mtime); + }); + + with_env("RUSTOWL_CACHE_VALIDATE_FILES", "true", || { + let config = get_cache_config(); + assert!(config.validate_file_mtime); + }); + + with_env("RUSTOWL_CACHE_VALIDATE_FILES", "1", || { + let config = 
get_cache_config(); + assert!(config.validate_file_mtime); + }); + + with_env("RUSTOWL_CACHE_VALIDATE_FILES", " FALSE ", || { + let config = get_cache_config(); + assert!(!config.validate_file_mtime); + }); +} diff --git a/src/cli.rs b/crates/rustowl/src/cli.rs similarity index 51% rename from src/cli.rs rename to crates/rustowl/src/cli.rs index 801a98a3..8fe52ce9 100644 --- a/src/cli.rs +++ b/crates/rustowl/src/cli.rs @@ -1,15 +1,15 @@ -use clap::{ArgAction, Args, Parser, Subcommand, ValueHint}; +use clap::{Args, Parser, Subcommand, ValueHint}; #[derive(Debug, Parser)] -#[command(author)] +#[command(author, disable_version_flag = true)] pub struct Cli { - /// Print version. - #[arg(short('V'), long)] + /// Print version info (-V short, --version detailed). + #[arg(short = 'V', long = "version")] pub version: bool, - /// Suppress output. - #[arg(short, long, action(ArgAction::Count))] - pub quiet: u8, + /// Logging verbosity (-v/-vv/-vvv) or quiet (-q/-qq). + #[command(flatten)] + pub verbosity: clap_verbosity_flag::Verbosity, /// Use stdio to communicate with the LSP server. 
#[arg(long)] @@ -104,3 +104,68 @@ pub struct Completions { #[arg(value_enum)] pub shell: crate::shells::Shell, } + +#[cfg(test)] +mod tests { + use super::*; + use clap::Parser; + + #[test] + fn test_cli_default_parsing() { + let cli = Cli::try_parse_from(["rustowl"]).unwrap(); + assert!(!cli.version); + assert!(!cli.stdio); + assert!(cli.command.is_none()); + assert!(cli.rustc_threads.is_none()); + } + + #[test] + fn test_check_command_with_flags() { + let cli = + Cli::try_parse_from(["rustowl", "check", "--all-targets", "--all-features"]).unwrap(); + match cli.command { + Some(Commands::Check(check)) => { + assert!(check.all_targets); + assert!(check.all_features); + } + _ => panic!("Expected Check command"), + } + } + + #[test] + fn test_toolchain_install_skip_rustowl() { + let cli = Cli::try_parse_from([ + "rustowl", + "toolchain", + "install", + "--skip-rustowl-toolchain", + ]) + .unwrap(); + + match cli.command { + Some(Commands::Toolchain(toolchain)) => match toolchain.command { + Some(ToolchainCommands::Install { + skip_rustowl_toolchain, + .. + }) => assert!(skip_rustowl_toolchain), + _ => panic!("Expected Install subcommand"), + }, + _ => panic!("Expected Toolchain command"), + } + } + + #[test] + fn test_completions_command() { + let cli = Cli::try_parse_from(["rustowl", "completions", "bash"]).unwrap(); + assert!(matches!(cli.command, Some(Commands::Completions(_)))); + } + + #[test] + fn test_invalid_arguments() { + let args = vec!["rustowl", "invalid"]; + assert!(Cli::try_parse_from(args).is_err()); + + let args = vec!["rustowl", "--invalid-flag"]; + assert!(Cli::try_parse_from(args).is_err()); + } +} diff --git a/crates/rustowl/src/error.rs b/crates/rustowl/src/error.rs new file mode 100644 index 00000000..8c7f084e --- /dev/null +++ b/crates/rustowl/src/error.rs @@ -0,0 +1,89 @@ +//! Error handling for RustOwl using anyhow for flexible error handling. + +pub use anyhow::{Context, Result, anyhow, bail}; + +/// Main error type for RustOwl operations. 
+/// Used for typed errors that need to be matched on. +#[derive(Debug)] +pub enum RustOwlError { + /// I/O operation failed + Io(std::io::Error), + /// Cargo metadata operation failed + CargoMetadata(String), + /// Toolchain operation failed + Toolchain(String), + /// JSON serialization/deserialization failed + Json(serde_json::Error), + /// Cache operation failed + Cache(String), + /// LSP operation failed + Lsp(String), + /// General analysis error + Analysis(String), + /// Configuration error + Config(String), +} + +impl std::fmt::Display for RustOwlError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + RustOwlError::Io(err) => write!(f, "I/O error: {err}"), + RustOwlError::CargoMetadata(msg) => write!(f, "Cargo metadata error: {msg}"), + RustOwlError::Toolchain(msg) => write!(f, "Toolchain error: {msg}"), + RustOwlError::Json(err) => write!(f, "JSON error: {err}"), + RustOwlError::Cache(msg) => write!(f, "Cache error: {msg}"), + RustOwlError::Lsp(msg) => write!(f, "LSP error: {msg}"), + RustOwlError::Analysis(msg) => write!(f, "Analysis error: {msg}"), + RustOwlError::Config(msg) => write!(f, "Configuration error: {msg}"), + } + } +} + +impl std::error::Error for RustOwlError {} + +impl From for RustOwlError { + fn from(err: std::io::Error) -> Self { + RustOwlError::Io(err) + } +} + +impl From for RustOwlError { + fn from(err: serde_json::Error) -> Self { + RustOwlError::Json(err) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_rustowl_error_display() { + let io_err = RustOwlError::Io(std::io::Error::new( + std::io::ErrorKind::NotFound, + "file not found", + )); + assert!(io_err.to_string().contains("I/O error")); + + let cargo_err = RustOwlError::CargoMetadata("invalid metadata".to_string()); + assert_eq!( + cargo_err.to_string(), + "Cargo metadata error: invalid metadata" + ); + + let toolchain_err = RustOwlError::Toolchain("setup failed".to_string()); + assert_eq!(toolchain_err.to_string(), 
"Toolchain error: setup failed"); + } + + #[test] + fn test_error_from_conversions() { + let io_error = std::io::Error::new(std::io::ErrorKind::PermissionDenied, "access denied"); + let rustowl_error: RustOwlError = io_error.into(); + assert!(matches!(rustowl_error, RustOwlError::Io(_))); + + let json_str = "{ invalid json"; + let json_error = serde_json::from_str::(json_str).unwrap_err(); + let rustowl_error: RustOwlError = json_error.into(); + assert!(matches!(rustowl_error, RustOwlError::Json(_))); + } +} diff --git a/crates/rustowl/src/lib.rs b/crates/rustowl/src/lib.rs new file mode 100644 index 00000000..17c6ffa6 --- /dev/null +++ b/crates/rustowl/src/lib.rs @@ -0,0 +1,218 @@ +//! # RustOwl Library +//! +//! RustOwl is a Language Server Protocol (LSP) implementation for visualizing +//! ownership and lifetimes in Rust code. +//! +//! The core analysis is performed by the `rustowlc` binary (a rustc wrapper). +//! This library provides the common data models and the LSP-side orchestration. +//! +//! ## Core Components +//! +//! - **LSP Backend**: Language server implementation for IDE integration +//! - **Analysis Engine**: Rust compiler integration for ownership analysis +//! - **Caching System**: Intelligent caching for improved performance +//! - **Error Handling**: Comprehensive error reporting with context +//! - **Toolchain Management**: Automatic setup and management of analysis tools +//! +//! ## Usage +//! +//! This library is primarily used by the RustOwl binary for LSP server functionality, +//! but can also be used directly for programmatic analysis of Rust code. 
+ +use std::io::IsTerminal; +use std::io::{self, Write}; +use std::sync::{Mutex, OnceLock}; + +use indicatif::ProgressBar; + +/// Core caching functionality for analysis results +pub mod cache; +/// Command-line interface definitions +pub mod cli; +/// Comprehensive error handling with context +pub mod error; +/// Language Server Protocol implementation +pub mod lsp; +/// Data models for analysis results +pub mod models; +/// Shell completion utilities +pub mod shells; +/// Rust toolchain management +pub mod toolchain; +/// General utility functions +pub mod utils; + +pub use lsp::backend::Backend; + +use tracing_subscriber::{EnvFilter, filter::LevelFilter, fmt, prelude::*}; + +static ACTIVE_PROGRESS_BAR: OnceLock>> = OnceLock::new(); + +fn set_active_progress_bar(pb: Option) { + let cell = ACTIVE_PROGRESS_BAR.get_or_init(|| Mutex::new(None)); + *cell.lock().expect("progress bar mutex poisoned") = pb; +} + +fn with_active_progress_bar(f: impl FnOnce(Option<&ProgressBar>) -> R) -> R { + let cell = ACTIVE_PROGRESS_BAR.get_or_init(|| Mutex::new(None)); + let guard = cell.lock().expect("progress bar mutex poisoned"); + f(guard.as_ref()) +} + +#[derive(Default, Clone, Copy)] +struct IndicatifOrStderrWriter; + +impl<'a> fmt::MakeWriter<'a> for IndicatifOrStderrWriter { + type Writer = IndicatifOrStderr; + + fn make_writer(&'a self) -> Self::Writer { + IndicatifOrStderr + } +} + +struct IndicatifOrStderr; + +impl Write for IndicatifOrStderr { + fn write(&mut self, buf: &[u8]) -> io::Result { + let msg = match std::str::from_utf8(buf) { + Ok(v) => v, + // If it's not valid UTF-8, fall back to raw stderr. 
+ Err(_) => return io::stderr().write(buf), + }; + + with_active_progress_bar(|pb| { + if let Some(pb) = pb { + for line in msg.lines() { + pb.println(line); + } + Ok(buf.len()) + } else { + io::stderr().write_all(buf).map(|()| buf.len()) + } + }) + } + + fn flush(&mut self) -> io::Result<()> { + with_active_progress_bar(|pb| { + if pb.is_some() { + Ok(()) + } else { + io::stderr().flush() + } + }) + } +} + +#[must_use] +pub struct ActiveProgressBarGuard { + previous: Option, +} + +impl ActiveProgressBarGuard { + pub fn set(pb: ProgressBar) -> Self { + let previous = ACTIVE_PROGRESS_BAR + .get_or_init(|| Mutex::new(None)) + .lock() + .expect("progress bar mutex poisoned") + .take(); + set_active_progress_bar(Some(pb)); + Self { previous } + } +} + +impl Drop for ActiveProgressBarGuard { + fn drop(&mut self) { + set_active_progress_bar(self.previous.take()); + } +} + +/// Initializes the logging system with colors and a default log level. +/// +/// If a global subscriber is already set (e.g. by another binary), this +/// silently returns without re-initializing. +pub fn initialize_logging(level: LevelFilter) { + let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| { + // Default: show only rustowl logs at the requested level to avoid + // drowning users in dependency logs. + EnvFilter::new(format!("rustowl={level}")) + }); + + let fmt_layer = fmt::layer() + .with_target(true) + .with_level(true) + .with_thread_ids(false) + .with_thread_names(false) + .with_writer(IndicatifOrStderrWriter) + .with_ansi(std::io::stderr().is_terminal()); + + // Ignore error if already initialized + let _ = tracing_subscriber::registry() + .with(env_filter) + .with(fmt_layer) + .try_init(); +} + +/// Test utilities for Miri-compatible async tests. +/// +/// Miri doesn't support `#[tokio::test]` directly, so we provide a macro +/// that handles the async runtime setup correctly for both regular tests +/// and Miri. +/// +/// See: +#[macro_export] +macro_rules! 
async_test { + ($name:ident, $body:expr) => { + #[test] + #[cfg_attr(miri, ignore)] + fn $name() { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + rt.block_on($body) + } + }; +} + +// Miri tests finding UB (Undefined Behaviour) +mod miri_tests; + +#[cfg(test)] +mod tests { + use super::*; + use indicatif::ProgressBar; + + #[test] + fn active_progress_bar_guard_restores_previous_progress_bar() { + let pb1 = ProgressBar::hidden(); + let pb2 = ProgressBar::hidden(); + + let _guard1 = ActiveProgressBarGuard::set(pb1.clone()); + super::with_active_progress_bar(|pb| { + assert!(pb.is_some()); + }); + + { + let _guard2 = ActiveProgressBarGuard::set(pb2.clone()); + super::with_active_progress_bar(|pb| { + assert!(pb.is_some()); + }); + } + + super::with_active_progress_bar(|pb| { + assert!(pb.is_some()); + }); + + drop(_guard1); + + super::with_active_progress_bar(|pb| { + assert!(pb.is_none()); + }); + } + + #[test] + fn initialize_logging_is_idempotent() { + initialize_logging(tracing_subscriber::filter::LevelFilter::DEBUG); + initialize_logging(tracing_subscriber::filter::LevelFilter::INFO); + } +} diff --git a/src/lsp.rs b/crates/rustowl/src/lsp.rs similarity index 100% rename from src/lsp.rs rename to crates/rustowl/src/lsp.rs diff --git a/crates/rustowl/src/lsp/analyze.rs b/crates/rustowl/src/lsp/analyze.rs new file mode 100644 index 00000000..1337ef23 --- /dev/null +++ b/crates/rustowl/src/lsp/analyze.rs @@ -0,0 +1,439 @@ +use crate::cache::{is_cache, set_cache_path}; +use crate::models::Workspace; +use crate::toolchain; +use anyhow::bail; +use std::collections::HashSet; +use std::path::{Path, PathBuf}; +use std::process::Stdio; +use std::sync::Arc; +use tokio::{ + io::{AsyncBufReadExt, BufReader}, + process, + sync::{Notify, mpsc}, +}; + +#[derive(serde::Deserialize, Clone, Debug)] +pub struct CargoCheckMessageTarget { + name: String, +} + +#[derive(serde::Deserialize, Clone, Debug)] +#[serde(tag = "reason", 
rename_all = "kebab-case")]
pub enum CargoCheckMessage {
    /// One compilation unit finished; used for progress accounting.
    CompilerArtifact {
        target: CargoCheckMessageTarget,
    },
    #[allow(unused)]
    BuildFinished {},
}

/// Events surfaced to the LSP backend while an analysis run is in flight.
pub enum AnalyzerEvent {
    /// A crate finished `cargo check` (progress tick).
    CrateChecked {
        package: String,
        package_index: usize,
        package_count: usize,
    },
    /// rustowlc emitted an analyzed `Workspace`.
    Analyzed(Workspace),
}

#[derive(Clone, Debug)]
pub struct Analyzer {
    /// Workspace root (package mode) or the single `.rs` file (file mode).
    path: PathBuf,
    // NOTE: inner type restored — the mangled source had a bare `Option`.
    /// `Some` iff `cargo metadata` succeeded (package mode).
    metadata: Option<cargo_metadata::Metadata>,
    rustc_threads: usize,
}

impl Analyzer {
    /// Builds an analyzer for `path`: resolves the workspace via
    /// `cargo metadata` (with any rustc wrapper disabled), falling back to
    /// single-file mode when `path` is a `.rs` file outside a cargo project.
    // NOTE: generics restored — mangled source had `impl AsRef` and a bare
    // `crate::error::Result`.
    pub async fn new(path: impl AsRef<Path>, rustc_threads: usize) -> crate::error::Result<Self> {
        let path = path.as_ref().to_path_buf();

        let mut cargo_cmd = toolchain::setup_cargo_command(rustc_threads).await;
        cargo_cmd
            .env_remove("RUSTC_WORKSPACE_WRAPPER")
            .env_remove("RUSTC_WRAPPER")
            // `--config` values are TOML; `""` sets the wrapper to an empty string.
            .args([
                "--config",
                "build.rustc-wrapper=\"\"",
                "--config",
                "build.rustc-workspace-wrapper=\"\"",
                "metadata",
                "--format-version",
                "1",
                "--filter-platform",
                toolchain::HOST_TUPLE,
            ])
            .current_dir(if path.is_file() {
                path.parent().unwrap()
            } else {
                &path
            })
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());

        let metadata = match cargo_cmd.output().await {
            Ok(output) if output.status.success() => {
                let data = String::from_utf8_lossy(&output.stdout);
                cargo_metadata::MetadataCommand::parse(data).ok()
            }
            Ok(output) => {
                // Failure is non-fatal here: we may still run single-file mode.
                if tracing::enabled!(tracing::Level::DEBUG) {
                    let stdout = String::from_utf8_lossy(&output.stdout);
                    let stderr = String::from_utf8_lossy(&output.stderr);
                    tracing::debug!(
                        "`cargo metadata` failed (status={}):\nstdout:\n{}\nstderr:\n{}",
                        output.status,
                        stdout.trim(),
                        stderr.trim()
                    );
                }
                None
            }
            Err(e) => {
                if tracing::enabled!(tracing::Level::DEBUG) {
                    tracing::debug!("failed to spawn `cargo metadata`: {e}");
                }
                None
            }
        };

        if let Some(metadata) = metadata {
            Ok(Self {
                path: metadata.workspace_root.as_std_path().to_path_buf(),
                metadata: Some(metadata),
                rustc_threads,
            })
        } else if
path.is_file() && path.extension().map(|v| v == "rs").unwrap_or(false) { + Ok(Self { + path, + metadata: None, + rustc_threads, + }) + } else { + tracing::error!("Invalid analysis target: {}", path.display()); + bail!("Invalid analysis target: {}", path.display()); + } + } + pub fn target_path(&self) -> &Path { + &self.path + } + pub fn workspace_path(&self) -> Option<&Path> { + if self.metadata.is_some() { + Some(&self.path) + } else { + None + } + } + + pub async fn analyze(&self, all_targets: bool, all_features: bool) -> AnalyzeEventIter { + if let Some(metadata) = &self.metadata + && metadata.root_package().is_some() + { + self.analyze_package(metadata, all_targets, all_features) + .await + } else { + self.analyze_single_file(&self.path).await + } + } + + async fn analyze_package( + &self, + metadata: &cargo_metadata::Metadata, + all_targets: bool, + all_features: bool, + ) -> AnalyzeEventIter { + let package_name = metadata.root_package().as_ref().unwrap().name.to_string(); + let target_dir = metadata.target_directory.as_std_path().join("owl"); + tracing::debug!("clear cargo cache"); + let mut command = toolchain::setup_cargo_command(self.rustc_threads).await; + command + .args(["clean", "--package", &package_name]) + .env("CARGO_TARGET_DIR", &target_dir) + .current_dir(&self.path) + .stdout(Stdio::null()) + .stderr(Stdio::null()); + command.spawn().unwrap().wait().await.ok(); + + let mut command = toolchain::setup_cargo_command(self.rustc_threads).await; + + let mut args = vec!["check", "--workspace"]; + if all_targets { + args.push("--all-targets"); + } + if all_features { + args.push("--all-features"); + } + args.extend_from_slice(&["--keep-going", "--message-format=json"]); + + command + .args(args) + .env("CARGO_TARGET_DIR", &target_dir) + .env_remove("RUSTC_WRAPPER") + .current_dir(&self.path) + .stdout(Stdio::piped()) + .kill_on_drop(true); + + if is_cache() { + set_cache_path(&mut command, target_dir); + } + + if 
!tracing::enabled!(tracing::Level::INFO) { + command.stderr(Stdio::null()); + } + + // Cargo emits `compiler-artifact` per compilation unit. `metadata.packages[*].targets` + // includes lots of targets Cargo won't build for `cargo check` (tests/benches/examples, + // and dependency binaries), which can wildly overcount. + // + // We estimate the total units Cargo will actually build: + // - Workspace members: lib/bin/proc-macro/custom-build; plus test/bench/example with --all-targets + // - Dependencies: lib/proc-macro/custom-build only + let workspace_members: HashSet<_> = metadata.workspace_members.iter().cloned().collect(); + + let package_count = metadata + .packages + .iter() + .map(|p| { + let is_workspace_member = workspace_members.contains(&p.id); + p.targets + .iter() + .filter(|t| { + let always = t.is_lib() + || t.is_proc_macro() + || t.is_custom_build() + || (is_workspace_member && t.is_bin()); + let extra = all_targets + && is_workspace_member + && (t.is_test() || t.is_bench() || t.is_example()); + always || extra + }) + .count() + }) + .sum::() + .max(1); + + tracing::debug!("start analyzing package {package_name}"); + let mut child = command.spawn().unwrap(); + let mut stdout = BufReader::new(child.stdout.take().unwrap()); + + let (sender, receiver) = mpsc::channel(1024); + let notify = Arc::new(Notify::new()); + let notify_c = notify.clone(); + let _handle = tokio::spawn(async move { + // prevent command from dropped + let mut checked_count = 0usize; + + // Cargo emits JSON objects tagged with `{"reason": ...}`. + // rustowlc emits a serialized `Workspace` JSON object. + // + // Distinguish them by attempting to parse any line as a `Workspace` first. + // If that fails, treat it as a cargo message (and optionally parse progress from it). 
+ + let mut buf = Vec::with_capacity(16 * 1024); + loop { + buf.clear(); + match stdout.read_until(b'\n', &mut buf).await { + Ok(0) => break, + Ok(_) => {} + Err(_) => break, + } + + // Trim trailing newline(s) to keep serde_json happy. + while matches!(buf.last(), Some(b'\n' | b'\r')) { + buf.pop(); + } + if buf.is_empty() { + continue; + } + + if let Ok(ws) = serde_json::from_slice::(&buf) { + let event = AnalyzerEvent::Analyzed(ws); + let _ = sender.send(event).await; + continue; + } + + // Not a Workspace line; maybe a Cargo JSON message. + if let Ok(CargoCheckMessage::CompilerArtifact { target }) = + serde_json::from_slice::(&buf) + { + let checked = target.name; + tracing::trace!("crate {checked} checked"); + + checked_count = checked_count.saturating_add(1); + let event = AnalyzerEvent::CrateChecked { + package: checked, + package_index: checked_count, + package_count, + }; + let _ = sender.send(event).await; + } + } + + tracing::debug!("stdout closed"); + notify_c.notify_one(); + }); + + AnalyzeEventIter { + receiver, + notify, + child, + } + } + + async fn analyze_single_file(&self, path: &Path) -> AnalyzeEventIter { + let sysroot = toolchain::get_sysroot().await; + let rustowlc_path = toolchain::get_executable_path("rustowlc").await; + + let mut command = process::Command::new(&rustowlc_path); + command + .arg(&rustowlc_path) // rustowlc triggers when first arg is the path of itself + .arg(format!("--sysroot={}", sysroot.display())) + .arg("--crate-type=lib"); + #[cfg(unix)] + command.arg("-o/dev/null"); + #[cfg(windows)] + command.arg("-oNUL"); + command.arg(path).stdout(Stdio::piped()).kill_on_drop(true); + + toolchain::set_rustc_env(&mut command, &sysroot); + + // When running under `cargo llvm-cov`, ensure the rustowlc subprocess writes its + // coverage somewhere cargo-llvm-cov will pick up. 
+ if let Ok(profile_file) = std::env::var("LLVM_PROFILE_FILE") { + command.env("LLVM_PROFILE_FILE", profile_file); + } + + if !tracing::enabled!(tracing::Level::INFO) { + command.stderr(Stdio::null()); + } + + tracing::debug!("start analyzing {}", path.display()); + let mut child = command.spawn().unwrap(); + let mut stdout = BufReader::new(child.stdout.take().unwrap()); + + let (sender, receiver) = mpsc::channel(1024); + let notify = Arc::new(Notify::new()); + let notify_c = notify.clone(); + let _handle = tokio::spawn(async move { + // prevent command from dropped + + let mut buf = Vec::with_capacity(16 * 1024); + loop { + buf.clear(); + match stdout.read_until(b'\n', &mut buf).await { + Ok(0) => break, + Ok(_) => {} + Err(_) => break, + } + + while matches!(buf.last(), Some(b'\n' | b'\r')) { + buf.pop(); + } + if buf.is_empty() { + continue; + } + + if let Ok(ws) = serde_json::from_slice::(&buf) { + let event = AnalyzerEvent::Analyzed(ws); + let _ = sender.send(event).await; + } + } + + tracing::debug!("stdout closed"); + notify_c.notify_one(); + }); + + AnalyzeEventIter { + receiver, + notify, + child, + } + } +} + +pub struct AnalyzeEventIter { + receiver: mpsc::Receiver, + notify: Arc, + #[allow(unused)] + child: process::Child, +} +impl AnalyzeEventIter { + pub async fn next_event(&mut self) -> Option { + tokio::select! 
{ + v = self.receiver.recv() => v, + _ = self.notify.notified() => { + match self.child.wait().await { + Ok(status) => { + if !status.success() { + tracing::debug!("Analyzer process exited with status: {}", status); + } + } + Err(e) => { + tracing::debug!("Failed to wait for analyzer process: {}", e); + } + } + None + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::async_test; + + async_test!( + new_accepts_single_rust_file_and_has_no_workspace_path, + async { + let dir = tempfile::tempdir().unwrap(); + let target = dir.path().join("main.rs"); + std::fs::write(&target, "fn main() {}\n").unwrap(); + + let analyzer = Analyzer::new(&target, 1).await.unwrap(); + assert_eq!(analyzer.target_path(), target.as_path()); + assert_eq!(analyzer.workspace_path(), None); + } + ); + + async_test!(new_rejects_invalid_paths, async { + let dir = tempfile::tempdir().unwrap(); + let target = dir.path().join("not_a_rust_project"); + std::fs::create_dir_all(&target).unwrap(); + + let err = Analyzer::new(&target, 1).await.unwrap_err(); + assert!(err.to_string().contains("Invalid analysis target")); + }); + + async_test!(analyze_single_file_yields_analyzed_event, async { + let dir = tempfile::tempdir().unwrap(); + let target = dir.path().join("lib.rs"); + std::fs::write(&target, "pub fn f() -> i32 { 1 }\n").unwrap(); + + let analyzer = Analyzer::new(&target, 1).await.unwrap(); + let mut iter = analyzer.analyze(false, false).await; + + // Wait for an `Analyzed` event; otherwise fail with some context. + let mut saw_crate_checked = false; + for _ in 0..50 { + match iter.next_event().await { + Some(AnalyzerEvent::CrateChecked { .. }) => { + saw_crate_checked = true; + } + Some(AnalyzerEvent::Analyzed(ws)) => { + // Workspace emitted by rustowlc should be serializable and non-empty. + // We at least expect it to include this file name somewhere. 
+ let json = serde_json::to_string(&ws).unwrap(); + assert!(json.contains("lib.rs")); + return; + } + None => break, + } + } + + panic!( + "did not receive AnalyzerEvent::Analyzed (saw_crate_checked={})", + saw_crate_checked + ); + }); +} diff --git a/crates/rustowl/src/lsp/backend.rs b/crates/rustowl/src/lsp/backend.rs new file mode 100644 index 00000000..a847b056 --- /dev/null +++ b/crates/rustowl/src/lsp/backend.rs @@ -0,0 +1,816 @@ +use super::analyze::{Analyzer, AnalyzerEvent}; +use crate::lsp::{decoration, progress}; +use crate::models::{Crate, Loc}; +use crate::utils; +use std::collections::{BTreeMap, HashMap}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use tokio::{sync::RwLock, task::JoinSet}; +use tokio_util::sync::CancellationToken; +use tower_lsp_server::jsonrpc; +use tower_lsp_server::ls_types; +use tower_lsp_server::{Client, LanguageServer, LspService}; + +#[derive(serde::Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case")] +pub struct AnalyzeRequest {} +#[derive(serde::Serialize, Clone, Debug)] +pub struct AnalyzeResponse {} + +#[derive(Clone, Copy, Debug)] +pub struct CheckReport { + pub ok: bool, + pub checked_targets: usize, + pub total_targets: Option, + pub duration: std::time::Duration, +} + +/// RustOwl LSP server backend +pub struct Backend { + client: Client, + analyzers: Arc>>, + status: Arc>, + analyzed: Arc>>, + /// Open documents cache to avoid re-reading and re-indexing on each cursor request. 
+ open_docs: Arc>>, + processes: Arc>>, + process_tokens: Arc>>, + work_done_progress: Arc>, + rustc_thread: usize, +} + +#[derive(Clone, Debug)] +struct OpenDoc { + text: Arc, + index: Arc, + line_start_bytes: Arc>, +} + +impl Backend { + pub fn new(rustc_thread: usize) -> impl Fn(Client) -> Self { + move |client: Client| Self { + client, + analyzers: Arc::new(RwLock::new(Vec::new())), + analyzed: Arc::new(RwLock::new(None)), + status: Arc::new(RwLock::new(progress::AnalysisStatus::Finished)), + open_docs: Arc::new(RwLock::new(HashMap::new())), + processes: Arc::new(RwLock::new(JoinSet::new())), + process_tokens: Arc::new(RwLock::new(BTreeMap::new())), + work_done_progress: Arc::new(RwLock::new(false)), + rustc_thread, + } + } + + async fn add_analyze_target(&self, path: &Path) -> bool { + if let Ok(new_analyzer) = Analyzer::new(&path, self.rustc_thread).await { + let mut analyzers = self.analyzers.write().await; + for analyzer in &*analyzers { + if analyzer.target_path() == new_analyzer.target_path() { + return true; + } + } + analyzers.push(new_analyzer); + true + } else { + false + } + } + + pub async fn analyze(&self, _params: AnalyzeRequest) -> jsonrpc::Result { + tracing::debug!("rustowl/analyze request received"); + self.do_analyze().await; + Ok(AnalyzeResponse {}) + } + + async fn do_analyze(&self) { + self.shutdown_subprocesses().await; + self.analyze_with_options(false, false).await; + } + + async fn analyze_with_options(&self, all_targets: bool, all_features: bool) { + tracing::trace!("wait 100ms for rust-analyzer"); + tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; + + tracing::debug!("stop running analysis processes"); + self.shutdown_subprocesses().await; + + tracing::debug!("start analysis"); + { + *self.status.write().await = progress::AnalysisStatus::Analyzing; + } + let analyzers = { self.analyzers.read().await.clone() }; + + tracing::debug!("analyze {} packages...", analyzers.len()); + for analyzer in analyzers { + let 
analyzed = self.analyzed.clone(); + let client = self.client.clone(); + let work_done_progress = self.work_done_progress.clone(); + let cancellation_token = CancellationToken::new(); + + let cancellation_token_key = { + let token = cancellation_token.clone(); + let mut tokens = self.process_tokens.write().await; + let key = if let Some(key) = tokens.last_entry().map(|v| *v.key()) { + key + 1 + } else { + 1 + }; + tokens.insert(key, token); + key + }; + + let process_tokens = self.process_tokens.clone(); + self.processes.write().await.spawn(async move { + let mut progress_token = None; + if *work_done_progress.read().await { + progress_token = + Some(progress::ProgressToken::begin(client, None::<&str>).await) + }; + + let mut iter = analyzer.analyze(all_targets, all_features).await; + while let Some(event) = tokio::select! { + _ = cancellation_token.cancelled() => None, + event = iter.next_event() => event, + } { + match event { + AnalyzerEvent::CrateChecked { + package, + package_index, + package_count, + } => { + if let Some(token) = &progress_token { + let percentage: u32 = ((package_index * 100 / package_count) + .min(100)) + .try_into() + .unwrap_or(100); + let msg = format!( + "Checking {package} ({}/{})", + package_index.saturating_add(1), + package_count + ); + token.report(Some(msg), Some(percentage)); + } + } + AnalyzerEvent::Analyzed(ws) => { + let write = &mut *analyzed.write().await; + for krate in ws.0.into_values() { + if let Some(write) = write { + write.merge(krate); + } else { + *write = Some(krate); + } + } + } + } + } + // remove cancellation token from list + process_tokens.write().await.remove(&cancellation_token_key); + + if let Some(progress_token) = progress_token { + progress_token.finish(); + } + }); + } + + let processes = self.processes.clone(); + let status = self.status.clone(); + let analyzed = self.analyzed.clone(); + + tokio::spawn(async move { + while { processes.write().await.join_next().await }.is_some() {} + let mut status = 
status.write().await; + let analyzed = analyzed.write().await; + if *status != progress::AnalysisStatus::Error { + if analyzed.as_ref().map(|v| v.0.len()).unwrap_or(0) == 0 { + *status = progress::AnalysisStatus::Error; + } else { + *status = progress::AnalysisStatus::Finished; + } + } + }); + } + + async fn decos( + &self, + filepath: &Path, + position: Loc, + ) -> std::result::Result, progress::AnalysisStatus> { + let mut selected = decoration::SelectLocal::new(position); + let mut error = progress::AnalysisStatus::Error; + + let analyzed_guard = self.analyzed.read().await; + let Some(analyzed) = analyzed_guard.as_ref() else { + return Err(error); + }; + + // Fast path: LSP file paths should be UTF-8 and match our stored file keys. + // Fall back to the Path comparison if the direct lookup misses. + let mut matched_file = filepath + .to_str() + .and_then(|path_str| analyzed.0.get(path_str)); + + if matched_file.is_none() { + for (filename, file) in analyzed.0.iter() { + if filepath == Path::new(filename) { + matched_file = Some(file); + break; + } + } + } + + let Some(file) = matched_file else { + return Err(error); + }; + + if !file.items.is_empty() { + error = progress::AnalysisStatus::Finished; + } + + for item in &file.items { + utils::mir_visit(item, &mut selected); + } + + let selected_local = selected.selected(); + if selected_local.is_none() { + return Err(error); + } + + let mut calc = decoration::CalcDecos::new(selected_local.iter().copied()); + for item in &file.items { + utils::mir_visit(item, &mut calc); + } + + calc.handle_overlapping(); + let decos = calc.decorations(); + if decos.is_empty() { + Err(error) + } else { + Ok(decos) + } + } + + pub async fn cursor( + &self, + params: decoration::CursorRequest, + ) -> jsonrpc::Result { + let is_analyzed = self.analyzed.read().await.is_some(); + let status = *self.status.read().await; + + let Some(path) = params.path() else { + return Ok(decoration::Decorations { + is_analyzed, + status, + path: None, + 
decorations: Vec::new(), + }); + }; + + let (_text, index) = if let Some(open) = self.open_docs.read().await.get(&path).cloned() { + (open.text, open.index) + } else if let Ok(text) = tokio::fs::read_to_string(&path).await { + let index = Arc::new(utils::LineCharIndex::new(&text)); + (Arc::new(text), index) + } else { + return Ok(decoration::Decorations { + is_analyzed, + status, + path: Some(path), + decorations: Vec::new(), + }); + }; + + let position = params.position(); + let pos = Loc(index.line_char_to_index(position.line, position.character)); + let (decos, status) = match self.decos(&path, pos).await { + Ok(v) => (v, status), + Err(e) => ( + Vec::new(), + if status == progress::AnalysisStatus::Finished { + e + } else { + status + }, + ), + }; + + let mut decorations = Vec::with_capacity(decos.len()); + decorations.extend(decos.into_iter().map(|v| v.to_lsp_range(index.as_ref()))); + + Ok(decoration::Decorations { + is_analyzed, + status, + path: Some(path), + decorations, + }) + } + + pub async fn check(path: impl AsRef, rustc_thread: usize) -> bool { + Self::check_with_options(path, false, false, rustc_thread).await + } + + pub async fn check_report_with_options( + path: impl AsRef, + all_targets: bool, + all_features: bool, + rustc_thread: usize, + ) -> CheckReport { + use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle}; + use std::io::IsTerminal; + + let start = std::time::Instant::now(); + let path = path.as_ref(); + let (service, _) = LspService::build(Backend::new(rustc_thread)).finish(); + let backend = service.inner(); + + if !backend.add_analyze_target(path).await { + return CheckReport { + ok: false, + checked_targets: 0, + total_targets: None, + duration: start.elapsed(), + }; + } + + let progress_bar = if std::io::stderr().is_terminal() { + let progress_bar = ProgressBar::new(0); + progress_bar.set_draw_target(ProgressDrawTarget::stderr()); + progress_bar.set_style( + ProgressStyle::with_template( + "{spinner:.green} {wide_msg} 
[{bar:40.cyan/blue}] {pos}/{len}", + ) + .unwrap(), + ); + progress_bar.set_message("Analyzing..."); + Some(progress_bar) + } else { + None + }; + + let _progress_guard = progress_bar + .as_ref() + .cloned() + .map(crate::ActiveProgressBarGuard::set); + + // Re-analyze, but consume the iterator and use it to power a CLI progress bar. + backend.shutdown_subprocesses().await; + let analyzers = { backend.analyzers.read().await.clone() }; + + let mut checked_targets = 0usize; + let mut total_targets = None; + let mut last_log_at = std::time::Instant::now(); + let mut analyzed: Option = None; + + for analyzer in analyzers { + let mut iter = analyzer.analyze(all_targets, all_features).await; + while let Some(event) = iter.next_event().await { + match event { + AnalyzerEvent::CrateChecked { + package, + package_index, + package_count, + } => { + checked_targets = package_index; + total_targets = Some(package_count); + + if let Some(pb) = &progress_bar { + pb.set_length(package_count as u64); + pb.set_position(package_index as u64); + pb.set_message(format!("Checking {package}")); + } else if last_log_at.elapsed() >= std::time::Duration::from_secs(1) { + eprintln!("Checking {package} ({package_index}/{package_count})"); + last_log_at = std::time::Instant::now(); + } + } + AnalyzerEvent::Analyzed(ws) => { + for krate in ws.0.into_values() { + if let Some(write) = &mut analyzed { + write.merge(krate); + } else { + analyzed = Some(krate); + } + } + } + } + } + } + + if let Some(pb) = progress_bar { + pb.finish_and_clear(); + } + + let ok = analyzed.as_ref().map(|v| !v.0.is_empty()).unwrap_or(false); + + CheckReport { + ok, + checked_targets, + total_targets, + duration: start.elapsed(), + } + } + + pub async fn check_with_options( + path: impl AsRef, + all_targets: bool, + all_features: bool, + rustc_thread: usize, + ) -> bool { + Self::check_report_with_options(path, all_targets, all_features, rustc_thread) + .await + .ok + } + + #[cfg(feature = "bench")] + pub async fn 
load_analyzed_state_for_bench( + &self, + path: impl AsRef, + all_targets: bool, + all_features: bool, + ) -> bool { + let path = path.as_ref(); + + if !self.add_analyze_target(path).await { + *self.analyzed.write().await = None; + *self.status.write().await = progress::AnalysisStatus::Error; + return false; + } + + self.shutdown_subprocesses().await; + *self.status.write().await = progress::AnalysisStatus::Analyzing; + + let analyzers = { self.analyzers.read().await.clone() }; + let mut analyzed: Option = None; + + for analyzer in analyzers { + let mut iter = analyzer.analyze(all_targets, all_features).await; + while let Some(event) = iter.next_event().await { + if let AnalyzerEvent::Analyzed(ws) = event { + for krate in ws.0.into_values() { + if let Some(write) = &mut analyzed { + write.merge(krate); + } else { + analyzed = Some(krate); + } + } + } + } + } + + let ok = analyzed.as_ref().map(|v| !v.0.is_empty()).unwrap_or(false); + *self.analyzed.write().await = analyzed; + *self.status.write().await = if ok { + progress::AnalysisStatus::Finished + } else { + progress::AnalysisStatus::Error + }; + + ok + } + + pub async fn shutdown_subprocesses(&self) { + { + let mut tokens = self.process_tokens.write().await; + while let Some((_, token)) = tokens.pop_last() { + token.cancel(); + } + } + self.processes.write().await.shutdown().await; + } +} + +impl LanguageServer for Backend { + async fn initialize( + &self, + params: ls_types::InitializeParams, + ) -> jsonrpc::Result { + let mut workspaces = Vec::new(); + if let Some(wss) = params.workspace_folders { + workspaces.extend( + wss.iter() + .filter_map(|v| v.uri.to_file_path().map(|p| p.into_owned())), + ); + } + for path in workspaces { + self.add_analyze_target(&path).await; + } + self.do_analyze().await; + + let sync_options = ls_types::TextDocumentSyncOptions { + open_close: Some(true), + save: Some(ls_types::TextDocumentSyncSaveOptions::Supported(true)), + change: 
Some(ls_types::TextDocumentSyncKind::INCREMENTAL), + ..Default::default() + }; + let workspace_cap = ls_types::WorkspaceServerCapabilities { + workspace_folders: Some(ls_types::WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(ls_types::OneOf::Left(true)), + }), + ..Default::default() + }; + let server_cap = ls_types::ServerCapabilities { + text_document_sync: Some(ls_types::TextDocumentSyncCapability::Options(sync_options)), + workspace: Some(workspace_cap), + ..Default::default() + }; + let init_res = ls_types::InitializeResult { + capabilities: server_cap, + ..Default::default() + }; + let health_checker = async move { + if let Some(process_id) = params.process_id { + loop { + tokio::time::sleep(tokio::time::Duration::from_secs(30)).await; + if !process_alive::state(process_alive::Pid::from(process_id)).is_alive() { + panic!("The client process is dead"); + } + } + } + }; + if params + .capabilities + .window + .and_then(|v| v.work_done_progress) + .unwrap_or(false) + { + *self.work_done_progress.write().await = true; + } + tokio::spawn(health_checker); + Ok(init_res) + } + + async fn did_change_workspace_folders( + &self, + params: ls_types::DidChangeWorkspaceFoldersParams, + ) { + for added in params.event.added { + if let Some(path) = added.uri.to_file_path() + && self.add_analyze_target(&path).await + { + self.do_analyze().await; + } + } + } + + async fn did_open(&self, params: ls_types::DidOpenTextDocumentParams) { + if let Some(path) = params.text_document.uri.to_file_path() + && params.text_document.language_id == "rust" + { + let text = Arc::new(params.text_document.text); + let index = Arc::new(utils::LineCharIndex::new(&text)); + let line_start_bytes = Arc::new(utils::line_start_bytes(&text)); + let path = path.into_owned(); + self.open_docs.write().await.insert( + path.clone(), + OpenDoc { + text, + index, + line_start_bytes, + }, + ); + + if path.is_file() && self.add_analyze_target(&path).await { + 
self.do_analyze().await; + } + } + } + + async fn did_change(&self, params: ls_types::DidChangeTextDocumentParams) { + if let Some(path) = params.text_document.uri.to_file_path() { + if params.content_changes.is_empty() { + self.open_docs.write().await.remove(path.as_ref()); + } else { + let mut docs = self.open_docs.write().await; + if let Some(open) = docs.get_mut(path.as_ref()) { + // Apply ordered incremental edits. If anything looks odd, drop the cache. + let mut text = (*open.text).clone(); + let mut line_starts = utils::line_start_bytes(&text); + let mut drop_cache = false; + + for change in ¶ms.content_changes { + if let Some(range) = change.range { + let start = utils::line_utf16_to_byte_offset( + &text, + &line_starts, + range.start.line, + range.start.character, + ); + let end = utils::line_utf16_to_byte_offset( + &text, + &line_starts, + range.end.line, + range.end.character, + ); + if start > end || end > text.len() { + drop_cache = true; + break; + } + text.replace_range(start..end, &change.text); + line_starts = utils::line_start_bytes(&text); + } else { + // Full text replacement. 
+ text = change.text.clone(); + line_starts = utils::line_start_bytes(&text); + } + } + + if drop_cache { + docs.remove(path.as_ref()); + } else { + open.text = Arc::new(text); + open.index = Arc::new(utils::LineCharIndex::new(open.text.as_ref())); + open.line_start_bytes = Arc::new(line_starts); + } + } + } + } + + *self.analyzed.write().await = None; + self.shutdown_subprocesses().await; + } + + async fn shutdown(&self) -> jsonrpc::Result<()> { + self.shutdown_subprocesses().await; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tower_lsp_server::ls_types::{ + self, DidChangeTextDocumentParams, DidOpenTextDocumentParams, + }; + + fn tmp_workspace() -> tempfile::TempDir { + tempfile::tempdir().expect("create tempdir") + } + + async fn write_test_workspace(dir: &tempfile::TempDir, file_contents: &str) -> PathBuf { + let root = dir.path(); + tokio::fs::create_dir_all(root.join("src")) + .await + .expect("create src"); + tokio::fs::write( + root.join("Cargo.toml"), + "[package]\nname = \"t\"\nversion = \"0.1.0\"\nedition = \"2021\"\n", + ) + .await + .expect("write Cargo.toml"); + let lib = root.join("src").join("lib.rs"); + tokio::fs::write(&lib, file_contents) + .await + .expect("write lib.rs"); + lib + } + + async fn init_backend( + rustc_thread: usize, + ) -> ( + tower_lsp_server::LspService, + tower_lsp_server::ClientSocket, + ) { + LspService::build(Backend::new(rustc_thread)).finish() + } + + async fn initialize_with_workspace( + backend: &Backend, + workspace: &Path, + ) -> ls_types::InitializeResult { + let uri = ls_types::Uri::from_file_path(workspace).expect("workspace uri"); + let params = ls_types::InitializeParams { + workspace_folders: Some(vec![ls_types::WorkspaceFolder { + uri, + name: "ws".to_string(), + }]), + capabilities: ls_types::ClientCapabilities { + window: Some(ls_types::WindowClientCapabilities { + work_done_progress: Some(true), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + }; + + 
backend.initialize(params).await.expect("initialize") + } + + use crate::async_test; + + async_test!( + initialize_sets_work_done_progress_and_accepts_workspace_folder, + async { + let dir = tmp_workspace(); + let _lib = write_test_workspace(&dir, "pub fn f() -> i32 { 1 }\n").await; + + let (service, _socket) = init_backend(1).await; + let backend = service.inner(); + let init = initialize_with_workspace(backend, dir.path()).await; + + assert!(init.capabilities.text_document_sync.is_some()); + assert!(*backend.work_done_progress.read().await); + + assert!(!backend.analyzers.read().await.is_empty()); + } + ); + + async_test!( + did_open_caches_doc_and_cursor_handles_empty_analysis, + async { + let dir = tmp_workspace(); + let lib = write_test_workspace(&dir, "pub fn f() -> i32 { 1 }\n").await; + + let (service, _socket) = init_backend(1).await; + let backend = service.inner(); + + let uri = ls_types::Uri::from_file_path(&lib).expect("lib uri"); + backend + .did_open(DidOpenTextDocumentParams { + text_document: ls_types::TextDocumentItem { + uri: uri.clone(), + language_id: "rust".to_string(), + version: 1, + text: "pub fn f() -> i32 { 1 }\n".to_string(), + }, + }) + .await; + + assert!(backend.open_docs.read().await.contains_key(&lib)); + + let decorations = backend + .cursor(decoration::CursorRequest { + document: ls_types::TextDocumentIdentifier { uri }, + position: ls_types::Position { + line: 0, + character: 10, + }, + }) + .await + .expect("cursor"); + + assert_eq!(decorations.path.as_deref(), Some(lib.as_path())); + assert!(decorations.decorations.is_empty()); + } + ); + + async_test!( + did_change_drops_open_doc_on_invalid_edit_and_resets_state, + async { + let dir = tmp_workspace(); + let lib = write_test_workspace(&dir, "pub fn f() -> i32 { 1 }\n").await; + + let (service, _socket) = init_backend(1).await; + let backend = service.inner(); + + let uri = ls_types::Uri::from_file_path(&lib).expect("lib uri"); + backend + .did_open(DidOpenTextDocumentParams { + 
text_document: ls_types::TextDocumentItem { + uri: uri.clone(), + language_id: "rust".to_string(), + version: 1, + text: "pub fn f() -> i32 { 1 }\n".to_string(), + }, + }) + .await; + + assert!(backend.open_docs.read().await.contains_key(&lib)); + + // A clearly invalid edit should cause the backend to drop the cache. + // The simplest portable way is "start > end". + backend + .did_change(DidChangeTextDocumentParams { + text_document: ls_types::VersionedTextDocumentIdentifier { + uri: uri.clone(), + version: 2, + }, + content_changes: vec![ls_types::TextDocumentContentChangeEvent { + range: Some(ls_types::Range { + start: ls_types::Position { + line: 0, + character: 2, + }, + end: ls_types::Position { + line: 0, + character: 1, + }, + }), + range_length: None, + text: "x".to_string(), + }], + }) + .await; + + assert!(!backend.open_docs.read().await.contains_key(&lib)); + assert!(backend.analyzed.read().await.is_none()); + } + ); + + async_test!(check_report_handles_invalid_paths, async { + let report = + Backend::check_report_with_options("/this/path/does/not/exist", false, false, 1).await; + assert!(!report.ok); + assert_eq!(report.checked_targets, 0); + assert!(report.total_targets.is_none()); + }); +} diff --git a/src/lsp/decoration.rs b/crates/rustowl/src/lsp/decoration.rs similarity index 57% rename from src/lsp/decoration.rs rename to crates/rustowl/src/lsp/decoration.rs index 8efd927f..486fc6b5 100644 --- a/src/lsp/decoration.rs +++ b/crates/rustowl/src/lsp/decoration.rs @@ -1,10 +1,12 @@ -use crate::{lsp::progress, models::*, utils}; -use std::collections::HashSet; +use crate::lsp::progress; +use crate::models::FoldIndexSet as HashSet; +use crate::models::{FnLocal, Loc, MirDecl, MirRval, MirStatement, MirTerminator, Range}; +use crate::utils; use std::path::PathBuf; -use tower_lsp::lsp_types; +use tower_lsp_server::ls_types; -// TODO: Variable name should be checked? 
-//const ASYNC_MIR_VARS: [&str; 2] = ["_task_context", "__awaitee"]; +// Variable names that should be filtered out during analysis +const ASYNC_MIR_VARS: [&str; 2] = ["_task_context", "__awaitee"]; const ASYNC_RESUME_TY: [&str; 2] = [ "std::future::ResumeTy", "impl std::future::Future", @@ -57,29 +59,29 @@ pub enum Deco { }, } impl Deco { - pub fn to_lsp_range(&self, s: &str) -> Deco { - match self.clone() { + pub fn to_lsp_range(&self, index: &utils::LineCharIndex) -> Deco { + match self { Deco::Lifetime { local, range, hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::Lifetime { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } Deco::ImmBorrow { @@ -88,21 +90,21 @@ impl Deco { hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::ImmBorrow { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } Deco::MutBorrow { @@ -111,21 +113,21 @@ impl Deco { 
hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::MutBorrow { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } Deco::Move { @@ -134,21 +136,21 @@ impl Deco { hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::Move { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } Deco::Call { @@ -157,21 +159,21 @@ impl Deco { hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::Call { - local, - range: lsp_types::Range { start, end }, - hover_text, - 
overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } Deco::SharedMut { @@ -180,21 +182,21 @@ impl Deco { hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::SharedMut { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } @@ -204,21 +206,21 @@ impl Deco { hover_text, overlapped, } => { - let start = utils::index_to_line_char(s, range.from()); - let end = utils::index_to_line_char(s, range.until()); - let start = lsp_types::Position { + let start = index.index_to_line_char(range.from()); + let end = index.index_to_line_char(range.until()); + let start = ls_types::Position { line: start.0, character: start.1, }; - let end = lsp_types::Position { + let end = ls_types::Position { line: end.0, character: end.1, }; Deco::Outlive { - local, - range: lsp_types::Range { start, end }, - hover_text, - overlapped, + local: *local, + range: ls_types::Range { start, end }, + hover_text: hover_text.clone(), + overlapped: *overlapped, } } } @@ -229,25 +231,25 @@ pub struct Decorations { pub is_analyzed: bool, pub status: progress::AnalysisStatus, pub path: Option, - pub decorations: Vec>, + pub decorations: Vec>, } #[derive(serde::Deserialize, Clone, Debug)] #[serde(rename_all = "snake_case")] pub struct CursorRequest { - pub position: lsp_types::Position, - pub document: lsp_types::TextDocumentIdentifier, + pub position: ls_types::Position, + 
pub document: ls_types::TextDocumentIdentifier, } impl CursorRequest { pub fn path(&self) -> Option { - self.document.uri.to_file_path().ok() + self.document.uri.to_file_path().map(|p| p.into_owned()) } - pub fn position(&self) -> lsp_types::Position { + pub fn position(&self) -> ls_types::Position { self.position } } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, PartialEq)] enum SelectReason { Var, Move, @@ -288,8 +290,8 @@ impl SelectLocal { } } (SelectReason::Call, SelectReason::Call) => { - // TODO: select narrower when callee is method - if old_range.size() < range.size() { + // Select narrower range for method calls (prefer tighter spans) + if range.size() < old_range.size() { self.selected = Some((reason, local, range)); } } @@ -307,13 +309,25 @@ impl SelectLocal { } impl utils::MirVisitor for SelectLocal { fn visit_decl(&mut self, decl: &MirDecl) { - let (local, ty) = match decl { - MirDecl::User { local, ty, .. } => (local, ty), - MirDecl::Other { local, ty, .. } => (local, ty), + let (local, ty, name) = match decl { + MirDecl::User { + local, ty, name, .. + } => (local, ty, Some(name)), + MirDecl::Other { local, ty, .. } => (local, ty, None), }; + + // Filter out async-related types if ASYNC_RESUME_TY.contains(&ty.as_str()) { return; } + + // Filter out async-related variable names + if let Some(var_name) = name + && ASYNC_MIR_VARS.contains(&var_name.as_str()) + { + return; + } + self.candidate_local_decls.push(*local); if let MirDecl::User { local, span, .. } = decl { self.select(SelectReason::Var, *local, *span); @@ -396,12 +410,12 @@ impl CalcDecos { let mut j = 0; while j < i { - let prev = &self.decorations[j]; - if prev == &self.decorations[i] { + if self.decorations[j] == self.decorations[i] { self.decorations.remove(i); continue 'outer; } - let (prev_range, prev_overlapped) = match prev { + + let (prev_range, prev_overlapped) = match &self.decorations[j] { Deco::Lifetime { range, overlapped, .. 
} @@ -431,11 +445,24 @@ impl CalcDecos { } if let Some(common) = utils::common_range(current_range, prev_range) { + // Mark both decorations as overlapped on true intersection. + match &mut self.decorations[i] { + Deco::Lifetime { overlapped, .. } + | Deco::ImmBorrow { overlapped, .. } + | Deco::MutBorrow { overlapped, .. } + | Deco::Move { overlapped, .. } + | Deco::Call { overlapped, .. } + | Deco::SharedMut { overlapped, .. } + | Deco::Outlive { overlapped, .. } => { + *overlapped = true; + } + } + let mut new_decos = Vec::new(); let non_overlapping = utils::exclude_ranges(vec![prev_range], vec![common]); for range in non_overlapping { - let new_deco = match prev { + let new_deco = match &self.decorations[j] { Deco::Lifetime { local, hover_text, .. } => Deco::Lifetime { @@ -591,9 +618,9 @@ impl utils::MirVisitor for CalcDecos { }; // merge Drop object lives let drop_copy_live = if *drop { - utils::eliminated_ranges(drop_range.clone()) + utils::eliminated_ranges_small(drop_range.clone()) } else { - utils::eliminated_ranges(lives.clone()) + utils::eliminated_ranges_small(lives.clone()) }; for range in &drop_copy_live { self.decorations.push(Deco::Lifetime { @@ -603,8 +630,9 @@ impl utils::MirVisitor for CalcDecos { overlapped: false, }); } - let mut borrow_ranges = shared_borrow.clone(); - borrow_ranges.extend_from_slice(mutable_borrow); + let mut borrow_ranges = Vec::with_capacity(shared_borrow.len() + mutable_borrow.len()); + borrow_ranges.extend(shared_borrow.iter().copied()); + borrow_ranges.extend(mutable_borrow.iter().copied()); let shared_mut = utils::common_ranges(&borrow_ranges); for range in shared_mut { self.decorations.push(Deco::SharedMut { @@ -614,7 +642,7 @@ impl utils::MirVisitor for CalcDecos { overlapped: false, }); } - let outlive = utils::exclude_ranges(must_live_at.clone(), drop_copy_live); + let outlive = utils::exclude_ranges_small(must_live_at.clone(), drop_copy_live); for range in outlive { self.decorations.push(Deco::Outlive { local, @@ 
-709,4 +737,275 @@ impl utils::MirVisitor for CalcDecos { } } -// TODO: new test +#[cfg(test)] +mod tests { + use super::*; + use crate::models::{FnLocal, Loc, MirDecl, Range}; + use crate::utils::MirVisitor; + use ecow::EcoVec; + + #[test] + fn test_async_variable_filtering() { + let mut selector = SelectLocal::new(Loc(10)); + + // Test that async variables are filtered out + let mut lives_vec: EcoVec = EcoVec::new(); + lives_vec.push(Range::new(Loc(0), Loc(20)).unwrap()); + + let mut drop_range_vec: EcoVec = EcoVec::new(); + drop_range_vec.push(Range::new(Loc(15), Loc(25)).unwrap()); + + let async_var_decl = MirDecl::User { + local: FnLocal::new(1, 1), + name: "_task_context".into(), + ty: "i32".into(), + lives: lives_vec, + shared_borrow: EcoVec::new(), + mutable_borrow: EcoVec::new(), + drop_range: drop_range_vec, + must_live_at: EcoVec::new(), + drop: false, + span: Range::new(Loc(5), Loc(15)).unwrap(), + }; + + selector.visit_decl(&async_var_decl); + + // The async variable should be filtered out, so no candidates should be added + assert!(selector.candidate_local_decls.is_empty()); + } + + #[test] + fn test_regular_variable_not_filtered() { + let mut selector = SelectLocal::new(Loc(10)); + + // Test that regular variables are not filtered out + let mut lives_vec: EcoVec = EcoVec::new(); + lives_vec.push(Range::new(Loc(0), Loc(20)).unwrap()); + + let mut drop_range_vec: EcoVec = EcoVec::new(); + drop_range_vec.push(Range::new(Loc(15), Loc(25)).unwrap()); + + let regular_var_decl = MirDecl::User { + local: FnLocal::new(1, 1), + name: "my_var".into(), + ty: "i32".into(), + lives: lives_vec, + shared_borrow: EcoVec::new(), + mutable_borrow: EcoVec::new(), + drop_range: drop_range_vec, + must_live_at: EcoVec::new(), + drop: false, + span: Range::new(Loc(5), Loc(15)).unwrap(), + }; + + selector.visit_decl(®ular_var_decl); + + // The regular variable should not be filtered out + assert_eq!(selector.candidate_local_decls.len(), 1); + 
assert_eq!(selector.candidate_local_decls[0], FnLocal::new(1, 1)); + } + + #[test] + fn test_call_selection_prefers_narrower_range() { + let mut selector = SelectLocal::new(Loc(10)); + let local = FnLocal::new(1, 1); + + // Add local to candidates + selector.candidate_local_decls.push(local); + + // First call with wider range + let wide_range = Range::new(Loc(5), Loc(20)).unwrap(); + selector.select(SelectReason::Call, local, wide_range); + + // Second call with narrower range + let narrow_range = Range::new(Loc(8), Loc(15)).unwrap(); + selector.select(SelectReason::Call, local, narrow_range); + + // Should select the narrower range (method call preference) + let selected = selector.selected(); + assert_eq!(selected, Some(local)); + + // Verify the selected range is the narrower one + if let Some((reason, _, range)) = selector.selected { + assert_eq!(reason, SelectReason::Call); + assert_eq!(range, narrow_range); + } + } + + #[test] + fn select_local_ignores_non_candidates() { + let mut selector = SelectLocal::new(Loc(10)); + let local = FnLocal::new(1, 1); + + // Not adding it to candidates means select() should ignore it. 
+ selector.select( + SelectReason::Var, + local, + Range::new(Loc(0), Loc(20)).unwrap(), + ); + + assert!(selector.selected().is_none()); + } + + #[test] + fn select_local_var_prefers_narrower_range() { + let mut selector = SelectLocal::new(Loc(10)); + let local = FnLocal::new(1, 1); + selector.candidate_local_decls.push(local); + + let wide = Range::new(Loc(0), Loc(20)).unwrap(); + let narrow = Range::new(Loc(8), Loc(11)).unwrap(); + + selector.select(SelectReason::Var, local, wide); + selector.select(SelectReason::Var, local, narrow); + + assert_eq!(selector.selected(), Some(local)); + let (reason, selected_local, selected_range) = selector.selected.unwrap(); + assert_eq!(reason, SelectReason::Var); + assert_eq!(selected_local, local); + assert_eq!(selected_range, narrow); + } + + #[test] + fn select_local_var_wins_over_borrow_selection() { + let mut selector = SelectLocal::new(Loc(10)); + let local = FnLocal::new(1, 1); + selector.candidate_local_decls.push(local); + + let borrow_range = Range::new(Loc(9), Loc(12)).unwrap(); + selector.select(SelectReason::Borrow, local, borrow_range); + + let var_range = Range::new(Loc(9), Loc(11)).unwrap(); + selector.select(SelectReason::Var, local, var_range); + + assert_eq!(selector.selected(), Some(local)); + let (reason, _, range) = selector.selected.unwrap(); + assert_eq!(reason, SelectReason::Var); + assert_eq!(range, var_range); + } + + #[test] + fn calc_decos_dedupes_call_ranges() { + let local = FnLocal::new(1, 1); + + // Candidate is populated by visiting its declaration. 
+ let decl = MirDecl::User { + local, + name: "x".into(), + ty: "i32".into(), + lives: EcoVec::new(), + shared_borrow: EcoVec::new(), + mutable_borrow: EcoVec::new(), + drop_range: EcoVec::new(), + must_live_at: EcoVec::new(), + drop: false, + span: Range::new(Loc(0), Loc(1)).unwrap(), + }; + + let mut select = SelectLocal::new(Loc(5)); + select.visit_decl(&decl); + assert!(select.selected().is_none()); + + let selected = [local]; + let mut calc = CalcDecos::new(selected); + + // A narrow call span exists first. + calc.visit_term(&MirTerminator::Call { + destination_local: local, + fn_span: Range::new(Loc(4), Loc(6)).unwrap(), + }); + + // The super-range call should be ignored (it would only add noise). + calc.visit_term(&MirTerminator::Call { + destination_local: local, + fn_span: Range::new(Loc(0), Loc(10)).unwrap(), + }); + + // And a sub-range should replace the existing one. + calc.visit_term(&MirTerminator::Call { + destination_local: local, + fn_span: Range::new(Loc(4), Loc(5)).unwrap(), + }); + + let decorations = calc.decorations(); + let call_count = decorations + .iter() + .filter(|d| matches!(d, Deco::Call { .. })) + .count(); + assert_eq!(call_count, 1); + + let call_range = decorations.iter().find_map(|d| { + if let Deco::Call { range, .. } = d { + Some(*range) + } else { + None + } + }); + assert_eq!(call_range, Some(Range::new(Loc(4), Loc(5)).unwrap())); + } + + #[test] + fn calc_decos_sets_overlapped_on_intersection() { + let local = FnLocal::new(1, 1); + let selected = [local]; + let mut calc = CalcDecos::new(selected); + + calc.decorations.push(Deco::ImmBorrow { + local, + range: Range::new(Loc(0), Loc(10)).unwrap(), + hover_text: "immutable borrow".to_string(), + overlapped: false, + }); + calc.decorations.push(Deco::Move { + local, + range: Range::new(Loc(5), Loc(15)).unwrap(), + hover_text: "variable moved".to_string(), + overlapped: false, + }); + + calc.handle_overlapping(); + + // Both should have overlapped=true once overlap is detected. 
+ let overlapped = calc + .decorations + .iter() + .filter(|d| match d { + Deco::ImmBorrow { overlapped, .. } => *overlapped, + Deco::Move { overlapped, .. } => *overlapped, + _ => false, + }) + .count(); + assert_eq!(overlapped, 2); + } + + #[test] + fn calc_decos_does_not_mark_touching_ranges_as_overlapping() { + let local = FnLocal::new(1, 1); + let selected = [local]; + let mut calc = CalcDecos::new(selected); + + // Touching at the boundary (until == from) should not count as overlap. + calc.decorations.push(Deco::ImmBorrow { + local, + range: Range::new(Loc(0), Loc(10)).unwrap(), + hover_text: "immutable borrow".to_string(), + overlapped: false, + }); + calc.decorations.push(Deco::Move { + local, + range: Range::new(Loc(10), Loc(20)).unwrap(), + hover_text: "variable moved".to_string(), + overlapped: false, + }); + + calc.handle_overlapping(); + + let any_overlapped = calc.decorations.iter().any(|d| match d { + Deco::ImmBorrow { overlapped, .. } => *overlapped, + Deco::Move { overlapped, .. 
} => *overlapped, + _ => false, + }); + + assert!(!any_overlapped); + } +} diff --git a/crates/rustowl/src/lsp/progress.rs b/crates/rustowl/src/lsp/progress.rs new file mode 100644 index 00000000..2c9f1794 --- /dev/null +++ b/crates/rustowl/src/lsp/progress.rs @@ -0,0 +1,157 @@ +use serde::Serialize; +use tower_lsp_server::{Client, ls_types}; + +pub trait ProgressClient: Clone + Send + Sync + 'static { + fn send_request(&self, token: ls_types::NumberOrString); + fn send_progress(&self, token: ls_types::NumberOrString, value: ls_types::ProgressParamsValue); +} + +impl ProgressClient for Client { + fn send_request(&self, token: ls_types::NumberOrString) { + let client = self.clone(); + tokio::spawn(async move { + client + .send_request::( + ls_types::WorkDoneProgressCreateParams { token }, + ) + .await + .ok(); + }); + } + + fn send_progress(&self, token: ls_types::NumberOrString, value: ls_types::ProgressParamsValue) { + let client = self.clone(); + tokio::spawn(async move { + client + .send_notification::(ls_types::ProgressParams { + token, + value, + }) + .await; + }); + } +} + +#[derive(Serialize, Clone, Copy, PartialEq, Eq, Debug)] +#[serde(rename_all = "snake_case")] +pub enum AnalysisStatus { + Analyzing, + Finished, + Error, +} + +pub struct ProgressToken { + client: Option, + token: Option, +} + +impl ProgressToken { + pub async fn begin(client: Client, message: Option) -> Self { + ProgressToken::::begin_with_client(client, message) + } +} + +impl ProgressToken { + pub fn begin_with_client(client: C, message: Option) -> Self { + let token = ls_types::NumberOrString::String(format!("{}", uuid::Uuid::new_v4())); + client.send_request(token.clone()); + + let value = ls_types::ProgressParamsValue::WorkDone(ls_types::WorkDoneProgress::Begin( + ls_types::WorkDoneProgressBegin { + title: "RustOwl".to_owned(), + cancellable: Some(false), + message: message.map(|v| v.to_string()), + percentage: Some(0), + }, + )); + client.send_progress(token.clone(), value); + + 
Self { + client: Some(client), + token: Some(token), + } + } + + pub fn report(&self, message: Option, percentage: Option) { + if let (Some(client), Some(token)) = (self.client.clone(), self.token.clone()) { + let value = ls_types::ProgressParamsValue::WorkDone( + ls_types::WorkDoneProgress::Report(ls_types::WorkDoneProgressReport { + cancellable: Some(false), + message: message.map(|v| v.to_string()), + percentage, + }), + ); + client.send_progress(token, value); + } + } + + pub fn finish(mut self) { + let value = ls_types::ProgressParamsValue::WorkDone(ls_types::WorkDoneProgress::End( + ls_types::WorkDoneProgressEnd { message: None }, + )); + if let (Some(client), Some(token)) = (self.client.take(), self.token.take()) { + client.send_progress(token, value); + } + } +} + +impl Drop for ProgressToken { + fn drop(&mut self) { + let value = ls_types::ProgressParamsValue::WorkDone(ls_types::WorkDoneProgress::End( + ls_types::WorkDoneProgressEnd { message: None }, + )); + if let (Some(client), Some(token)) = (self.client.take(), self.token.take()) { + client.send_progress(token, value); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::{Arc, Mutex}; + + #[derive(Clone, Default)] + struct TestClient { + requests: Arc>>, + notifications: Arc>>, + } + + impl ProgressClient for TestClient { + fn send_request(&self, token: ls_types::NumberOrString) { + self.requests.lock().unwrap().push(token); + } + + fn send_progress( + &self, + token: ls_types::NumberOrString, + value: ls_types::ProgressParamsValue, + ) { + self.notifications.lock().unwrap().push((token, value)); + } + } + + #[test] + fn progress_token_begin_report_finish_sends_events() { + let client = TestClient::default(); + let token = ProgressToken::begin_with_client(client.clone(), Some("hello")); + assert_eq!(client.requests.lock().unwrap().len(), 1); + assert_eq!(client.notifications.lock().unwrap().len(), 1); + + token.report(Some("step"), Some(50)); + 
assert_eq!(client.notifications.lock().unwrap().len(), 2); + + token.finish(); + assert_eq!(client.notifications.lock().unwrap().len(), 3); + } + + #[test] + fn progress_token_drop_sends_end_once() { + let client = TestClient::default(); + let token = ProgressToken::begin_with_client(client.clone(), None::<&str>); + assert_eq!(client.notifications.lock().unwrap().len(), 1); + + drop(token); + assert_eq!(client.notifications.lock().unwrap().len(), 2); + } +} diff --git a/crates/rustowl/src/miri_tests.rs b/crates/rustowl/src/miri_tests.rs new file mode 100644 index 00000000..9782b9d1 --- /dev/null +++ b/crates/rustowl/src/miri_tests.rs @@ -0,0 +1,676 @@ +//! # Miri Memory Safety Tests +//! +//! This module contains tests specifically designed to run under Miri +//! to validate memory safety and undefined behavior detection in RustOwl's core functionality. +//! +//! These tests avoid external dependencies and process spawning that Miri doesn't support, +//! focusing on pure Rust code paths that can be fully analyzed for memory safety. +//! +//! ## What These Tests Cover: +//! +//! ### Core Data Models & Memory Safety: +//! - **Loc arithmetic**: Position tracking with overflow/underflow protection +//! - **Range validation**: Bounds checking and edge case handling +//! - **FnLocal operations**: Hash map usage and equality checks +//! - **File model**: Vector operations and memory management +//! - **Workspace/Crate hierarchy**: Complex nested HashMap operations +//! - **MirVariable variants**: Enum handling and pattern matching +//! - **Function structures**: Complex nested data structure operations +//! +//! ### Memory Management Patterns: +//! - **String handling**: Unicode support and concatenation safety +//! - **Collection operations**: HashMap/Vector operations with complex nesting +//! - **Clone operations**: Deep copying of complex structures +//! - **Serialization structures**: Data integrity for serde-compatible types +//! 
- **Capacity management**: Pre-allocation and memory growth patterns +//! +//! ### What Miri Validates: +//! - No use-after-free bugs +//! - No buffer overflows/underflows +//! - No uninitialized memory access +//! - No data races (in single-threaded context) +//! - Proper pointer provenance +//! - Memory leak detection +//! - Undefined behavior in arithmetic operations +//! +//! ## Limitations: +//! These tests cannot cover RustOwl functionality that requires: +//! - Process spawning (cargo metadata calls) +//! - File system operations +//! - Network operations +//! - External tool integration +//! +//! However, they thoroughly validate the core algorithms and data structures +//! that form the foundation of RustOwl's analysis capabilities. +//! +//! ## Usage: +//! ```bash +//! MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" cargo miri test --lib +//! ``` + +#[cfg(test)] +mod miri_memory_safety_tests { + use crate::models::FoldIndexMap as HashMap; + use crate::models::*; + + #[test] + fn test_loc_arithmetic_memory_safety() { + // Test Loc model creation and arithmetic operations for memory safety + let loc = Loc::new("test string with unicode 🦀", 5, 0); + let loc2 = loc + 2; + let loc3 = loc2 - 1; + + // Test arithmetic operations don't cause memory issues + assert_eq!(loc3.0, loc.0 + 1); + + // Test boundary conditions + let loc_zero = Loc(0); + let loc_underflow = loc_zero - 10; // Should saturate to 0 + assert_eq!(loc_underflow.0, 0); + + // Test large values (but avoid overflow) + let loc_large = Loc(u32::MAX - 10); + let loc_add = loc_large + 5; // Safe addition + assert_eq!(loc_add.0, u32::MAX - 5); + } + + #[test] + fn test_range_creation_and_validation() { + // Test Range creation with various scenarios + let valid_range = Range::new(Loc(0), Loc(10)).unwrap(); + assert_eq!(valid_range.from().0, 0); + assert_eq!(valid_range.until().0, 10); + assert_eq!(valid_range.size(), 10); + + // Test invalid range (until <= from) + let invalid_range = 
Range::new(Loc(10), Loc(5)); + assert!(invalid_range.is_none()); + + // Test edge case: same positions + let same_pos_range = Range::new(Loc(5), Loc(5)); + assert!(same_pos_range.is_none()); + + // Test large ranges + let large_range = Range::new(Loc(0), Loc(u32::MAX)).unwrap(); + assert_eq!(large_range.size(), u32::MAX); + } + + #[test] + fn test_fn_local_operations() { + // Test FnLocal model creation and operations + let fn_local1 = FnLocal::new(42, 100); + let fn_local2 = FnLocal::new(43, 100); + let fn_local3 = FnLocal::new(42, 100); + + // Test equality and inequality + assert_eq!(fn_local1, fn_local3); + assert_ne!(fn_local1, fn_local2); + + // Test hashing (via HashMap insertion) + let mut map = HashMap::default(); + map.insert(fn_local1, "first"); + map.insert(fn_local2, "second"); + map.insert(fn_local3, "third"); // Should overwrite first + + assert_eq!(map.len(), 2); + assert_eq!(map.get(&fn_local1), Some(&"third")); + assert_eq!(map.get(&fn_local2), Some(&"second")); + } + + #[test] + fn test_file_model_operations() { + // Test File model with various operations + let mut file = File::new(); + + // Test vector operations + assert_eq!(file.items.len(), 0); + assert!(file.items.is_empty()); + + // Test vector capacity and memory management + file.items.reserve(1000); + assert!(file.items.capacity() >= 1000); + + // Test cloning (deep copy) + let file_clone = file.clone(); + assert_eq!(file.items.len(), file_clone.items.len()); + } + + #[test] + fn test_workspace_operations() { + // Test Workspace and Crate models + let mut workspace = Workspace(HashMap::default()); + let mut crate1 = Crate(HashMap::default()); + let mut crate2 = Crate(HashMap::default()); + + // Add some files to crates + crate1.0.insert("lib.rs".to_string(), File::new()); + crate1.0.insert("main.rs".to_string(), File::new()); + + crate2.0.insert("helper.rs".to_string(), File::new()); + + // Add crates to workspace + workspace.0.insert("crate1".to_string(), crate1); + 
workspace.0.insert("crate2".to_string(), crate2); + + assert_eq!(workspace.0.len(), 2); + assert!(workspace.0.contains_key("crate1")); + assert!(workspace.0.contains_key("crate2")); + + // Test workspace merging + let mut other_workspace = Workspace(HashMap::default()); + let crate3 = Crate(HashMap::default()); + other_workspace.0.insert("crate3".to_string(), crate3); + + workspace.merge(other_workspace); + assert_eq!(workspace.0.len(), 3); + assert!(workspace.0.contains_key("crate3")); + } + + #[test] + fn test_mir_variables_operations() { + // Test MirVariables collection operations + let mut mir_vars = MirVariables::new(); + + // Test creation of MirVariable variants + let user_var = MirVariable::User { + index: 1, + live: Range::new(Loc(0), Loc(10)).unwrap(), + dead: Range::new(Loc(10), Loc(20)).unwrap(), + }; + + let other_var = MirVariable::Other { + index: 2, + live: Range::new(Loc(5), Loc(15)).unwrap(), + dead: Range::new(Loc(15), Loc(25)).unwrap(), + }; + + // Test insertion using push method + mir_vars.push(user_var); + mir_vars.push(other_var); + + // Test converting to vector + let vars_vec = mir_vars.clone().to_vec(); + assert_eq!(vars_vec.len(), 2); + + // Test that we can find our variables + let has_user_var = vars_vec + .iter() + .any(|v| matches!(v, MirVariable::User { index: 1, .. })); + let has_other_var = vars_vec + .iter() + .any(|v| matches!(v, MirVariable::Other { index: 2, .. 
})); + + assert!(has_user_var); + assert!(has_other_var); + + // Test duplicate insertion (should not duplicate) + mir_vars.push(user_var); + let final_vec = mir_vars.to_vec(); + assert_eq!(final_vec.len(), 2); // Still 2, not 3 + } + + #[test] + fn test_function_model_complex_operations() { + // Test Function model with complex nested structures + let function = Function::new(42); + + // Test cloning of complex nested structures + let function_clone = function.clone(); + assert_eq!(function.fn_id, function_clone.fn_id); + assert_eq!( + function.basic_blocks.len(), + function_clone.basic_blocks.len() + ); + assert_eq!(function.decls.len(), function_clone.decls.len()); + + // Test memory layout and alignment + let function_size = std::mem::size_of::(); + assert!(function_size > 0); + + // Test that we can create multiple instances without memory issues + let mut functions = Vec::new(); + for i in 0..100 { + functions.push(Function::new(i)); + } + + assert_eq!(functions.len(), 100); + assert_eq!(functions[50].fn_id, 50); + + // Test vector capacity management + let large_function = Function::with_capacity(999, 1000, 500); + + assert!(large_function.basic_blocks.capacity() >= 1000); + assert!(large_function.decls.capacity() >= 500); + } + + #[test] + fn test_string_handling_memory_safety() { + // Test string operations that could cause memory issues + let mut strings = Vec::new(); + + // Test various string operations + for i in 0..50 { + let s = format!("test_string_{i}"); + strings.push(s); + } + + // Test string concatenation + let mut concatenated = String::new(); + for s in &strings { + concatenated.push_str(s); + concatenated.push(' '); + } + + assert!(!concatenated.is_empty()); + + // Test unicode handling + let unicode_string = "🦀 Rust 🔥 Memory Safety 🛡️".to_string(); + + // Ensure unicode doesn't cause memory issues + assert!(unicode_string.len() > unicode_string.chars().count()); + } + + #[test] + fn test_collections_memory_safety() { + // Test various 
collection operations for memory safety + let mut map: HashMap> = HashMap::default(); + + // Insert data with complex nesting + for i in 0..20 { + let key = format!("key_{i}"); + let mut vec = Vec::new(); + + for j in 0..5 { + vec.push(FnLocal::new(j, i)); + } + + map.insert(key, vec); + } + + assert_eq!(map.len(), 20); + + // Test iteration and borrowing + for (key, vec) in &map { + assert!(key.starts_with("key_")); + assert_eq!(vec.len(), 5); + + for fn_local in vec { + assert!(fn_local.id < 5); + assert!(fn_local.fn_id < 20); + } + } + + // Test modification during iteration (using drain) + let mut keys_to_remove = Vec::new(); + for key in map.keys() { + if key.ends_with("_1") || key.ends_with("_2") { + keys_to_remove.push(key.clone()); + } + } + + for key in keys_to_remove { + map.swap_remove(&key); + } + + assert_eq!(map.len(), 18); // 20 - 2 + } + + #[test] + fn test_serialization_structures() { + // Test that our serializable structures don't have memory issues + // when working with the underlying data (without actual serialization) + + let range = Range::new(Loc(10), Loc(20)).unwrap(); + let fn_local = FnLocal::new(1, 2); + + // Test that Clone and PartialEq work correctly + let range_clone = range; + let fn_local_clone = fn_local; + + assert_eq!(range, range_clone); + assert_eq!(fn_local, fn_local_clone); + + // Test Debug formatting (without actually printing) + let debug_string = format!("{range:?}"); + assert!(debug_string.contains("Range")); + + let debug_fn_local = format!("{fn_local:?}"); + assert!(debug_fn_local.contains("FnLocal")); + } + + /// Exercises complex string creation, mutation, searching, slicing, and deduplication to help detect memory-safety issues. 
+ /// + /// Builds patterned strings, prepends a prefix and appends a suffix to each, verifies prefix/suffix invariants and + /// that slicing via `find` yields expected substrings, then deduplicates with a `HashSet` and asserts the deduplicated + /// count does not exceed the number of original distinct bases. + /// + /// # Examples + /// + /// ``` + /// // construct and mutate a few patterned strings, then dedupe + /// let mut v = Vec::new(); + /// for i in 0..3 { v.push(format!("test_{}", i)); } + /// for s in &mut v { s.insert_str(0, "prefix_"); s.push_str("_suffix"); } + /// for s in &v { assert!(s.starts_with("prefix_") && s.ends_with("_suffix")); } + /// if let Some(pos) = v[0].find("test_") { let slice = &v[0][pos..]; assert!(slice.starts_with("test_")); } + /// let set: std::collections::HashSet<_> = v.into_iter().collect(); + /// assert!(set.len() <= 3); + /// ``` + #[test] + fn test_advanced_string_operations() { + // Test more complex string operations for memory safety + let mut strings = Vec::with_capacity(100); + + // Test string creation with various patterns + for i in 0..50 { + let s = format!("test_{i}"); + strings.push(s); + } + + // Test string manipulation + for s in &mut strings { + s.push_str("_suffix"); + s.insert_str(0, "prefix_"); + } + + // Test string searching and slicing + for s in &strings { + assert!(s.starts_with("prefix_")); + assert!(s.ends_with("_suffix")); + + if let Some(pos) = s.find("test_") { + let slice = &s[pos..]; + assert!(slice.starts_with("test_")); + } + } + + // Test string deduplication + let mut unique_strings = std::collections::HashSet::new(); + for s in strings { + unique_strings.insert(s); + } + assert_eq!(unique_strings.len(), 50); + } + + #[test] + fn test_complex_nested_structures() { + // Test deeply nested data structures for memory safety + let mut workspace = Workspace(HashMap::default()); + for crate_idx in 0..10 { + let mut crate_data = Crate(HashMap::default()); + for file_idx in 0..5 { + let mut 
file = File::new(); + + for func_idx in 0..3 { + let mut function = Function::new(func_idx + file_idx * 3 + crate_idx * 15); + + // Add basic blocks + for bb_idx in 0..4 { + let mut basic_block = MirBasicBlock::new(); + + // Add statements + for stmt_idx in 0..6 { + let range = + Range::new(Loc(stmt_idx * 10), Loc(stmt_idx * 10 + 5)).unwrap(); + + basic_block.statements.push(MirStatement::Other { range }); + } + + // Add terminator + if bb_idx % 2 == 0 { + basic_block.terminator = Some(MirTerminator::Other { + range: Range::new(Loc(60), Loc(65)).unwrap(), + }); + } + + function.basic_blocks.push(basic_block); + } + + file.items.push(function); + } + + crate_data.0.insert(format!("file_{file_idx}.rs"), file); + } + + workspace.0.insert(format!("crate_{crate_idx}"), crate_data); + } + + // Verify structure + assert_eq!(workspace.0.len(), 10); + + for (crate_name, crate_data) in &workspace.0 { + assert!(crate_name.starts_with("crate_")); + assert_eq!(crate_data.0.len(), 5); + + for (file_name, file_data) in &crate_data.0 { + assert!(file_name.starts_with("file_")); + assert_eq!(file_data.items.len(), 3); + + for function in &file_data.items { + assert_eq!(function.basic_blocks.len(), 4); + + for (bb_idx, basic_block) in function.basic_blocks.iter().enumerate() { + assert_eq!(basic_block.statements.len(), 6); + if bb_idx % 2 == 0 { + assert!(basic_block.terminator.is_some()); + } else { + assert!(basic_block.terminator.is_none()); + } + } + } + } + } + } + + #[test] + fn test_memory_intensive_range_operations() { + // Test range operations with many ranges for memory safety + let mut ranges = Vec::with_capacity(1000); + + // Create overlapping ranges + for i in 0..500 { + let start = i * 2; + let end = start + 10; + if let Some(range) = Range::new(Loc(start), Loc(end)) { + ranges.push(range); + } + } + + // Test range merging and elimination + let eliminated = crate::utils::eliminated_ranges(ranges.clone()); + assert!(eliminated.len() < ranges.len()); // Should merge 
some ranges + // Ensure eliminated ranges are non-overlapping + assert!( + eliminated + .windows(2) + .all(|w| crate::utils::common_range(w[0], w[1]).is_none()) + ); + // Test range exclusion + let excludes = vec![ + Range::new(Loc(50), Loc(100)).unwrap(), + Range::new(Loc(200), Loc(250)).unwrap(), + ]; + + let excluded = crate::utils::exclude_ranges(ranges, excludes.clone()); + assert!(!excluded.is_empty()); + + // Verify no excluded ranges overlap with exclude regions + for range in &excluded { + for exclude in &excludes { + assert!(crate::utils::common_range(*range, *exclude).is_none()); + } + } + } + + #[test] + fn test_mir_variable_enum_exhaustive() { + // Test all MirVariable enum variants and operations + let user_vars = (0..20) + .map(|i| MirVariable::User { + index: i, + live: Range::new(Loc(i * 10), Loc(i * 10 + 5)).unwrap(), + dead: Range::new(Loc(i * 10 + 5), Loc(i * 10 + 10)).unwrap(), + }) + .collect::>(); + + let other_vars = (0..20) + .map(|i| MirVariable::Other { + index: i + 100, + live: Range::new(Loc(i * 15), Loc(i * 15 + 7)).unwrap(), + dead: Range::new(Loc(i * 15 + 7), Loc(i * 15 + 14)).unwrap(), + }) + .collect::>(); + + // Test pattern matching and extraction + for var in &user_vars { + match var { + MirVariable::User { index, live, dead } => { + assert!(*index < 20); + assert!(live.size() == 5); + assert!(dead.size() == 5); + assert_eq!(live.until(), dead.from()); + } + _ => panic!("Expected User variant"), + } + } + + for var in &other_vars { + match var { + MirVariable::Other { index, live, dead } => { + assert!(*index >= 100); + assert!(live.size() == 7); + assert!(dead.size() == 7); + assert_eq!(live.until(), dead.from()); + } + _ => panic!("Expected Other variant"), + } + } + + // Test collection operations + let mut all_vars = MirVariables::with_capacity(40); + for var in user_vars.into_iter().chain(other_vars.into_iter()) { + all_vars.push(var); + } + + let final_vars = all_vars.to_vec(); + assert_eq!(final_vars.len(), 40); + } + + 
#[test] + fn test_cache_config_memory_safety() { + // Test cache configuration structures for memory safety + use crate::cache::CacheConfig; + + let mut configs = Vec::new(); + + // Create configurations with various settings + for i in 0..50 { + let config = CacheConfig { + max_entries: 1000 + i, + max_memory_bytes: (100 + i) * 1024 * 1024, + use_lru_eviction: i % 2 == 0, + validate_file_mtime: i % 3 == 0, + enable_compression: i % 4 == 0, + }; + configs.push(config); + } + + // Test cloning and manipulation + for config in &configs { + let cloned = config.clone(); + assert_eq!(config.max_entries, cloned.max_entries); + assert_eq!(config.max_memory_bytes, cloned.max_memory_bytes); + assert_eq!(config.use_lru_eviction, cloned.use_lru_eviction); + assert_eq!(config.validate_file_mtime, cloned.validate_file_mtime); + assert_eq!(config.enable_compression, cloned.enable_compression); + } + + // Test debug formatting + for config in &configs { + let debug_str = format!("{config:?}"); + assert!(debug_str.contains("CacheConfig")); + assert!(debug_str.contains(&config.max_entries.to_string())); + } + } + + /// Verifies Loc arithmetic is safe around integer boundaries (no wrapping; saturates at zero). + /// + /// Tests addition and subtraction on extreme and intermediate Loc values to ensure operations + /// do not wrap on overflow and underflow and that subtraction saturates at zero where appropriate. 
+ /// + /// # Examples + /// + /// ```rust + /// # use crate::models::Loc; // adjust path as needed + /// let max = Loc(u32::MAX); + /// let min = Loc(0); + /// assert!((max + 1).0 >= max.0); + /// assert_eq!((min - 1).0, 0); + /// ``` + #[test] + fn test_advanced_arithmetic_safety() { + // Test arithmetic operations for overflow/underflow safety + + // Test Loc arithmetic with extreme values + let max_loc = Loc(u32::MAX); + let min_loc = Loc(0); + + // Test addition near overflow + let result = max_loc + 1; + assert_eq!(result.0, max_loc.0); // Saturates at max + let result = max_loc + (-1); + assert_eq!(result.0, u32::MAX - 1); // Should subtract correctly + + // Test subtraction near underflow + let result = min_loc - 1; + assert_eq!(result.0, 0); // Should saturate at 0 + + let result = min_loc + (-10); + assert_eq!(result.0, 0); // Should saturate at 0 + + // Test with intermediate values + let mid_loc = Loc(u32::MAX / 2); + let result = mid_loc + (u32::MAX / 2) as i32; + assert_eq!(result.0, u32::MAX - 1); // Exact expected value + let result = mid_loc - (u32::MAX / 2) as i32; + assert_eq!(result.0, 0); // Exact expected value + } + + #[test] + fn test_concurrent_like_operations() { + // Test operations that might be used in concurrent contexts + // (single-threaded but stress-testing for memory safety) + + use std::sync::Arc; + + let workspace = Arc::new(Workspace(FoldIndexMap::default())); + let mut handles = Vec::new(); + + // Simulate concurrent-like access patterns + for i in 0..10 { + let workspace_clone = Arc::clone(&workspace); + + // Create some work that would be done in different "threads" + let work = move || { + let _crate_name = format!("crate_{i}"); + let _workspace_ref = &*workspace_clone; + + // Simulate reading from workspace + for j in 0..5 { + let _key = format!("key_{j}"); + // Would normally do workspace_ref.0.get(&key) + } + }; + + handles.push(work); + } + + // Execute all "work" sequentially (since this is single-threaded) + for work 
in handles { + work(); + } + + // Test that Arc and reference counting works correctly + assert_eq!(Arc::strong_count(&workspace), 1); // Only our reference remains + } +} diff --git a/crates/rustowl/src/models.rs b/crates/rustowl/src/models.rs new file mode 100644 index 00000000..1785a2e4 --- /dev/null +++ b/crates/rustowl/src/models.rs @@ -0,0 +1,797 @@ +//! Data models for RustOwl ownership and lifetime analysis. +//! +//! This module contains the core data structures used to represent +//! ownership information, lifetimes, and analysis results extracted +//! from Rust code via compiler integration. + +use ecow::{EcoString, EcoVec}; +use foldhash::quality::RandomState as FoldHasher; +use indexmap::{IndexMap, IndexSet}; +use serde::{Deserialize, Serialize}; + +/// An IndexMap with FoldHasher for fast + high-quality hashing. +pub type FoldIndexMap = IndexMap; + +/// An IndexSet with FoldHasher for fast + high-quality hashing. +pub type FoldIndexSet = IndexSet; + +/// Represents a local variable within a function scope. +/// +/// This structure uniquely identifies a local variable by combining +/// its local ID within the function and the function ID itself. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct FnLocal { + /// Local variable ID within the function + pub id: u32, + /// Function ID this local belongs to + pub fn_id: u32, +} + +impl FnLocal { + /// Creates a new function-local variable identifier. + /// + /// # Arguments + /// * `id` - The local variable ID within the function + /// * `fn_id` - The function ID this local belongs to + pub fn new(id: u32, fn_id: u32) -> Self { + Self { id, fn_id } + } +} + +/// Represents a character position in source code. +/// +/// This is a character-based position that handles Unicode correctly +/// and automatically filters out carriage return characters to match +/// compiler behavior. 
+#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] +#[serde(transparent)] +pub struct Loc(pub u32); + +impl Loc { + /// Creates a new location from source text and byte position. + /// + /// Converts a byte position to a character position, handling Unicode + /// correctly and filtering out CR characters as the compiler does. + /// + /// # Arguments + /// * `source` - The source code text + /// * `byte_pos` - Byte position in the source + /// * `offset` - Offset to subtract from byte position + pub fn new(source: &str, byte_pos: u32, offset: u32) -> Self { + let byte_pos = byte_pos.saturating_sub(offset); + let byte_pos = byte_pos as usize; + + // This method is intentionally allocation-free. Hot paths should prefer + // `utils::NormalizedByteCharIndex` to avoid repeatedly scanning `source`. + // + // Note: rustc byte positions are reported as if `\r` doesn't exist. + // So our byte counting must ignore CR too. + let mut char_count = 0u32; + let mut normalized_byte_count = 0usize; + + for ch in source.chars() { + if ch == '\r' { + continue; + } + if normalized_byte_count >= byte_pos { + break; + } + + normalized_byte_count += ch.len_utf8(); + if normalized_byte_count <= byte_pos { + char_count += 1; + } + } + + Self(char_count) + } +} + +impl std::ops::Add for Loc { + type Output = Loc; + /// Adds a signed offset to this `Loc`, saturating to avoid underflow or overflow. + /// + /// For non-negative offsets, the location is increased with saturation at `u32::MAX`. + /// For negative offsets, the absolute value is subtracted with saturation at `0`. 
+ /// + /// # Examples + /// + /// ``` + /// use rustowl::models::Loc; + /// let a = Loc(5); + /// assert_eq!(a + 3, Loc(8)); + /// + /// let b = Loc(0); + /// assert_eq!(b + -10, Loc(0)); // saturates at zero, does not underflow + /// + /// let c = Loc(u32::MAX - 1); + /// assert_eq!(c + 10, Loc(u32::MAX)); // saturates at u32::MAX, does not overflow + /// ``` + fn add(self, rhs: i32) -> Self::Output { + if rhs >= 0 { + // Use saturating_add to prevent overflow + Loc(self.0.saturating_add(rhs as u32)) + } else { + // rhs is negative, so subtract the absolute value + let abs_rhs = (-rhs) as u32; + Loc(self.0.saturating_sub(abs_rhs)) + } + } +} + +impl std::ops::Sub for Loc { + type Output = Loc; + /// Subtracts a signed offset from this `Loc`, using saturating arithmetic. + /// + /// For non-negative `rhs` the function subtracts `rhs` (saturating at 0 to prevent underflow). + /// If `rhs` is negative the absolute value is added (saturating on overflow). + /// + /// # Examples + /// + /// ``` + /// # use rustowl::models::Loc; + /// let a = Loc(10); + /// assert_eq!(a - 3, Loc(7)); // normal subtraction + /// assert_eq!(a - (-2), Loc(12)); // negative rhs -> addition + /// let zero = Loc(0); + /// assert_eq!(zero - 1, Loc(0)); // saturates at 0, no underflow + /// let max = Loc(u32::MAX); + /// assert_eq!(max - (-1), Loc(u32::MAX)); // saturating add prevents overflow + /// ``` + fn sub(self, rhs: i32) -> Self::Output { + if rhs >= 0 { + Loc(self.0.saturating_sub(rhs as u32)) + } else { + // rhs is negative, so we're actually adding the absolute value + let abs_rhs = (-rhs) as u32; + Loc(self.0.saturating_add(abs_rhs)) + } + } +} + +impl From for Loc { + fn from(value: u32) -> Self { + Self(value) + } +} + +impl From for u32 { + fn from(value: Loc) -> Self { + value.0 + } +} + +/// Represents a character range in source code. +/// +/// A range is defined by a starting and ending location, where the +/// ending location is exclusive (half-open interval). 
+#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] +pub struct Range { + from: Loc, + until: Loc, +} + +impl Range { + /// Creates a new range if the end position is after the start position. + /// + /// # Arguments + /// * `from` - Starting location (inclusive) + /// * `until` - Ending location (exclusive) + /// + /// # Returns + /// `Some(Range)` if valid, `None` if `until <= from` + pub fn new(from: Loc, until: Loc) -> Option { + if until.0 <= from.0 { + None + } else { + Some(Self { from, until }) + } + } + + /// Returns the starting location of the range. + pub fn from(&self) -> Loc { + self.from + } + + /// Returns the ending location of the range. + pub fn until(&self) -> Loc { + self.until + } + + /// Returns the size of the range in characters. + pub fn size(&self) -> u32 { + self.until.0 - self.from.0 + } +} + +/// Represents a MIR (Mid-level IR) variable with lifetime information. +/// +/// MIR variables can be either user-defined variables or compiler-generated +/// temporaries, each with their own live and dead ranges. 
+#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum MirVariable { + /// A user-defined variable + User { + /// Variable index within the function + index: u32, + /// Range where the variable is live + live: Range, + /// Range where the variable is dead/dropped + dead: Range, + }, + /// A compiler-generated temporary or other variable + Other { + index: u32, + live: Range, + dead: Range, + }, +} + +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] +#[serde(transparent)] +pub struct MirVariables(IndexMap); + +impl Default for MirVariables { + fn default() -> Self { + Self::new() + } +} + +impl MirVariables { + pub fn new() -> Self { + Self(IndexMap::with_capacity(8)) + } + + pub fn with_capacity(capacity: usize) -> Self { + Self(IndexMap::with_capacity(capacity)) + } + + pub fn push(&mut self, var: MirVariable) { + let index = match &var { + MirVariable::User { index, .. } | MirVariable::Other { index, .. 
} => *index, + }; + self.0.entry(index).or_insert(var); + } + + pub fn to_vec(self) -> Vec { + self.0.into_values().collect() + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct File { + pub items: EcoVec, +} + +impl Default for File { + fn default() -> Self { + Self::new() + } +} + +impl File { + pub fn new() -> Self { + Self { + items: EcoVec::new(), + } + } + + pub fn with_capacity(capacity: usize) -> Self { + Self { + items: EcoVec::with_capacity(capacity), + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(transparent)] +pub struct Workspace(pub FoldIndexMap); + +impl Workspace { + pub fn merge(&mut self, other: Self) { + let Workspace(crates) = other; + for (name, krate) in crates { + if let Some(insert) = self.0.get_mut(&name) { + insert.merge(krate); + } else { + self.0.insert(name, krate); + } + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, Default)] +#[serde(transparent)] +pub struct Crate(pub FoldIndexMap); + +impl Crate { + pub fn merge(&mut self, other: Self) { + let Crate(files) = other; + for (file, mir) in files { + match self.0.get_mut(&file) { + Some(existing) => { + let mut seen_ids = FoldIndexSet::with_capacity_and_hasher( + existing.items.len(), + FoldHasher::default(), + ); + seen_ids.extend(existing.items.iter().map(|i| i.fn_id)); + + // `EcoVec` doesn't offer `retain`/`append`, so rebuild the delta. 
+ let new_items: EcoVec = mir + .items + .iter() + .filter(|&item| seen_ids.insert(item.fn_id)) + .cloned() + .collect(); + + if !new_items.is_empty() { + let mut merged = + EcoVec::with_capacity(existing.items.len() + new_items.len()); + merged.extend(existing.items.iter().cloned()); + merged.extend(new_items); + existing.items = merged; + } + } + None => { + self.0.insert(file, mir); + } + } + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum MirRval { + Move { + target_local: FnLocal, + range: Range, + }, + Borrow { + target_local: FnLocal, + range: Range, + mutable: bool, + outlive: Option, + }, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum MirStatement { + StorageLive { + target_local: FnLocal, + range: Range, + }, + StorageDead { + target_local: FnLocal, + range: Range, + }, + Assign { + target_local: FnLocal, + range: Range, + rval: Option, + }, + Other { + range: Range, + }, +} +impl MirStatement { + pub fn range(&self) -> Range { + match self { + Self::StorageLive { range, .. } => *range, + Self::StorageDead { range, .. } => *range, + Self::Assign { range, .. } => *range, + Self::Other { range } => *range, + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum MirTerminator { + Drop { + local: FnLocal, + range: Range, + }, + Call { + destination_local: FnLocal, + fn_span: Range, + }, + Other { + range: Range, + }, +} +impl MirTerminator { + pub fn range(&self) -> Range { + match self { + Self::Drop { range, .. } => *range, + Self::Call { fn_span, .. 
} => *fn_span, + Self::Other { range } => *range, + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct MirBasicBlock { + pub statements: StatementVec, + pub terminator: Option, +} + +impl Default for MirBasicBlock { + fn default() -> Self { + Self::new() + } +} + +impl MirBasicBlock { + pub fn new() -> Self { + Self { + statements: StatementVec::new(), + terminator: None, + } + } + + pub fn with_capacity(capacity: usize) -> Self { + Self { + statements: StatementVec::with_capacity(capacity), + terminator: None, + } + } +} + +// Type aliases for commonly cloned collections. +// +// These were previously `SmallVec` to optimize for small inline sizes. +// We now use `EcoVec` to make cloning across the LSP boundary cheap. +pub type RangeVec = EcoVec; +pub type StatementVec = EcoVec; +pub type DeclVec = EcoVec; + +pub fn range_vec_into_vec(ranges: RangeVec) -> Vec { + ranges.into_iter().collect() +} + +pub fn range_vec_from_vec(vec: Vec) -> RangeVec { + vec.into() +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum MirDecl { + User { + local: FnLocal, + name: EcoString, + span: Range, + ty: EcoString, + lives: RangeVec, + shared_borrow: RangeVec, + mutable_borrow: RangeVec, + drop: bool, + drop_range: RangeVec, + must_live_at: RangeVec, + }, + Other { + local: FnLocal, + ty: EcoString, + lives: RangeVec, + shared_borrow: RangeVec, + mutable_borrow: RangeVec, + drop: bool, + drop_range: RangeVec, + must_live_at: RangeVec, + }, +} + +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct Function { + pub fn_id: u32, + pub basic_blocks: EcoVec, + pub decls: DeclVec, +} + +impl Function { + pub fn new(fn_id: u32) -> Self { + Self { + fn_id, + basic_blocks: EcoVec::new(), + decls: DeclVec::new(), + } + } + + /// Creates a `Function` with preallocated capacity for basic blocks and declarations. + /// + /// `fn_id` is the function identifier. 
`bb_capacity` is the initial capacity reserved + /// for the function's basic block list. `decl_capacity` is the initial capacity reserved + /// for the function's declarations. + /// + /// # Examples + /// + /// ``` + /// use rustowl::models::Function; + /// let f = Function::with_capacity(42, 8, 16); + /// assert_eq!(f.fn_id, 42); + /// assert!(f.basic_blocks.capacity() >= 8); + /// assert!(f.decls.capacity() >= 16); + /// ``` + pub fn with_capacity(fn_id: u32, bb_capacity: usize, decl_capacity: usize) -> Self { + Self { + fn_id, + basic_blocks: EcoVec::with_capacity(bb_capacity), + decls: DeclVec::with_capacity(decl_capacity), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_loc_creation_with_unicode() { + let source = "hello 🦀 world\r\ngoodbye 🌍 world"; + // Test character position conversion + let _loc = Loc::new(source, 8, 0); // Should point to space before 🦀 + + // Verify that CR characters are filtered out. + // rustc byte positions are reported as if `\r` doesn't exist, so the same + // `byte_pos` should map to the same `Loc`. 
+ let source_with_cr = "hello\r\n world"; + let loc_with_cr = Loc::new(source_with_cr, 7, 0); + let loc_without_cr = Loc::new("hello\n world", 7, 0); + assert_eq!(loc_with_cr.0, loc_without_cr.0); + } + + #[test] + fn test_workspace_merge_operations() { + let mut workspace1 = Workspace(FoldIndexMap::default()); + let mut workspace2 = Workspace(FoldIndexMap::default()); + + // Setup workspace1 with a crate + let mut crate1 = Crate(FoldIndexMap::default()); + crate1.0.insert("lib.rs".to_string(), File::new()); + workspace1.0.insert("my_crate".to_string(), crate1); + + // Setup workspace2 with the same crate name but different file + let mut crate2 = Crate(FoldIndexMap::default()); + crate2.0.insert("main.rs".to_string(), File::new()); + workspace2.0.insert("my_crate".to_string(), crate2); + + // Setup workspace2 with a different crate + let crate3 = Crate(FoldIndexMap::default()); + workspace2.0.insert("other_crate".to_string(), crate3); + + workspace1.merge(workspace2); + + // Should have 2 crates total + assert_eq!(workspace1.0.len(), 2); + assert!(workspace1.0.contains_key("my_crate")); + assert!(workspace1.0.contains_key("other_crate")); + + // my_crate should have both files after merge + let merged_crate = &workspace1.0["my_crate"]; + assert_eq!(merged_crate.0.len(), 2); + assert!(merged_crate.0.contains_key("lib.rs")); + assert!(merged_crate.0.contains_key("main.rs")); + } + + #[test] + fn test_crate_merge_with_duplicate_functions() { + let mut crate1 = Crate(FoldIndexMap::default()); + let mut crate2 = Crate(FoldIndexMap::default()); + + // Create files with functions + let mut file1 = File::new(); + file1.items.push(Function::new(1)); + file1.items.push(Function::new(2)); + + let mut file2 = File::new(); + file2.items.push(Function::new(2)); // Duplicate fn_id + file2.items.push(Function::new(3)); + + crate1.0.insert("test.rs".to_string(), file1); + crate2.0.insert("test.rs".to_string(), file2); + + crate1.merge(crate2); + + let merged_file = 
&crate1.0["test.rs"]; + // Should have 3 unique functions (1, 2, 3) with duplicate 2 filtered out + assert_eq!(merged_file.items.len(), 3); + + // Check that function IDs are unique + let mut ids: Vec = merged_file.items.iter().map(|f| f.fn_id).collect(); + ids.sort(); + assert_eq!(ids, vec![1, 2, 3]); + } + + #[test] + fn test_mir_statement_range_extraction() { + let range = Range::new(Loc(10), Loc(20)).unwrap(); + let fn_local = FnLocal::new(1, 42); + + let storage_live = MirStatement::StorageLive { + target_local: fn_local, + range, + }; + assert_eq!(storage_live.range(), range); + + let storage_dead = MirStatement::StorageDead { + target_local: fn_local, + range, + }; + assert_eq!(storage_dead.range(), range); + + let assign = MirStatement::Assign { + target_local: fn_local, + range, + rval: None, + }; + assert_eq!(assign.range(), range); + + let other = MirStatement::Other { range }; + assert_eq!(other.range(), range); + } + + #[test] + fn test_range_vec_conversions() { + let ranges = vec![ + Range::new(Loc(0), Loc(5)).unwrap(), + Range::new(Loc(10), Loc(15)).unwrap(), + ]; + + let range_vec = range_vec_from_vec(ranges.clone()); + let converted_back = range_vec_into_vec(range_vec); + + assert_eq!(ranges, converted_back); + } + + #[test] + fn test_mir_variable_comprehensive_scenarios() { + // Test comprehensive MirVariable scenarios + let base_range = Range::new(Loc(10), Loc(50)).unwrap(); + let live_range = Range::new(Loc(15), Loc(40)).unwrap(); + let dead_range = Range::new(Loc(40), Loc(45)).unwrap(); + + let variables = vec![ + MirVariable::User { + index: 0, + live: live_range, + dead: dead_range, + }, + MirVariable::User { + index: u32::MAX, + live: base_range, + dead: Range::new(Loc(50), Loc(60)).unwrap(), + }, + MirVariable::Other { + index: 0, + live: live_range, + dead: dead_range, + }, + MirVariable::Other { + index: 12345, + live: base_range, + dead: live_range, + }, + MirVariable::Other { + index: 999, + live: Range::new(Loc(0), Loc(10)).unwrap(), + 
dead: Range::new(Loc(10), Loc(20)).unwrap(), + }, + ]; + + for variable in variables { + // Test serialization roundtrip + let json = serde_json::to_string(&variable).unwrap(); + let deserialized: MirVariable = serde_json::from_str(&json).unwrap(); + + // Extract and compare components + let (orig_index, orig_live, orig_dead) = match &variable { + MirVariable::User { index, live, dead } => (index, live, dead), + MirVariable::Other { index, live, dead } => (index, live, dead), + }; + + let (deser_index, deser_live, deser_dead) = match &deserialized { + MirVariable::User { index, live, dead } => (index, live, dead), + MirVariable::Other { index, live, dead } => (index, live, dead), + }; + + assert_eq!(orig_index, deser_index); + assert_eq!(orig_live, deser_live); + assert_eq!(orig_dead, deser_dead); + + // Verify ranges are valid + assert!(orig_live.from() < orig_live.until()); + assert!(orig_dead.from() < orig_dead.until()); + } + } + + #[test] + fn test_serialization_format_consistency() { + // Test that serialization format is consistent and predictable + let function = Function::new(42); + let range = Range::new(Loc(10), Loc(20)).unwrap(); + let fn_local = FnLocal::new(1, 2); + + let variable = MirVariable::User { + index: 5, + live: range, + dead: Range::new(Loc(20), Loc(30)).unwrap(), + }; + + let statement = MirStatement::Assign { + target_local: fn_local, + range, + rval: None, + }; + + let terminator = MirTerminator::Other { range }; + + // Test multiple serialization rounds produce same result + for _ in 0..3 { + let json1 = serde_json::to_string(&function).unwrap(); + let json2 = serde_json::to_string(&function).unwrap(); + assert_eq!(json1, json2, "Serialization should be deterministic"); + + let json1 = serde_json::to_string(&variable).unwrap(); + let json2 = serde_json::to_string(&variable).unwrap(); + assert_eq!( + json1, json2, + "Variable serialization should be deterministic" + ); + + let json1 = serde_json::to_string(&statement).unwrap(); + let 
json2 = serde_json::to_string(&statement).unwrap(); + assert_eq!( + json1, json2, + "Statement serialization should be deterministic" + ); + + let json1 = serde_json::to_string(&terminator).unwrap(); + let json2 = serde_json::to_string(&terminator).unwrap(); + assert_eq!( + json1, json2, + "Terminator serialization should be deterministic" + ); + } + } + + #[test] + fn test_memory_usage_optimization() { + // Test memory usage optimization for data structures + use std::mem; + + // Test that core types have reasonable memory footprint + let function = Function::new(0); + let function_size = mem::size_of_val(&function); + assert!( + function_size <= 8192, + "Function should be compact: {function_size} bytes" + ); + + let range = Range::new(Loc(0), Loc(100)).unwrap(); + let range_size = mem::size_of_val(&range); + assert!( + range_size <= 16, + "Range should be compact: {range_size} bytes" + ); + + let fn_local = FnLocal::new(0, 0); + let fn_local_size = mem::size_of_val(&fn_local); + assert!( + fn_local_size <= 16, + "FnLocal should be compact: {fn_local_size} bytes" + ); + + // Spot-check `EcoVec` remains a compact container. 
+ let vec = EcoVec::::new(); + let vec_size = mem::size_of_val(&vec); + assert!(vec_size > 0); + + let mut vec = EcoVec::::new(); + for i in 0..4 { + vec.push(Function::new(i)); + } + assert_eq!(vec.len(), 4); + } +} diff --git a/crates/rustowl/src/shells.rs b/crates/rustowl/src/shells.rs new file mode 100644 index 00000000..81cfe14a --- /dev/null +++ b/crates/rustowl/src/shells.rs @@ -0,0 +1,471 @@ +use clap_complete_nushell::Nushell; + +use std::fmt::Display; +use std::path::Path; +use std::str::FromStr; + +use clap::ValueEnum; + +use clap_complete::Generator; +use clap_complete::shells; + +/// Extended shell support including Nushell +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum)] +#[non_exhaustive] +#[value(rename_all = "lower")] +pub enum Shell { + /// Bourne Again `SHell` (bash) + Bash, + /// Elvish shell + Elvish, + /// Friendly Interactive `SHell` (fish) + Fish, + /// `PowerShell` + PowerShell, + /// Z `SHell` (zsh) + Zsh, + /// Nushell + Nushell, +} + +impl Display for Shell { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Shell::Bash => write!(f, "bash"), + Shell::Elvish => write!(f, "elvish"), + Shell::Fish => write!(f, "fish"), + Shell::PowerShell => write!(f, "powershell"), + Shell::Zsh => write!(f, "zsh"), + Shell::Nushell => write!(f, "nushell"), + } + } +} + +impl FromStr for Shell { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "bash" => Ok(Shell::Bash), + "elvish" => Ok(Shell::Elvish), + "fish" => Ok(Shell::Fish), + "powershell" => Ok(Shell::PowerShell), + "zsh" => Ok(Shell::Zsh), + "nushell" => Ok(Shell::Nushell), + _ => Err(format!("invalid variant: {s}")), + } + } +} + +impl Generator for Shell { + fn file_name(&self, name: &str) -> String { + match self { + Shell::Bash => shells::Bash.file_name(name), + Shell::Elvish => shells::Elvish.file_name(name), + Shell::Fish => shells::Fish.file_name(name), + Shell::PowerShell => 
shells::PowerShell.file_name(name), + Shell::Zsh => shells::Zsh.file_name(name), + Shell::Nushell => Nushell.file_name(name), + } + } + + fn generate(&self, cmd: &clap::Command, buf: &mut dyn std::io::Write) { + match self { + Shell::Bash => shells::Bash.generate(cmd, buf), + Shell::Elvish => shells::Elvish.generate(cmd, buf), + Shell::Fish => shells::Fish.generate(cmd, buf), + Shell::PowerShell => shells::PowerShell.generate(cmd, buf), + Shell::Zsh => shells::Zsh.generate(cmd, buf), + Shell::Nushell => Nushell.generate(cmd, buf), + } + } +} + +impl Shell { + /// Parse a shell from a path to the executable for the shell + pub fn from_shell_path>(path: P) -> Option { + let path = path.as_ref(); + let name = path.file_stem()?.to_str()?; + + match name { + "bash" => Some(Shell::Bash), + "zsh" => Some(Shell::Zsh), + "fish" => Some(Shell::Fish), + "elvish" => Some(Shell::Elvish), + "powershell" | "powershell_ise" => Some(Shell::PowerShell), + "nu" | "nushell" => Some(Shell::Nushell), + _ => None, + } + } + + /// Determine the user's current shell from the environment + pub fn from_env() -> Option { + if let Some(env_shell) = std::env::var_os("SHELL") { + Shell::from_shell_path(env_shell) + } else if cfg!(windows) { + Some(Shell::PowerShell) + } else { + None + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_shell_display() { + assert_eq!(Shell::Bash.to_string(), "bash"); + assert_eq!(Shell::Zsh.to_string(), "zsh"); + assert_eq!(Shell::Fish.to_string(), "fish"); + assert_eq!(Shell::Elvish.to_string(), "elvish"); + assert_eq!(Shell::PowerShell.to_string(), "powershell"); + assert_eq!(Shell::Nushell.to_string(), "nushell"); + } + + #[test] + fn test_shell_from_shell_path() { + assert_eq!(Shell::from_shell_path("/bin/bash"), Some(Shell::Bash)); + assert_eq!(Shell::from_shell_path("/usr/bin/zsh"), Some(Shell::Zsh)); + assert_eq!( + Shell::from_shell_path("/usr/local/bin/fish"), + Some(Shell::Fish) + ); + 
assert_eq!(Shell::from_shell_path("/opt/elvish"), Some(Shell::Elvish)); + // PowerShell on Windows could be powershell.exe or powershell_ise.exe + assert_eq!( + Shell::from_shell_path("powershell"), + Some(Shell::PowerShell) + ); + assert_eq!( + Shell::from_shell_path("powershell_ise"), + Some(Shell::PowerShell) + ); + assert_eq!( + Shell::from_shell_path("powershell.exe"), + Some(Shell::PowerShell) + ); + assert_eq!(Shell::from_shell_path("/usr/bin/nu"), Some(Shell::Nushell)); + assert_eq!(Shell::from_shell_path("nu.exe"), Some(Shell::Nushell)); + assert_eq!( + Shell::from_shell_path("/usr/bin/nushell"), + Some(Shell::Nushell) + ); + + assert_eq!(Shell::from_shell_path("/bin/unknown"), None); + } + + #[test] + fn test_shell_from_str_case_insensitive() { + use std::str::FromStr; + + // Test uppercase variants + assert_eq!(::from_str("BASH"), Ok(Shell::Bash)); + assert_eq!(::from_str("ZSH"), Ok(Shell::Zsh)); + assert_eq!(::from_str("FISH"), Ok(Shell::Fish)); + assert_eq!( + ::from_str("POWERSHELL"), + Ok(Shell::PowerShell) + ); + assert_eq!(::from_str("NUSHELL"), Ok(Shell::Nushell)); + + // Test mixed case variants + assert_eq!(::from_str("BaSh"), Ok(Shell::Bash)); + assert_eq!( + ::from_str("PowerShell"), + Ok(Shell::PowerShell) + ); + assert_eq!(::from_str("NuShell"), Ok(Shell::Nushell)); + } + + #[test] + fn test_shell_from_str_error_messages() { + use std::str::FromStr; + + let result = ::from_str("invalid"); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "invalid variant: invalid"); + + let result = ::from_str("cmd"); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "invalid variant: cmd"); + + let result = ::from_str(""); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "invalid variant: "); + } + + #[test] + fn test_shell_file_name_generation() { + // Test file name generation for different shells + let shells = [ + (Shell::Bash, "rustowl"), + (Shell::Zsh, "rustowl"), + (Shell::Fish, "rustowl"), + (Shell::PowerShell, 
"rustowl"), + (Shell::Elvish, "rustowl"), + (Shell::Nushell, "rustowl"), + ]; + + for (shell, app_name) in shells { + let filename = shell.file_name(app_name); + assert!(!filename.is_empty()); + assert!(filename.contains(app_name)); + } + } + + #[test] + fn test_shell_generate_different_commands() { + // Test generation basic functionality + use clap::Command; + + let cmd = Command::new("test-app").bin_name("test-app"); + + // Test with one shell to verify basic functionality + let shell = Shell::Bash; + let mut buf = Vec::new(); + shell.generate(&cmd, &mut buf); + assert!(!buf.is_empty(), "Generated completion should not be empty"); + + // Verify it contains some expected content + let content = String::from_utf8_lossy(&buf); + assert!(content.contains("test-app"), "Should contain app name"); + } + + #[test] + fn test_shell_enum_properties() { + // Test enum properties and traits + let shell = Shell::Bash; + + // Test Clone + let cloned = shell; + assert_eq!(shell, cloned); + + // Test Copy + let copied = shell; + assert_eq!(shell, copied); + + // Test Hash consistency + use std::collections::HashMap; + let mut map = HashMap::new(); + map.insert(shell, "value"); + assert_eq!(map.get(&Shell::Bash), Some(&"value")); + + // Test PartialEq + assert_eq!(Shell::Bash, Shell::Bash); + assert_ne!(Shell::Bash, Shell::Zsh); + } + + #[test] + fn test_shell_display_format_consistency() { + // Test that display format is consistent with from_str parsing + use std::str::FromStr; + + let shells = [ + Shell::Bash, + Shell::Elvish, + Shell::Fish, + Shell::PowerShell, + Shell::Zsh, + Shell::Nushell, + ]; + + for shell in shells { + let display_str = shell.to_string(); + let parsed_shell = ::from_str(&display_str).unwrap(); + assert_eq!( + shell, parsed_shell, + "Display and parse should roundtrip for {shell:?}" + ); + } + } + + #[test] + fn test_shell_value_enum_integration() { + // Test that Shell works properly as a clap ValueEnum + use clap::ValueEnum; + + // Test value_variants 
+ let variants = Shell::value_variants(); + assert_eq!(variants.len(), 6); + assert!(variants.contains(&Shell::Bash)); + assert!(variants.contains(&Shell::Nushell)); + + // Test to_possible_value + for variant in variants { + let possible_value = variant.to_possible_value(); + assert!(possible_value.is_some()); + let pv = possible_value.unwrap(); + assert!(!pv.get_name().is_empty()); + } + } + + #[test] + fn test_shell_edge_cases() { + // Test edge cases and boundary conditions + + // Test with empty path components + assert_eq!(Shell::from_shell_path(""), None); + assert_eq!(Shell::from_shell_path("/"), None); + assert_eq!(Shell::from_shell_path("/."), None); + + // Test with paths that have no file stem + assert_eq!(Shell::from_shell_path("/usr/bin/"), None); + assert_eq!(Shell::from_shell_path(".bashrc"), None); + + // Test with symlink-like names (common in some distributions) + assert_eq!(Shell::from_shell_path("/usr/bin/sh"), None); // sh is not supported + assert_eq!(Shell::from_shell_path("/bin/dash"), None); // dash is not supported + + // Test case sensitivity in file stem extraction + assert_eq!(Shell::from_shell_path("/usr/bin/BASH"), None); // Case matters for file stem + } + + #[test] + fn test_shell_unicode_path_handling() { + // Test shell detection with Unicode paths + let unicode_paths = vec![ + ("/usr/bin/测试/bash", Some(Shell::Bash)), + ("/home/用户/bin/zsh", Some(Shell::Zsh)), + ("/opt/русский/fish", Some(Shell::Fish)), + ("/Applications/العربية/nu", Some(Shell::Nushell)), + ("/usr/local/bin/日本語/elvish", Some(Shell::Elvish)), + ("~/🦀/powershell", Some(Shell::PowerShell)), + ("/path/with spaces/bash", Some(Shell::Bash)), + ("/path\twith\ttabs/zsh", Some(Shell::Zsh)), + ]; + + for (path, expected) in unicode_paths { + let result = Shell::from_shell_path(path); + assert_eq!(result, expected, "Failed for Unicode path: {path}"); + } + } + + #[test] + fn test_shell_generator_stress_testing() { + // Test that shell enum has expected variants (safer test) 
+        let shells = [
+            Shell::Bash,
+            Shell::Zsh,
+            Shell::Fish,
+            Shell::PowerShell,
+            Shell::Elvish,
+            Shell::Nushell,
+        ];
+
+        // Test that all shells can be displayed properly
+        for shell in shells {
+            let shell_name = shell.to_string();
+            assert!(!shell_name.is_empty(), "Shell {shell:?} should have a name");
+
+            // Test file name generation
+            let filename = shell.file_name("test");
+            assert!(
+                filename.contains("test"),
+                "Filename should contain app name"
+            );
+        }
+    }
+
+    #[test]
+    fn test_shell_env_detection_comprehensive() {
+        // Test comprehensive environment detection patterns
+        use std::path::Path;
+
+        let shell_env_patterns = vec![
+            ("/bin/bash", Some(Shell::Bash)),
+            ("/usr/bin/zsh", Some(Shell::Zsh)),
+            ("/usr/local/bin/fish", Some(Shell::Fish)),
+            ("/opt/homebrew/bin/elvish", Some(Shell::Elvish)),
+            ("/usr/bin/pwsh", None), // pwsh not directly supported
+            ("powershell.exe", Some(Shell::PowerShell)), // Windows executable
+            ("/snap/bin/nu", Some(Shell::Nushell)),
+            ("/usr/local/bin/nushell", Some(Shell::Nushell)),
+            ("/bin/sh", None), // sh not supported
+            ("/bin/tcsh", None), // tcsh not supported
+            ("/bin/csh", None), // csh not supported
+            ("/usr/bin/ksh", None), // ksh not supported
+        ];
+
+        for (shell_path, expected) in shell_env_patterns {
+            let path = Path::new(shell_path);
+            let detected = Shell::from_shell_path(path);
+            assert_eq!(detected, expected, "Failed for shell path: {shell_path}");
+
+            // Test that the path operations work correctly
+            if let Some(file_stem) = path.file_stem() {
+                let stem_str = file_stem.to_string_lossy();
+
+                // Verify our detection logic matches expectations
+                let manual_detection = match stem_str.as_ref() {
+                    "bash" => Some(Shell::Bash),
+                    "zsh" => Some(Shell::Zsh),
+                    "fish" => Some(Shell::Fish),
+                    "elvish" => Some(Shell::Elvish),
+                    "powershell" | "powershell_ise" => Some(Shell::PowerShell),
+                    "nu" | "nushell" => Some(Shell::Nushell),
+                    _ => None,
+                };
+
+                assert_eq!(
+                    detected, manual_detection,
+                    "Detection mismatch for: {stem_str}"
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn test_shell_variant_exhaustive_coverage() {
+        // Test all shell variants comprehensively
+        use clap::ValueEnum;
+
+        let all_variants = Shell::value_variants();
+        assert_eq!(all_variants.len(), 6);
+
+        for &variant in all_variants {
+            // Test Display trait
+            let display_str = variant.to_string();
+            assert!(!display_str.is_empty());
+            assert!(!display_str.contains(' '));
+            assert!(
+                display_str
+                    .chars()
+                    .all(|c| c.is_ascii_lowercase() || c.is_ascii_alphabetic())
+            );
+
+            // Test FromStr roundtrip
+            let parsed = <Shell as std::str::FromStr>::from_str(&display_str).unwrap();
+            assert_eq!(variant, parsed);
+
+            // Test Debug trait
+            let debug_str = format!("{variant:?}");
+            assert!(!debug_str.is_empty());
+
+            // Test Clone trait
+            let cloned = variant;
+            assert_eq!(variant, cloned);
+
+            // Test Copy trait (implicit with Clone for Copy types)
+            let copied = variant;
+            assert_eq!(variant, copied);
+
+            // Test Hash trait
+            use std::collections::HashMap;
+            let mut map = HashMap::new();
+            map.insert(variant, format!("value for {variant:?}"));
+            assert!(map.contains_key(&variant));
+
+            // Test PartialEq
+            assert_eq!(variant, variant);
+
+            // Test Eq (implicit)
+            assert!(variant == variant);
+
+            // Test generator methods
+            let filename = variant.file_name("test");
+            assert!(!filename.is_empty());
+        }
+    }
+}
diff --git a/crates/rustowl/src/toolchain.rs b/crates/rustowl/src/toolchain.rs
new file mode 100644
index 00000000..3c23c688
--- /dev/null
+++ b/crates/rustowl/src/toolchain.rs
@@ -0,0 +1,1283 @@
+use std::env;
+use std::io::Read;
+use std::time::Duration;
+
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use std::sync::LazyLock;
+
+use flate2::read::GzDecoder;
+use tar::{Archive, EntryType};
+
+use tokio::fs::OpenOptions;
+use tokio::fs::{create_dir_all, read_to_string, remove_dir_all, rename};
+use tokio::io::AsyncWriteExt;
+
+pub const TOOLCHAIN: &str = env!("RUSTOWL_TOOLCHAIN");
+pub const HOST_TUPLE: &str = env!("HOST_TUPLE");
+const TOOLCHAIN_CHANNEL: &str = env!("TOOLCHAIN_CHANNEL");
+const TOOLCHAIN_DATE: Option<&str> = option_env!("TOOLCHAIN_DATE");
+
+pub static FALLBACK_RUNTIME_DIR: LazyLock<PathBuf> = LazyLock::new(|| {
+    let opt = PathBuf::from("/opt/rustowl");
+    if sysroot_from_runtime(&opt).is_dir() {
+        return opt;
+    }
+    let same = env::current_exe().unwrap().parent().unwrap().to_path_buf();
+    if sysroot_from_runtime(&same).is_dir() {
+        return same;
+    }
+    env::home_dir().unwrap().join(".rustowl")
+});
+
+fn recursive_read_dir(path: impl AsRef<Path>) -> Vec<PathBuf> {
+    let mut paths = Vec::new();
+    if path.as_ref().is_dir() {
+        for entry in std::fs::read_dir(&path).unwrap().flatten() {
+            let path = entry.path();
+            if path.is_dir() {
+                paths.extend_from_slice(&recursive_read_dir(&path));
+            } else {
+                paths.push(path);
+            }
+        }
+    }
+    paths
+}
+
+pub fn sysroot_from_runtime(runtime: impl AsRef<Path>) -> PathBuf {
+    runtime.as_ref().join("sysroot").join(TOOLCHAIN)
+}
+
+fn sysroot_looks_installed(sysroot: &Path) -> bool {
+    let rustc = if cfg!(windows) { "rustc.exe" } else { "rustc" };
+    let cargo = if cfg!(windows) { "cargo.exe" } else { "cargo" };
+
+    sysroot.join("bin").join(rustc).is_file()
+        && sysroot.join("bin").join(cargo).is_file()
+        && sysroot.join("lib").is_dir()
+}
+
+async fn get_runtime_dir() -> PathBuf {
+    let sysroot = sysroot_from_runtime(&*FALLBACK_RUNTIME_DIR);
+    if FALLBACK_RUNTIME_DIR.is_dir() && sysroot_looks_installed(&sysroot) {
+        return FALLBACK_RUNTIME_DIR.clone();
+    }
+
+    tracing::debug!("sysroot not found (or incomplete); start setup toolchain");
+    if let Err(e) = setup_toolchain(&*FALLBACK_RUNTIME_DIR, false).await {
+        tracing::error!("{e:?}");
+        std::process::exit(1);
+    }
+
+    FALLBACK_RUNTIME_DIR.clone()
+}
+
+pub async fn get_sysroot() -> PathBuf {
+    if let Ok(override_path) = env::var("RUSTOWL_SYSROOT") {
+        let override_path = PathBuf::from(override_path);
+        if override_path.is_dir() {
+            return override_path;
+        }
+    }
+
+    sysroot_from_runtime(get_runtime_dir().await)
+}
+
+const DOWNLOAD_CAP_BYTES: u64 = 2_000_000_000;
+
+#[derive(Clone, Copy, Debug)]
+struct DownloadCaps {
+    max_download: u64,
+    max_retries: usize,
+    retry_backoff: Duration,
+}
+
+impl DownloadCaps {
+    const DEFAULT: Self = Self {
+        max_download: DOWNLOAD_CAP_BYTES,
+        max_retries: 5,
+        retry_backoff: Duration::from_millis(250),
+    };
+}
+
+fn hash_url_for_filename(url: &str) -> String {
+    use std::hash::{Hash, Hasher};
+
+    let mut hasher = std::collections::hash_map::DefaultHasher::new();
+    url.hash(&mut hasher);
+    format!("{:016x}", hasher.finish())
+}
+
+fn spool_dir_for_runtime(runtime: &Path) -> PathBuf {
+    runtime.join(".rustowl-cache").join("downloads")
+}
+
+#[cfg(test)]
+mod unit_tests {
+    use super::*;
+
+    #[test]
+    fn hash_url_for_filename_is_stable_and_hex() {
+        let url = "https://example.com/archive.tar.gz";
+        let a = hash_url_for_filename(url);
+        let b = hash_url_for_filename(url);
+        assert_eq!(a, b);
+        assert_eq!(a.len(), 16);
+        assert!(
+            a.chars()
+                .all(|c| c.is_ascii_digit() || ('a'..='f').contains(&c))
+        );
+    }
+
+    #[test]
+    fn spool_dir_is_under_runtime_cache() {
+        let runtime = PathBuf::from("/tmp/rustowl-runtime");
+        assert_eq!(
+            spool_dir_for_runtime(&runtime),
+            runtime.join(".rustowl-cache").join("downloads")
+        );
+    }
+
+    #[test]
+    fn extracted_components_are_staged_under_spool_dir() {
+        let spool = Path::new("/home/user/.rustowl/.rustowl-cache/downloads");
+        assert_eq!(extract_base_dir_for_spool(spool), spool.join("extract"));
+    }
+
+    #[test]
+    fn sysroot_from_runtime_uses_toolchain_component() {
+        let runtime = PathBuf::from("/opt/rustowl");
+        assert_eq!(
+            sysroot_from_runtime(&runtime),
+            runtime.join("sysroot").join(TOOLCHAIN)
+        );
+    }
+
+    #[test]
+    fn sysroot_looks_installed_checks_expected_layout() {
+        let tmp = tempfile::tempdir().expect("tempdir");
+        let sysroot = tmp.path().join("sysroot");
+        std::fs::create_dir_all(sysroot.join("bin")).unwrap();
+        std::fs::create_dir_all(sysroot.join("lib")).unwrap();
+
+        let rustc = if cfg!(windows) { "rustc.exe" } else { "rustc" };
+        let cargo = if cfg!(windows) { "cargo.exe" } else { "cargo" };
+
+        assert!(!sysroot_looks_installed(&sysroot));
+
+        std::fs::write(sysroot.join("bin").join(rustc), "").unwrap();
+        assert!(!sysroot_looks_installed(&sysroot));
+
+        std::fs::write(sysroot.join("bin").join(cargo), "").unwrap();
+        assert!(sysroot_looks_installed(&sysroot));
+    }
+
+    #[test]
+    fn safe_join_tar_path_rejects_escape_attempts() {
+        let dest = Path::new("/safe/root");
+        assert!(safe_join_tar_path(dest, Path::new("../evil")).is_err());
+        assert!(safe_join_tar_path(dest, Path::new("/abs/path")).is_err());
+
+        let ok = safe_join_tar_path(dest, Path::new("dir/file.txt")).expect("ok");
+        assert_eq!(ok, dest.join("dir").join("file.txt"));
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)]
+    fn unpack_tarball_gz_skips_symlinks() {
+        use flate2::Compression;
+        use flate2::write::GzEncoder;
+        use tar::Builder;
+
+        let temp = tempfile::tempdir().expect("tempdir");
+        let dest = temp.path().join("out");
+        std::fs::create_dir_all(&dest).unwrap();
+
+        let mut tar_buf = Vec::new();
+        {
+            let gz = GzEncoder::new(&mut tar_buf, Compression::default());
+            let mut builder = Builder::new(gz);
+
+            let mut header = tar::Header::new_gnu();
+            header.set_entry_type(tar::EntryType::Symlink);
+            header.set_size(0);
+            header.set_cksum();
+            builder
+                .append_data(&mut header, "symlink", std::io::empty())
+                .unwrap();
+
+            let mut header = tar::Header::new_gnu();
+            header.set_entry_type(tar::EntryType::Regular);
+            header.set_size(4);
+            header.set_cksum();
+            builder
+                .append_data(&mut header, "dir/file.txt", "data".as_bytes())
+                .unwrap();
+
+            let gz = builder.into_inner().unwrap();
+            gz.finish().unwrap();
+        }
+
+        unpack_tarball_gz(std::io::Cursor::new(tar_buf), &dest).expect("unpack ok");
+
+        let extracted = dest.join("dir").join("file.txt");
+        assert!(extracted.exists());
+        assert_eq!(std::fs::read_to_string(extracted).unwrap(), "data");
+
+        assert!(!dest.join("symlink").exists());
+    }
+
+    #[test]
+    fn safe_join_tar_path_rejects_empty_and_dot_only_paths() {
+        let dest = Path::new("/safe/root");
+        assert!(safe_join_tar_path(dest, Path::new(".")).is_err());
+        assert!(safe_join_tar_path(dest, Path::new("././.")).is_err());
+        assert!(safe_join_tar_path(dest, Path::new("")).is_err());
+
+        let ok = safe_join_tar_path(dest, Path::new("./dir/./file.txt")).expect("ok");
+        assert_eq!(ok, dest.join("dir").join("file.txt"));
+    }
+
+    #[test]
+    fn unpack_tarball_gz_rejects_path_traversal_entry() {
+        // The tar crate itself rejects `..` and absolute paths at archive-build time,
+        // so we can't construct those invalid entries via `tar::Builder`.
+        // Instead, we validate `safe_join_tar_path` directly for those cases.
+        let dest = Path::new("/safe/root");
+        assert!(safe_join_tar_path(dest, Path::new("../evil.txt")).is_err());
+        assert!(safe_join_tar_path(dest, Path::new("/evil.txt")).is_err());
+    }
+}
+
+async fn download_with_resume(
+    url: &str,
+    spool_path: &Path,
+    caps: DownloadCaps,
+    progress: Option<indicatif::ProgressBar>,
+) -> Result<(), ()> {
+    static HTTP_CLIENT: std::sync::LazyLock<reqwest::Client> =
+        std::sync::LazyLock::new(reqwest::Client::new);
+
+    let mut existing = match tokio::fs::metadata(spool_path).await {
+        Ok(meta) => meta.len(),
+        Err(_) => 0,
+    };
+
+    if let Some(pb) = &progress {
+        pb.set_position(existing);
+        pb.set_message("Downloading...".to_string());
+    }
+
+    tracing::debug!(
+        "downloading {url} into {} (resume from {existing})",
+        spool_path.display()
+    );
+
+    // If we have a partial spool, validate Range support before resuming.
+ let mut resp = if existing > 0 { + let r = HTTP_CLIENT + .get(url) + .header(reqwest::header::RANGE, format!("bytes={existing}-")) + .send() + .await + .map_err(|e| { + tracing::error!("failed to download runtime archive"); + tracing::error!("{e:?}"); + })?; + + match r.status() { + reqwest::StatusCode::PARTIAL_CONTENT => r, + // Some servers respond 416 when the local file is already complete. + reqwest::StatusCode::RANGE_NOT_SATISFIABLE => { + tracing::debug!("range not satisfiable; treating spool as complete"); + if let Some(pb) = &progress { + pb.set_position(existing); + } + return Ok(()); + } + // Server ignored range; start fresh. + reqwest::StatusCode::OK => { + tracing::debug!("server did not honor range; restarting download"); + existing = 0; + let _ = tokio::fs::remove_file(spool_path).await; + HTTP_CLIENT + .get(url) + .send() + .await + .and_then(|v| v.error_for_status()) + .map_err(|e| { + tracing::error!("failed to download runtime archive"); + tracing::error!("{e:?}"); + })? + } + + other => { + tracing::error!("unexpected HTTP status for range request: {other}"); + return Err(()); + } + } + } else { + HTTP_CLIENT + .get(url) + .send() + .await + .and_then(|v| v.error_for_status()) + .map_err(|e| { + tracing::error!("failed to download runtime archive"); + tracing::error!("{e:?}"); + })? 
+ }; + + let mut downloaded = existing; + + loop { + let expected_total = match (downloaded, resp.content_length()) { + (0, Some(v)) => Some(v), + (n, Some(v)) => Some(n.saturating_add(v)), + _ => None, + }; + if matches!(expected_total, Some(v) if v > caps.max_download) { + tracing::error!("refusing to download {url}: size exceeds cap"); + return Err(()); + } + + if let (Some(pb), Some(total)) = (progress.as_ref(), expected_total) { + pb.set_length(total); + } + + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(spool_path) + .await + .map_err(|e| { + tracing::error!("failed to open download file {}: {e}", spool_path.display()); + })?; + + match stream_response_body( + url, + &mut resp, + &mut file, + &mut downloaded, + caps, + progress.as_ref(), + ) + .await + { + Ok(()) => { + file.flush().await.ok(); + tracing::debug!("download finished: {} bytes", downloaded); + return Ok(()); + } + Err(()) => { + // Retry loop: request from current offset. + // If this fails `max_retries` times, we error out. 
+ let mut attempt = 1usize; + loop { + if attempt > caps.max_retries { + tracing::error!("download failed after {} retries", caps.max_retries); + return Err(()); + } + + tokio::time::sleep(caps.retry_backoff * attempt as u32).await; + tracing::debug!("retrying download from byte {downloaded} (attempt {attempt})"); + + let r = HTTP_CLIENT + .get(url) + .header(reqwest::header::RANGE, format!("bytes={downloaded}-")) + .send() + .await + .and_then(|v| v.error_for_status()); + + match r { + Ok(v) if v.status() == reqwest::StatusCode::PARTIAL_CONTENT => { + resp = v; + break; + } + Ok(v) if v.status() == reqwest::StatusCode::OK => { + tracing::debug!( + "server ignored resume range; restarting download from 0" + ); + downloaded = 0; + let _ = tokio::fs::remove_file(spool_path).await; + resp = HTTP_CLIENT + .get(url) + .send() + .await + .and_then(|v| v.error_for_status()) + .map_err(|e| { + tracing::error!("failed to download runtime archive"); + tracing::error!("{e:?}"); + })?; + break; + } + Ok(v) => { + tracing::error!("server did not honor resume range: {}", v.status()); + return Err(()); + } + Err(e) => { + tracing::debug!("retry request failed: {e:?}"); + attempt += 1; + continue; + } + } + } + + // Continue outer loop with new response. + continue; + } + } + } +} + +async fn stream_response_body( + url: &str, + resp: &mut reqwest::Response, + file: &mut tokio::fs::File, + downloaded: &mut u64, + caps: DownloadCaps, + progress: Option<&indicatif::ProgressBar>, +) -> Result<(), ()> { + loop { + let chunk = match resp.chunk().await { + Ok(Some(chunk)) => chunk, + Ok(None) => break, + Err(e) => { + // Transient HTTP/2 resets happen in the wild (e.g. CDN/proxy). + // Treat as retryable so the caller can resume via Range. 
+ tracing::debug!("failed to read download chunk: {e:?}"); + return Err(()); + } + }; + + *downloaded = downloaded.saturating_add(chunk.len() as u64); + if *downloaded > caps.max_download { + tracing::error!("refusing to download {url}: exceeded size cap"); + return Err(()); + } + + file.write_all(&chunk).await.map_err(|e| { + tracing::error!("failed writing download chunk: {e}"); + })?; + + if let Some(pb) = progress { + pb.set_position(*downloaded); + } + } + + Ok(()) +} + +fn safe_join_tar_path(dest: &Path, path: &Path) -> Result { + use std::path::Component; + + let mut out = dest.to_path_buf(); + let mut pushed_any = false; + + for component in path.components() { + match component { + Component::Normal(part) => { + out.push(part); + pushed_any = true; + } + Component::CurDir => continue, + _ => return Err(()), + } + } + + if !pushed_any { + return Err(()); + } + + Ok(out) +} + +fn unpack_tarball_gz(reader: impl std::io::Read, dest: &Path) -> Result<(), ()> { + // basic DoS protection + const MAX_ENTRY_UNCOMPRESSED: u64 = DOWNLOAD_CAP_BYTES; + const MAX_TOTAL_UNCOMPRESSED: u64 = DOWNLOAD_CAP_BYTES; + + let decoder = GzDecoder::new(reader); + let mut archive = Archive::new(decoder); + + let mut total_uncompressed = 0u64; + for entry in archive.entries().map_err(|_| ())? { + let mut entry = entry.map_err(|_| ())?; + + let entry_type = entry.header().entry_type(); + match entry_type { + EntryType::Regular | EntryType::Directory => {} + // Be conservative: skip symlinks/hardlinks/devices. 
+ _ => { + continue; + } + } + + let path = entry.path().map_err(|_| ())?; + let out_path = safe_join_tar_path(dest, &path).map_err(|_| ())?; + + #[cfg(unix)] + let mode = entry.header().mode().ok(); + + if entry_type == EntryType::Directory { + std::fs::create_dir_all(&out_path).map_err(|_| ())?; + #[cfg(unix)] + if let Some(mode) = mode { + use std::os::unix::fs::PermissionsExt; + let _ = std::fs::set_permissions(&out_path, std::fs::Permissions::from_mode(mode)); + } + continue; + } + + if let Some(parent) = out_path.parent() { + std::fs::create_dir_all(parent).map_err(|_| ())?; + } + + let mut out = std::fs::File::create(&out_path).map_err(|_| ())?; + let mut limited = (&mut entry).take(MAX_ENTRY_UNCOMPRESSED.saturating_add(1)); + let written = std::io::copy(&mut limited, &mut out).map_err(|_| ())?; + + if written > MAX_ENTRY_UNCOMPRESSED { + return Err(()); + } + + #[cfg(unix)] + if let Some(mode) = mode { + use std::os::unix::fs::PermissionsExt; + let _ = std::fs::set_permissions(&out_path, std::fs::Permissions::from_mode(mode)); + } + + total_uncompressed = total_uncompressed.saturating_add(written); + if total_uncompressed > MAX_TOTAL_UNCOMPRESSED { + return Err(()); + } + } + + Ok(()) +} + +async fn download_tarball_and_extract( + url: &str, + dest: &Path, + spool_dir: &Path, + progress: Option, +) -> Result<(), ()> { + create_dir_all(spool_dir).await.map_err(|e| { + tracing::error!("failed to create spool dir {}: {e}", spool_dir.display()); + })?; + + if let Some(pb) = &progress { + pb.set_message("Downloading...".to_string()); + } + + let archive_path = spool_dir.join(format!("{}.tar.gz", hash_url_for_filename(url))); + + download_with_resume(url, &archive_path, DownloadCaps::DEFAULT, progress.clone()).await?; + + if let Some(pb) = &progress { + pb.set_message("Extracting...".to_string()); + } + + let dest = dest.to_path_buf(); + tokio::task::spawn_blocking(move || { + let file = std::fs::File::open(&archive_path).map_err(|_| ())?; + 
unpack_tarball_gz(file, &dest) + }) + .await + .map_err(|e| { + tracing::error!("failed to join unpack task: {e}"); + })? + .map_err(|_| { + tracing::error!("failed to unpack tarball"); + })?; + + if let Some(pb) = progress { + pb.finish_with_message("Installed"); + } + + Ok(()) +} + +#[cfg(target_os = "windows")] +fn safe_join_zip_path(dest: &Path, filename: &str) -> Result { + use std::path::Component; + + let path = Path::new(filename); + let mut out = dest.to_path_buf(); + let mut pushed_any = false; + + for component in path.components() { + match component { + Component::Normal(part) => { + out.push(part); + pushed_any = true; + } + Component::CurDir => continue, + _ => return Err(()), + } + } + + if !pushed_any { + return Err(()); + } + + Ok(out) +} + +#[cfg(target_os = "windows")] +async fn download_zip_and_extract( + url: &str, + dest: &Path, + spool_dir: &Path, + progress: Option, +) -> Result<(), ()> { + create_dir_all(spool_dir).await.map_err(|e| { + tracing::error!("failed to create spool dir {}: {e}", spool_dir.display()); + })?; + + if let Some(pb) = &progress { + pb.set_message("Downloading...".to_string()); + } + + let archive_path = spool_dir.join(format!("{}.zip", hash_url_for_filename(url))); + + download_with_resume(url, &archive_path, DownloadCaps::DEFAULT, progress.clone()).await?; + + if let Some(pb) = &progress { + pb.set_message("Extracting...".to_string()); + } + + let archive_path = archive_path.to_path_buf(); + let dest = dest.to_path_buf(); + tokio::task::spawn_blocking(move || { + use std::io::{Read as _, Write as _}; + + // basic DoS protection + const MAX_ENTRY_UNCOMPRESSED: u64 = DOWNLOAD_CAP_BYTES; + const MAX_TOTAL_UNCOMPRESSED: u64 = DOWNLOAD_CAP_BYTES; + + let file = std::fs::File::open(&archive_path).map_err(|e| { + tracing::error!("failed to open zip {}: {e}", archive_path.display()); + })?; + let reader = std::io::BufReader::new(file); + + let mut zip = zip::ZipArchive::new(reader).map_err(|e| { + tracing::error!("failed to 
read zip archive: {e}"); + })?; + + let mut total_uncompressed = 0u64; + + for i in 0..zip.len() { + let mut entry = zip.by_index(i).map_err(|e| { + tracing::error!("failed reading zip entry: {e}"); + })?; + + let name = entry.name().to_string(); + let out_path = safe_join_zip_path(&dest, &name)?; + + if name.ends_with('/') { + std::fs::create_dir_all(&out_path).map_err(|e| { + tracing::error!("failed creating dir {}: {e}", out_path.display()); + })?; + continue; + } + + if let Some(parent) = out_path.parent() { + std::fs::create_dir_all(parent).map_err(|e| { + tracing::error!("failed creating parent dir {}: {e}", parent.display()); + })?; + } + + // Guard against maliciously large entries. + let mut written_for_entry = 0u64; + let mut out = std::fs::File::create(&out_path).map_err(|e| { + tracing::error!("failed creating file {}: {e}", out_path.display()); + })?; + + let mut buf = [0u8; 32 * 1024]; + loop { + let n = entry.read(&mut buf).map_err(|e| { + tracing::error!("failed reading zip data: {e}"); + })?; + if n == 0 { + break; + } + + written_for_entry = written_for_entry.saturating_add(n as u64); + if written_for_entry > MAX_ENTRY_UNCOMPRESSED { + tracing::error!("zip entry exceeds size cap"); + return Err(()); + } + + total_uncompressed = total_uncompressed.saturating_add(n as u64); + if total_uncompressed > MAX_TOTAL_UNCOMPRESSED { + tracing::error!("zip total exceeds size cap"); + return Err(()); + } + + out.write_all(&buf[..n]).map_err(|e| { + tracing::error!("failed writing zip data: {e}"); + })?; + } + } + + Ok::<(), ()>(()) + }) + .await + .map_err(|e| { + tracing::error!("failed to join unpack task: {e}"); + })? 
+ .map_err(|_| { + tracing::error!("failed to unpack zip"); + })?; + + if let Some(pb) = progress { + pb.finish_with_message("Installed"); + } + + Ok(()) +} + +struct ExtractedComponent { + _tempdir: tempfile::TempDir, + extracted_root: PathBuf, +} + +fn extract_base_dir_for_spool(spool_dir: &Path) -> PathBuf { + spool_dir.join("extract") +} + +async fn fetch_component( + component: &str, + base_url: &str, + spool_dir: &Path, + progress: Option, +) -> Result { + // Avoid using OS temp directories (often tmpfs) because toolchain components + // are large and can quickly exhaust memory-backed storage. + let temp_path = extract_base_dir_for_spool(spool_dir); + if create_dir_all(&temp_path).await.is_err() { + tracing::error!("failed to create extraction directory"); + return Err(()); + } + + let tempdir = tempfile::Builder::new() + .prefix("rustowl-extract-") + .tempdir_in(&temp_path) + .map_err(|_| ())?; + let temp_path = tempdir.path().to_owned(); + tracing::debug!("temp dir is made: {}", temp_path.display()); + + let component_toolchain = format!("{component}-{TOOLCHAIN_CHANNEL}-{HOST_TUPLE}"); + let tarball_url = format!("{base_url}/{component_toolchain}.tar.gz"); + + download_tarball_and_extract(&tarball_url, &temp_path, spool_dir, progress).await?; + + Ok(ExtractedComponent { + _tempdir: tempdir, + extracted_root: temp_path.join(component_toolchain), + }) +} + +async fn install_extracted_component(extracted: ExtractedComponent, dest: &Path) -> Result<(), ()> { + let components = read_to_string(extracted.extracted_root.join("components")) + .await + .map_err(|_| { + tracing::error!("failed to read components list"); + })?; + let components = components.split_whitespace(); + + for component_name in components { + let component_path = extracted.extracted_root.join(component_name); + for from in recursive_read_dir(&component_path) { + let rel_path = match from.strip_prefix(&component_path) { + Ok(v) => v, + Err(e) => { + tracing::error!("path error: {e}"); + return 
Err(()); + } + }; + let to = dest.join(rel_path); + if let Err(e) = create_dir_all(to.parent().unwrap()).await { + tracing::error!("failed to create dir: {e}"); + return Err(()); + } + if let Err(e) = rename(&from, &to).await { + // This is expected when temp directories are on a different device (EXDEV). + tracing::debug!("file rename failed: {e}, falling back to copy and delete"); + if let Err(copy_err) = tokio::fs::copy(&from, &to).await { + tracing::error!("file copy error (after rename failure): {copy_err}"); + return Err(()); + } + if let Err(del_err) = tokio::fs::remove_file(&from).await { + tracing::error!("file delete error (after copy): {del_err}"); + return Err(()); + } + } + } + tracing::debug!("component {component_name} successfully installed"); + } + Ok(()) +} + +pub async fn setup_toolchain(dest: impl AsRef, skip_rustowl: bool) -> Result<(), ()> { + if skip_rustowl { + setup_rust_toolchain(&dest).await + } else { + tokio::try_join!(setup_rust_toolchain(&dest), setup_rustowl_toolchain(&dest)).map(|_| ()) + } +} + +pub async fn setup_rust_toolchain(dest: impl AsRef) -> Result<(), ()> { + use indicatif::{MultiProgress, ProgressBar, ProgressDrawTarget, ProgressStyle}; + use std::io::IsTerminal; + + let sysroot = sysroot_from_runtime(dest.as_ref()); + if create_dir_all(&sysroot).await.is_err() { + tracing::error!("failed to create toolchain directory"); + return Err(()); + } + + let dist_base = "https://static.rust-lang.org/dist"; + let base_url = match TOOLCHAIN_DATE { + Some(v) => format!("{dist_base}/{v}"), + None => dist_base.to_owned(), + }; + + tracing::debug!("start installing Rust toolchain..."); + + const COMPONENTS: [&str; 3] = ["rustc", "rust-std", "cargo"]; + + let spool_dir = spool_dir_for_runtime(dest.as_ref()); + + let mp = if std::io::stderr().is_terminal() { + Some(MultiProgress::with_draw_target(ProgressDrawTarget::stderr())) + } else { + None + }; + + // Ensure `tracing` output is routed through a progress bar so it doesn't + // 
corrupt the multi-progress rendering. + let _log_guard = mp.as_ref().map(|mp| { + let pb = mp.add(ProgressBar::hidden()); + crate::ActiveProgressBarGuard::set(pb) + }); + + let mut fetched = HashMap::<&'static str, ExtractedComponent>::new(); + let mut set = tokio::task::JoinSet::new(); + + for component in COMPONENTS { + let base_url = base_url.clone(); + let spool_dir = spool_dir.clone(); + + let pb: Option = mp.as_ref().map(|mp| { + let pb = mp.add(ProgressBar::new(0)); + pb.set_style( + ProgressStyle::with_template( + "{spinner:.green} {prefix:8} {msg:40} [{bar:40.cyan/blue}] {percent:>3}% ({bytes_per_sec:>10}, {eta:>6})", + ) + .unwrap(), + ); + pb.set_prefix(component.to_string()); + pb.set_message("Starting...".to_string()); + pb + }); + + set.spawn(async move { + let res = fetch_component(component, &base_url, &spool_dir, pb.clone()).await; + if let Some(pb) = pb { + match &res { + Ok(_) => pb.finish_with_message("Installed"), + Err(_) => pb.finish_with_message("Failed"), + } + } + (component, res) + }); + } + + while let Some(joined) = set.join_next().await { + match joined { + Ok((component, Ok(extracted))) => { + fetched.insert(component, extracted); + } + Ok((_component, Err(()))) => { + if let Some(mp) = &mp { + let _ = mp.clear(); + } + return Err(()); + } + Err(e) => { + tracing::error!("failed to join toolchain fetch task: {e}"); + if let Some(mp) = &mp { + let _ = mp.clear(); + } + return Err(()); + } + } + } + + let rustc = fetched.remove("rustc").ok_or(())?; + let rust_std = fetched.remove("rust-std").ok_or(())?; + let cargo = fetched.remove("cargo").ok_or(())?; + + install_extracted_component(rustc, &sysroot).await?; + install_extracted_component(rust_std, &sysroot).await?; + install_extracted_component(cargo, &sysroot).await?; + + if let Some(mp) = mp { + let _ = mp.clear(); + } + + tracing::debug!("installing Rust toolchain finished"); + Ok(()) +} + +pub async fn setup_rustowl_toolchain(dest: impl AsRef) -> Result<(), ()> { + 
+    tracing::debug!("start installing RustOwl toolchain...");
+
+    let spool_dir = spool_dir_for_runtime(dest.as_ref());
+
+    #[cfg(not(target_os = "windows"))]
+    let rustowl_toolchain_result = {
+        let rustowl_tarball_url = format!(
+            "https://github.com/cordx56/rustowl/releases/download/v{}/rustowl-{HOST_TUPLE}.tar.gz",
+            clap::crate_version!(),
+        );
+        download_tarball_and_extract(&rustowl_tarball_url, dest.as_ref(), &spool_dir, None).await
+    };
+
+    #[cfg(target_os = "windows")]
+    let rustowl_toolchain_result = {
+        let rustowl_zip_url = format!(
+            "https://github.com/cordx56/rustowl/releases/download/v{}/rustowl-{HOST_TUPLE}.zip",
+            clap::crate_version!(),
+        );
+        download_zip_and_extract(&rustowl_zip_url, dest.as_ref(), &spool_dir, None).await
+    };
+
+    if rustowl_toolchain_result.is_ok() {
+        tracing::debug!("installing RustOwl toolchain finished");
+    } else {
+        tracing::warn!(
+            "could not install RustOwl toolchain; local installed rustowlc will be used"
+        );
+    }
+
+    tracing::debug!("toolchain setup finished");
+    Ok(())
+}
+
+pub async fn uninstall_toolchain() {
+    let sysroot = sysroot_from_runtime(&*FALLBACK_RUNTIME_DIR);
+    if sysroot.is_dir() {
+        tracing::debug!("remove sysroot: {}", sysroot.display());
+        remove_dir_all(&sysroot).await.unwrap();
+    }
+}
+
+pub async fn get_executable_path(name: &str) -> String {
+    #[cfg(not(windows))]
+    let exec_name = name.to_owned();
+    #[cfg(windows)]
+    let exec_name = format!("{name}.exe");
+
+    // Allow overriding specific tool paths for dev/bench setups.
+    // Example: `RUSTOWL_RUSTOWLC_PATH=/path/to/rustowlc`.
+    let override_key = format!("RUSTOWL_{}_PATH", name.to_ascii_uppercase());
+    if let Ok(path) = env::var(&override_key) {
+        let path = PathBuf::from(path);
+        if path.is_file() {
+            tracing::debug!("{name} is selected via {override_key}");
+            return path.to_string_lossy().to_string();
+        }
+    }
+
+    let sysroot = get_sysroot().await;
+    let exec_bin = sysroot.join("bin").join(&exec_name);
+    if exec_bin.is_file() {
+        tracing::debug!("{name} is selected in sysroot/bin");
+        return exec_bin.to_string_lossy().to_string();
+    }
+
+    let mut current_exec = env::current_exe().unwrap();
+    current_exec.set_file_name(&exec_name);
+    if current_exec.is_file() {
+        tracing::debug!("{name} is selected in the same directory as rustowl executable");
+        return current_exec.to_string_lossy().to_string();
+    }
+
+    // When running benches/tests inside a cargo workspace, the binary might live under the
+    // workspace root `target/{debug,release}` while the current executable is in
+    // `target/{debug,release}/deps`.
+    let mut candidate_roots = Vec::new();
+    if let Ok(cwd) = env::current_dir() {
+        candidate_roots.push(cwd);
+    }
+    if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") {
+        let dir = PathBuf::from(dir);
+        candidate_roots.push(dir.clone());
+        // Prefer the workspace root when the crate lives under `crates/`.
+        if let Some(root) = dir.ancestors().nth(2) {
+            candidate_roots.push(root.to_path_buf());
+        }
+    }
+
+    // Respect cargo's configured target dir (used by cargo-llvm-cov).
+    // Note: `CARGO_TARGET_DIR` already points to the *target directory* (not the workspace root).
+    let cargo_target_dir = env::var("CARGO_TARGET_DIR").ok().map(PathBuf::from);
+
+    for root in candidate_roots {
+        let candidate = root.join("target").join("debug").join(&exec_name);
+        if candidate.is_file() {
+            tracing::debug!("{name} is selected in {}", candidate.display());
+            return candidate.to_string_lossy().to_string();
+        }
+
+        let candidate = root.join("target").join("release").join(&exec_name);
+        if candidate.is_file() {
+            tracing::debug!("{name} is selected in {}", candidate.display());
+            return candidate.to_string_lossy().to_string();
+        }
+    }
+
+    if let Some(dir) = cargo_target_dir {
+        let candidate = dir.join("debug").join(&exec_name);
+        if candidate.is_file() {
+            tracing::debug!("{name} is selected in {}", candidate.display());
+            return candidate.to_string_lossy().to_string();
+        }
+
+        let candidate = dir.join("release").join(&exec_name);
+        if candidate.is_file() {
+            tracing::debug!("{name} is selected in {}", candidate.display());
+            return candidate.to_string_lossy().to_string();
+        }
+    }
+
+    tracing::warn!("{name} not found; fallback");
+    exec_name.to_owned()
+}
+
+pub async fn setup_cargo_command(rustc_threads: usize) -> tokio::process::Command {
+    let cargo = get_executable_path("cargo").await;
+    let mut command = tokio::process::Command::new(&cargo);
+    let rustowlc = get_executable_path("rustowlc").await;
+
+    // check user set flags
+    let delimiter = 0x1f as char;
+    let rustflags = env::var("RUSTFLAGS")
+        .unwrap_or("".to_string())
+        .split_whitespace()
+        .fold("".to_string(), |acc, x| format!("{acc}{delimiter}{x}"));
+    let mut encoded_flags = env::var("CARGO_ENCODED_RUSTFLAGS")
+        .map(|v| format!("{v}{delimiter}"))
+        .unwrap_or("".to_string());
+    if 1 < rustc_threads {
+        encoded_flags = format!("-Z{delimiter}threads={rustc_threads}{delimiter}{encoded_flags}");
+    }
+
+    let sysroot = get_sysroot().await;
+    command
+        .env("RUSTC", &rustowlc)
+        .env("RUSTC_WORKSPACE_WRAPPER", &rustowlc)
+        .env(
+            "CARGO_ENCODED_RUSTFLAGS",
+            format!(
+                "{}--sysroot={}{}",
+                encoded_flags,
+                sysroot.display(),
+                rustflags
+            ),
+        );
+    set_rustc_env(&mut command, &sysroot);
+    command
+}
+
+/// Configure environment variables on a Command so Rust invocations use the given sysroot.
+///
+/// Sets:
+/// - `RUSTC_BOOTSTRAP = "1"` to allow nightly-only features when invoking rustc.
+/// - `CARGO_ENCODED_RUSTFLAGS = "--sysroot={sysroot}"` so cargo/rustc use the provided sysroot.
+/// - On Linux: prepends `{sysroot}/lib` to `LD_LIBRARY_PATH`.
+/// - On macOS: prepends `{sysroot}/lib` to `DYLD_FALLBACK_LIBRARY_PATH`.
+/// - On Windows: prepends `{sysroot}/bin` to `Path`.
+///
+/// The provided `command` is mutated in place.
+///
+/// # Examples
+///
+/// ```
+/// use std::path::Path;
+/// use tokio::process::Command;
+/// use rustowl::toolchain;
+///
+/// let sysroot = Path::new("/opt/rust/sysroot");
+/// let mut cmd = Command::new("cargo");
+/// toolchain::set_rustc_env(&mut cmd, sysroot);
+/// // cmd is now configured to invoke cargo/rustc with the given sysroot.
+/// ```
+pub fn set_rustc_env(command: &mut tokio::process::Command, sysroot: &Path) {
+    command.env("RUSTC_BOOTSTRAP", "1"); // Support nightly projects
+
+    #[cfg(target_os = "linux")]
+    {
+        let mut paths = env::split_paths(&env::var("LD_LIBRARY_PATH").unwrap_or("".to_owned()))
+            .collect::<std::collections::VecDeque<_>>();
+        paths.push_front(sysroot.join("lib"));
+        let paths = env::join_paths(paths).unwrap();
+        command.env("LD_LIBRARY_PATH", paths);
+    }
+    #[cfg(target_os = "macos")]
+    {
+        let mut paths =
+            env::split_paths(&env::var("DYLD_FALLBACK_LIBRARY_PATH").unwrap_or("".to_owned()))
+                .collect::<std::collections::VecDeque<_>>();
+        paths.push_front(sysroot.join("lib"));
+        let paths = env::join_paths(paths).unwrap();
+        command.env("DYLD_FALLBACK_LIBRARY_PATH", paths);
+    }
+    #[cfg(target_os = "windows")]
+    {
+        let mut paths = env::split_paths(&env::var_os("Path").unwrap())
+            .collect::<std::collections::VecDeque<_>>();
+        paths.push_front(sysroot.join("bin"));
+        let paths = env::join_paths(paths).unwrap();
+        command.env("Path", paths);
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::collections::BTreeMap;
+    use std::path::PathBuf;
+
+    #[test]
+    fn test_sysroot_from_runtime() {
+        let runtime = PathBuf::from("/opt/test-runtime");
+        let sysroot = sysroot_from_runtime(&runtime);
+
+        let expected = runtime.join("sysroot").join(TOOLCHAIN);
+        assert_eq!(sysroot, expected);
+    }
+
+    #[test]
+    fn set_rustc_env_sets_bootstrap_and_sysroot_flags() {
+        let sysroot = PathBuf::from("/opt/rust/sysroot");
+        let mut cmd = tokio::process::Command::new("cargo");
+        set_rustc_env(&mut cmd, &sysroot);
+
+        let envs: BTreeMap<String, String> = cmd
+            .as_std()
+            .get_envs()
+            .filter_map(|(key, value)| {
+                Some((
+                    key.to_string_lossy().to_string(),
+                    value?.to_string_lossy().to_string(),
+                ))
+            })
+            .collect();
+
+        assert_eq!(envs.get("RUSTC_BOOTSTRAP").map(String::as_str), Some("1"));
+
+        #[cfg(target_os = "linux")]
+        {
+            let lib = sysroot.join("lib").to_string_lossy().to_string();
+            assert!(
+                envs.get("LD_LIBRARY_PATH")
+                    .is_some_and(|v| v.contains(lib.as_str()))
+            );
+        }
+        #[cfg(target_os = "macos")]
+        {
+            let lib = sysroot.join("lib").to_string_lossy().to_string();
+            assert!(
+                envs.get("DYLD_FALLBACK_LIBRARY_PATH")
+                    .is_some_and(|v| v.contains(lib.as_str()))
+            );
+        }
+        #[cfg(target_os = "windows")]
+        {
+            let bin = sysroot.join("bin").to_string_lossy().to_string();
+            assert!(envs.get("Path").is_some_and(|v| v.contains(bin.as_str())));
+        }
+    }
+
+    use crate::async_test;
+
+    async_test!(setup_cargo_command_encodes_threads_and_sysroot, async {
+        let sysroot = get_sysroot().await;
+        let cmd = setup_cargo_command(4).await;
+
+        let envs: BTreeMap<String, String> = cmd
+            .as_std()
+            .get_envs()
+            .filter_map(|(key, value)| {
+                Some((
+                    key.to_string_lossy().to_string(),
+                    value?.to_string_lossy().to_string(),
+                ))
+            })
+            .collect();
+
+        assert_eq!(
+            envs.get("RUSTC_WORKSPACE_WRAPPER").map(String::as_str),
+            envs.get("RUSTC").map(String::as_str)
+        );
+
+        let encoded = envs
+            .get("CARGO_ENCODED_RUSTFLAGS")
+            .expect("CARGO_ENCODED_RUSTFLAGS set by setup_cargo_command");
+        assert!(encoded.contains("-Z\u{1f}threads=4\u{1f}"));
+        assert!(encoded.contains(&format!("--sysroot={}", sysroot.display())));
+
+        assert_eq!(envs.get("RUSTC_BOOTSTRAP").map(String::as_str), Some("1"));
+    });
+
+    #[test]
+    fn setup_cargo_command_preserves_user_rustflags_in_encoded_string() {
+        let delimiter = 0x1f as char;
+
+        let user_rustflags = "-C debuginfo=2";
+        let rustflags = user_rustflags
+            .split_whitespace()
+            .fold(String::new(), |acc, x| format!("{acc}{delimiter}{x}"));
+
+        let user_encoded = "--cfg".to_owned() + &delimiter.to_string() + "from_user";
+        let mut encoded_flags = format!("{user_encoded}{delimiter}");
+
+        let rustc_threads = 4;
+        if 1 < rustc_threads {
+            encoded_flags =
+                format!("-Z{delimiter}threads={rustc_threads}{delimiter}{encoded_flags}");
+        }
+
+        let sysroot = PathBuf::from("/opt/rust/sysroot");
+        let mut cmd = tokio::process::Command::new("cargo");
+        cmd.env(
+            "CARGO_ENCODED_RUSTFLAGS",
+            format!(
+                "{}--sysroot={}{}",
+                encoded_flags,
+                sysroot.display(),
+                rustflags
+            ),
+        );
+
+        let envs: BTreeMap<String, String> = cmd
+            .as_std()
+            .get_envs()
+            .filter_map(|(key, value)| {
+                Some((
+                    key.to_string_lossy().to_string(),
+                    value?.to_string_lossy().to_string(),
+                ))
+            })
+            .collect();
+
+        let encoded = envs.get("CARGO_ENCODED_RUSTFLAGS").unwrap();
+        assert!(encoded.contains("--cfg\u{1f}from_user\u{1f}"));
+        assert!(encoded.contains("\u{1f}-C\u{1f}debuginfo=2"));
+    }
+}
diff --git a/crates/rustowl/src/utils.rs b/crates/rustowl/src/utils.rs
new file mode 100644
index 00000000..e6d75ba4
--- /dev/null
+++ b/crates/rustowl/src/utils.rs
@@ -0,0 +1,787 @@
+//! Utility functions for range manipulation and MIR analysis.
+//!
+//! This module provides core algorithms for working with source code ranges,
+//! merging overlapping ranges, and providing visitor patterns for MIR traversal.
+
+use crate::models::{
+    Function, Loc, MirDecl, MirStatement, MirTerminator, Range, RangeVec, range_vec_into_vec,
+};
+
+/// Determines if one range completely contains another range.
+///
+/// A range `r1` is a super range of `r2` if `r1` completely encompasses `r2`.
+/// This means `r1` starts before or at the same position as `r2` and ends
+/// after or at the same position as `r2`, with at least one strict inequality.
+pub fn is_super_range(r1: Range, r2: Range) -> bool {
+    (r1.from() < r2.from() && r2.until() <= r1.until())
+        || (r1.from() <= r2.from() && r2.until() < r1.until())
+}
+
+/// Finds the overlapping portion of two ranges.
+///
+/// Returns the intersection of two ranges if they overlap, or `None` if
+/// they don't intersect.
+pub fn common_range(r1: Range, r2: Range) -> Option<Range> {
+    if r2.from() < r1.from() {
+        return common_range(r2, r1);
+    }
+    if r1.until() < r2.from() {
+        return None;
+    }
+    let from = r2.from();
+    let until = r1.until().min(r2.until());
+    Range::new(from, until)
+}
+
+/// Finds all pairwise intersections among a collection of ranges.
+///
+/// Returns a vector of ranges representing all overlapping regions
+/// between pairs of input ranges, with overlapping regions merged.
+pub fn common_ranges(ranges: &[Range]) -> Vec<Range> {
+    let mut common_ranges = Vec::new();
+    for i in 0..ranges.len() {
+        for j in i + 1..ranges.len() {
+            if let Some(common) = common_range(ranges[i], ranges[j]) {
+                common_ranges.push(common);
+            }
+        }
+    }
+    eliminated_ranges(common_ranges)
+}
+
+/// Merges two ranges into their superset if they overlap or are adjacent.
+///
+/// Returns a single range that encompasses both input ranges if they
+/// overlap or are directly adjacent. Returns `None` if they are disjoint.
+pub fn merge_ranges(r1: Range, r2: Range) -> Option<Range> {
+    if common_range(r1, r2).is_some() || r1.until() == r2.from() || r2.until() == r1.from() {
+        let from = r1.from().min(r2.from());
+        let until = r1.until().max(r2.until());
+        Range::new(from, until)
+    } else {
+        None
+    }
+}
+
+/// Eliminates overlapping and adjacent ranges by merging them.
+pub fn eliminated_ranges(mut ranges: Vec<Range>) -> Vec<Range> {
+    if ranges.len() <= 1 {
+        return ranges;
+    }
+    // Sort by start, then end
+    ranges.sort_by_key(|r| (r.from().0, r.until().0));
+    let mut merged: Vec<Range> = Vec::with_capacity(ranges.len());
+    let mut current = ranges[0];
+    for r in ranges.into_iter().skip(1) {
+        if r.from().0 <= current.until().0 {
+            // Overlapping or adjacent
+            if r.until().0 > current.until().0 {
+                current = Range::new(current.from(), r.until()).unwrap();
+            }
+        } else {
+            merged.push(current);
+            current = r;
+        }
+    }
+    merged.push(current);
+    merged
+}
+
+/// Version of [`eliminated_ranges`] that works with `RangeVec`.
+pub fn eliminated_ranges_small(ranges: RangeVec) -> Vec<Range> {
+    eliminated_ranges(range_vec_into_vec(ranges))
+}
+
+/// Subtracts exclude ranges from a set of ranges.
+///
+/// For each range in `from`, removes any portions that overlap with
+/// ranges in `excludes`. If a range is partially excluded, it may be
+/// split into multiple smaller ranges.
+pub fn exclude_ranges(from: Vec<Range>, excludes: Vec<Range>) -> Vec<Range> {
+    let mut from = from;
+    let mut i = 0;
+    'outer: while i < from.len() {
+        let mut j = 0;
+        while j < excludes.len() {
+            if let Some(common) = common_range(from[i], excludes[j]) {
+                if let Some(r) = Range::new(from[i].from(), common.from() - 1) {
+                    from.push(r);
+                }
+                if let Some(r) = Range::new(common.until() + 1, from[i].until()) {
+                    from.push(r);
+                }
+                from.remove(i);
+                continue 'outer;
+            }
+            j += 1;
+        }
+        i += 1;
+    }
+    eliminated_ranges(from)
+}
+
+/// Version of [`exclude_ranges`] that works with `RangeVec`.
+pub fn exclude_ranges_small(from: RangeVec, excludes: Vec<Range>) -> Vec<Range> {
+    exclude_ranges(range_vec_into_vec(from), excludes)
+}
+
+/// Visitor trait for traversing MIR (Mid-level IR) structures.
+///
+/// Provides a flexible pattern for implementing analysis passes over
+/// MIR functions by visiting different components in a structured way.
+pub trait MirVisitor {
+    /// Called when visiting a function.
+    fn visit_func(&mut self, _func: &Function) {}
+    /// Called when visiting a variable declaration.
+    fn visit_decl(&mut self, _decl: &MirDecl) {}
+    /// Called when visiting a statement.
+    fn visit_stmt(&mut self, _stmt: &MirStatement) {}
+    /// Called when visiting a terminator.
+    fn visit_term(&mut self, _term: &MirTerminator) {}
+}
+
+/// Traverses a MIR function using the visitor pattern.
+///
+/// Calls the appropriate visitor methods for each component of the function
+/// in a structured order: function, declarations, statements, terminators.
+pub fn mir_visit(func: &Function, visitor: &mut impl MirVisitor) {
+    visitor.visit_func(func);
+    for decl in &func.decls {
+        visitor.visit_decl(decl);
+    }
+    for bb in &func.basic_blocks {
+        for stmt in &bb.statements {
+            visitor.visit_stmt(stmt);
+        }
+        if let Some(term) = &bb.terminator {
+            visitor.visit_term(term);
+        }
+    }
+}
+
+/// Precomputed mapping from *normalized* byte offsets to `Loc`.
+///
+/// `rustc` byte positions behave as if `\r` bytes do not exist in the source.
+/// `Loc` is a *logical character index* where `\r` is ignored too.
+#[derive(Debug, Clone)]
+pub struct NormalizedByteCharIndex {
+    kind: NormalizedByteCharIndexKind,
+}
+
+#[derive(Debug, Clone)]
+enum NormalizedByteCharIndexKind {
+    /// ASCII without CR: logical char index == byte index.
+    AsciiNoCr { len_bytes: u32 },
+    /// General case: `ends[i]` is the normalized byte offset at the end of char i.
+    General { ends: Vec<u32>, len_bytes: u32 },
+}
+
+impl NormalizedByteCharIndex {
+    pub fn new(source: &str) -> Self {
+        if source.is_ascii() && !source.as_bytes().contains(&b'\r') {
+            return Self {
+                kind: NormalizedByteCharIndexKind::AsciiNoCr {
+                    len_bytes: source.len().min(u32::MAX as usize) as u32,
+                },
+            };
+        }
+
+        let mut ends = Vec::with_capacity(source.len().min(1024));
+        let mut normalized = 0u32;
+
+        for ch in source.chars() {
+            if ch == '\r' {
+                continue;
+            }
+            normalized = normalized.saturating_add(ch.len_utf8() as u32);
+            ends.push(normalized);
+        }
+
+        Self {
+            kind: NormalizedByteCharIndexKind::General {
+                ends,
+                len_bytes: normalized,
+            },
+        }
+    }
+
+    /// Convert a normalized byte offset (CR bytes excluded) to a logical `Loc`.
+    pub fn loc_from_normalized_byte_pos(&self, byte_pos: u32) -> crate::models::Loc {
+        match &self.kind {
+            NormalizedByteCharIndexKind::AsciiNoCr { len_bytes } => {
+                crate::models::Loc(byte_pos.min(*len_bytes))
+            }
+            NormalizedByteCharIndexKind::General { ends, len_bytes } => {
+                let clamped = byte_pos.min(*len_bytes);
+                let n = ends.partition_point(|&end| end <= clamped);
+                crate::models::Loc(n.min(u32::MAX as usize) as u32)
+            }
+        }
+    }
+
+    /// Equivalent to `Loc::new(source, byte_pos, offset)`, but uses this index.
+    pub fn loc_from_byte_pos(&self, byte_pos: u32, offset: u32) -> crate::models::Loc {
+        self.loc_from_normalized_byte_pos(byte_pos.saturating_sub(offset))
+    }
+
+    pub fn normalized_len_bytes(&self) -> u32 {
+        match &self.kind {
+            NormalizedByteCharIndexKind::AsciiNoCr { len_bytes } => *len_bytes,
+            NormalizedByteCharIndexKind::General { len_bytes, .. } => *len_bytes,
+        }
+    }
+
+    pub fn eof(&self) -> crate::models::Loc {
+        match &self.kind {
+            NormalizedByteCharIndexKind::AsciiNoCr { len_bytes } => crate::models::Loc(*len_bytes),
+            NormalizedByteCharIndexKind::General { ends, .. } => {
+                crate::models::Loc(ends.len().min(u32::MAX as usize) as u32)
+            }
+        }
+    }
+}
+
+/// Precomputed line/column mapping for a source string.
+///
+/// `Loc` is a *logical character index* where `\r` is ignored. Building this
+/// index once and reusing it avoids repeatedly scanning the whole file when
+/// converting many ranges (e.g. LSP decorations).
+#[derive(Debug, Clone)]
+pub struct LineCharIndex {
+    // For each line i, the logical char-index at the start of that line.
+    // Always non-empty (line 0 starts at index 0).
+    line_starts: Vec<u32>,
+    eof: u32,
+}
+
+impl LineCharIndex {
+    pub fn new(source: &str) -> Self {
+        // ASCII without CR means logical char-index == byte index.
+        // We still store logical char-indexes, which match bytes in this case.
+ if source.is_ascii() && !source.as_bytes().contains(&b'\r') { + let mut line_starts = Vec::with_capacity(128); + line_starts.push(0); + for (i, b) in source.as_bytes().iter().enumerate() { + if *b == b'\n' { + // newline is a logical character (included), next line starts after it + let next = (i + 1) as u32; + line_starts.push(next); + } + } + return Self { + line_starts, + eof: source.len() as u32, + }; + } + + // Fallback: scan chars once, skipping CR. + let mut line_starts = Vec::with_capacity(128); + line_starts.push(0); + + let mut logical_idx = 0u32; + for ch in source.chars() { + if ch == '\r' { + continue; + } + logical_idx = logical_idx.saturating_add(1); + // newline is a logical character; next line starts after it + if ch == '\n' { + line_starts.push(logical_idx); + } + } + + Self { + line_starts, + eof: logical_idx, + } + } + + pub fn index_to_line_char(&self, idx: Loc) -> (u32, u32) { + let target = idx.0; + // Find the last line start <= target. + let line = match self.line_starts.binary_search(&target) { + Ok(i) => i, + Err(0) => 0, + Err(i) => i - 1, + }; + + let line_start = self.line_starts[line]; + let col = target.saturating_sub(line_start); + (line as u32, col) + } + + pub fn line_char_to_index(&self, line: u32, character: u32) -> u32 { + let Some(&line_start) = self.line_starts.get(line as usize) else { + // Best-effort: out-of-range line maps to EOF. + return self.eof; + }; + + let target = line_start.saturating_add(character); + + // Best effort + let next_line_start = self + .line_starts + .get(line as usize + 1) + .copied() + .unwrap_or(self.eof); + if target >= next_line_start { + return self.eof; + } + + target + } + + pub fn eof(&self) -> u32 { + self.eof + } +} + +/// Returns the byte offsets at the start of each line. +/// +/// The returned vector always starts with `0` for line 0. 
+pub fn line_start_bytes(source: &str) -> Vec<u32> {
+    use memchr::memchr_iter;
+
+    let mut starts = Vec::with_capacity(128);
+    starts.push(0);
+    for nl in memchr_iter(b'\n', source.as_bytes()) {
+        let next = (nl + 1).min(u32::MAX as usize) as u32;
+        starts.push(next);
+    }
+    starts
+}
+
+fn utf16_col_to_byte_offset(line: &str, character: u32) -> usize {
+    if character == 0 {
+        return 0;
+    }
+
+    let mut units = 0u32;
+    for (byte_idx, ch) in line.char_indices() {
+        if units >= character {
+            return byte_idx;
+        }
+        units = units.saturating_add(ch.len_utf16() as u32);
+    }
+    line.len()
+}
+
+/// Convert an LSP (line, UTF-16 column) position to a byte offset.
+///
+/// This is best-effort: if the position is out of range it clamps to EOF.
+pub fn line_utf16_to_byte_offset(
+    source: &str,
+    line_start_bytes: &[u32],
+    line: u32,
+    character: u32,
+) -> usize {
+    let Some(&start) = line_start_bytes.get(line as usize) else {
+        return source.len();
+    };
+    let start = start as usize;
+
+    let end = line_start_bytes
+        .get(line as usize + 1)
+        .map(|v| *v as usize)
+        .unwrap_or(source.len());
+
+    let end = end.min(source.len());
+    let start = start.min(end);
+
+    let within_line = utf16_col_to_byte_offset(&source[start..end], character);
+    start + within_line
+}
+
+/// Converts a character index to line and column numbers.
+///
+/// Given a source string and character index, returns the corresponding
+/// line and column position. Handles CR characters consistently with
+/// the Rust compiler by ignoring them.
+///
+/// For repeated conversions on the same `source` (e.g. mapping many
+/// decorations), prefer building a `LineCharIndex` once.
+pub fn index_to_line_char(s: &str, idx: Loc) -> (u32, u32) {
+    use memchr::memchr_iter;
+
+    let target = idx.0;
+    let mut line = 0u32;
+    let mut col = 0u32;
+    let mut logical_idx = 0u32; // counts chars excluding CR
+    let mut seg_start = 0usize;
+
+    // Scan newline boundaries quickly, counting chars inside each segment.
+ for nl in memchr_iter(b'\n', s.as_bytes()) { + for ch in s[seg_start..=nl].chars() { + if ch == '\r' { + continue; + } + if logical_idx == target { + return (line, col); + } + if ch == '\n' { + line += 1; + col = 0; + } else { + col += 1; + } + logical_idx += 1; + } + seg_start = nl + 1; + if logical_idx > target { + break; + } + } + + if logical_idx <= target { + for ch in s[seg_start..].chars() { + if ch == '\r' { + continue; + } + if logical_idx == target { + return (line, col); + } + if ch == '\n' { + line += 1; + col = 0; + } else { + col += 1; + } + logical_idx += 1; + } + } + + (line, col) +} + +/// Converts line and column numbers to a character index. +/// +/// Given a source string, line number, and column number, returns the +/// corresponding character index. Handles CR characters consistently +/// with the Rust compiler by ignoring them. +/// +/// For repeated conversions on the same `source` (e.g. mapping many +/// cursor positions), prefer building a `LineCharIndex` once. 
+pub fn line_char_to_index(s: &str, mut line: u32, char: u32) -> u32 { + use memchr::memchr_iter; + + let mut consumed = 0u32; // logical chars excluding CR + let mut seg_start = 0usize; + + for nl in memchr_iter(b'\n', s.as_bytes()) { + if line == 0 { + break; + } + for ch in s[seg_start..=nl].chars() { + if ch == '\r' { + continue; + } + consumed += 1; + } + seg_start = nl + 1; + line -= 1; + } + + if line > 0 { + for ch in s[seg_start..].chars() { + if ch == '\r' { + continue; + } + consumed += 1; + } + return consumed; // best effort if line exceeds file + } + + let mut col_count = 0u32; + for ch in s[seg_start..].chars() { + if ch == '\r' { + continue; + } + if col_count == char { + return consumed; + } + consumed += 1; + col_count += 1; + } + consumed +} + +pub fn get_default_parallel_count() -> usize { + num_cpus::get_physical() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::models::*; + + #[test] + fn test_is_super_range() { + let r1 = Range::new(Loc(0), Loc(10)).unwrap(); + let r2 = Range::new(Loc(2), Loc(8)).unwrap(); + let r3 = Range::new(Loc(5), Loc(15)).unwrap(); + + assert!(is_super_range(r1, r2)); // r1 contains r2 + assert!(!is_super_range(r2, r1)); // r2 doesn't contain r1 + assert!(!is_super_range(r1, r3)); // r1 doesn't fully contain r3 + assert!(!is_super_range(r3, r1)); // r3 doesn't contain r1 + } + + #[test] + fn test_common_range() { + let r1 = Range::new(Loc(0), Loc(10)).unwrap(); + let r2 = Range::new(Loc(5), Loc(15)).unwrap(); + let r3 = Range::new(Loc(20), Loc(30)).unwrap(); + + // Overlapping ranges + let common = common_range(r1, r2).unwrap(); + assert_eq!(common.from(), Loc(5)); + assert_eq!(common.until(), Loc(10)); + + // Non-overlapping ranges + assert!(common_range(r1, r3).is_none()); + + // Order shouldn't matter + let common2 = common_range(r2, r1).unwrap(); + assert_eq!(common, common2); + } + + #[test] + fn test_merge_ranges() { + let r1 = Range::new(Loc(0), Loc(10)).unwrap(); + let r2 = Range::new(Loc(5), 
Loc(15)).unwrap(); + let r3 = Range::new(Loc(10), Loc(20)).unwrap(); // Adjacent + let r4 = Range::new(Loc(25), Loc(30)).unwrap(); // Disjoint + + // Overlapping ranges should merge + let merged = merge_ranges(r1, r2).unwrap(); + assert_eq!(merged.from(), Loc(0)); + assert_eq!(merged.until(), Loc(15)); + + // Adjacent ranges should merge + let merged = merge_ranges(r1, r3).unwrap(); + assert_eq!(merged.from(), Loc(0)); + assert_eq!(merged.until(), Loc(20)); + + // Disjoint ranges shouldn't merge + assert!(merge_ranges(r1, r4).is_none()); + } + + #[test] + fn test_eliminated_ranges() { + let ranges = vec![ + Range::new(Loc(0), Loc(10)).unwrap(), + Range::new(Loc(5), Loc(15)).unwrap(), + Range::new(Loc(12), Loc(20)).unwrap(), + Range::new(Loc(25), Loc(30)).unwrap(), + ]; + + let eliminated = eliminated_ranges(ranges); + assert_eq!(eliminated.len(), 2); + + // Should have merged the overlapping ranges + assert!( + eliminated + .iter() + .any(|r| r.from() == Loc(0) && r.until() == Loc(20)) + ); + assert!( + eliminated + .iter() + .any(|r| r.from() == Loc(25) && r.until() == Loc(30)) + ); + } + + #[test] + fn test_exclude_ranges() { + let from = vec![Range::new(Loc(0), Loc(20)).unwrap()]; + let excludes = vec![Range::new(Loc(5), Loc(15)).unwrap()]; + + let result = exclude_ranges(from, excludes); + + // Should split the original range around the exclusion + assert_eq!(result.len(), 2); + assert!( + result + .iter() + .any(|r| r.from() == Loc(0) && r.until() == Loc(4)) + ); + assert!( + result + .iter() + .any(|r| r.from() == Loc(16) && r.until() == Loc(20)) + ); + } + + #[test] + fn test_index_to_line_char_edge_cases() { + let source = "line1\nline2\nline3"; + + // Test position at line start + let (line, col) = index_to_line_char(source, Loc(6)); // Start of "line2" + assert_eq!(line, 1); + assert_eq!(col, 0); + + // Test position at line end (before newline) + let (line, col) = index_to_line_char(source, Loc(11)); // End of "line2" (including newline) + 
assert_eq!(line, 1); + assert_eq!(col, 5); + + // Test position at EOF + let (line, col) = index_to_line_char(source, Loc(source.len() as u32)); + assert_eq!(line, 2); + assert_eq!(col, 5); // "line3" has 5 characters + } + + #[test] + fn test_line_char_to_index_roundtrip() { + let source = "line1\nline2\nline3"; + + // Test round trip conversion + let original_index = 8u32; // Position in "line2" + let (line, col) = index_to_line_char(source, Loc(original_index)); + let converted_index = line_char_to_index(source, line, col); + assert_eq!(converted_index, original_index); + + // Test line/char at EOF + let eof_index = source.len() as u32; + let (line, col) = index_to_line_char(source, Loc(eof_index)); + let converted_index = line_char_to_index(source, line, col); + assert_eq!(converted_index as usize, source.len()); + } + + #[test] + fn test_line_char_to_index() { + let source = "hello\nworld\ntest"; + + assert_eq!(line_char_to_index(source, 0, 0), 0); // 'h' + assert_eq!(line_char_to_index(source, 1, 0), 6); // 'w' + assert_eq!(line_char_to_index(source, 2, 0), 12); // 't' + } + + #[test] + fn test_excluded_ranges_small() { + use crate::models::range_vec_from_vec; + + let from = range_vec_from_vec(vec![Range::new(Loc(0), Loc(20)).unwrap()]); + let excludes = vec![Range::new(Loc(5), Loc(15)).unwrap()]; + + let result = exclude_ranges_small(from, excludes); + + // Should split the original range around the exclusion + assert_eq!(result.len(), 2); + assert!( + result + .iter() + .any(|r| r.from() == Loc(0) && r.until() == Loc(4)) + ); + assert!( + result + .iter() + .any(|r| r.from() == Loc(16) && r.until() == Loc(20)) + ); + } + + #[test] + fn test_mir_visitor_pattern() { + struct TestVisitor { + func_count: usize, + decl_count: usize, + stmt_count: usize, + term_count: usize, + } + + impl MirVisitor for TestVisitor { + fn visit_func(&mut self, _func: &Function) { + self.func_count += 1; + } + + fn visit_decl(&mut self, _decl: &MirDecl) { + self.decl_count += 1; + 
} + + fn visit_stmt(&mut self, _stmt: &MirStatement) { + self.stmt_count += 1; + } + + fn visit_term(&mut self, _term: &MirTerminator) { + self.term_count += 1; + } + } + + let mut func = Function::new(1); + + // Add some declarations + func.decls.push(MirDecl::Other { + local: FnLocal::new(1, 1), + ty: "i32".to_string().into(), + lives: crate::models::RangeVec::new(), + shared_borrow: crate::models::RangeVec::new(), + mutable_borrow: crate::models::RangeVec::new(), + drop: false, + drop_range: crate::models::RangeVec::new(), + must_live_at: crate::models::RangeVec::new(), + }); + + // Add a basic block with statements and terminator + let mut bb = MirBasicBlock::new(); + bb.statements.push(MirStatement::Other { + range: Range::new(Loc(0), Loc(5)).unwrap(), + }); + bb.statements.push(MirStatement::Other { + range: Range::new(Loc(5), Loc(10)).unwrap(), + }); + bb.terminator = Some(MirTerminator::Other { + range: Range::new(Loc(10), Loc(15)).unwrap(), + }); + + func.basic_blocks.push(bb); + + let mut visitor = TestVisitor { + func_count: 0, + decl_count: 0, + stmt_count: 0, + term_count: 0, + }; + + mir_visit(&func, &mut visitor); + + assert_eq!(visitor.func_count, 1); + assert_eq!(visitor.decl_count, 1); + assert_eq!(visitor.stmt_count, 2); + assert_eq!(visitor.term_count, 1); + } + + #[test] + fn test_index_line_char_with_carriage_returns() { + // Test that CR characters are handled correctly (ignored like the compiler) + let source_with_cr = "hello\r\nworld\r\ntest"; + let source_without_cr = "hello\nworld\ntest"; + + // Both should give the same line/char results + let loc = Loc(8); // Should be 'r' in "world" + let (line_cr, char_cr) = index_to_line_char(source_with_cr, loc); + let (line_no_cr, char_no_cr) = index_to_line_char(source_without_cr, loc); + + assert_eq!(line_cr, line_no_cr); + assert_eq!(char_cr, char_no_cr); + + // Test conversion back + let back_cr = line_char_to_index(source_with_cr, line_cr, char_cr); + let back_no_cr = 
line_char_to_index(source_without_cr, line_no_cr, char_no_cr); + + assert_eq!(back_cr, back_no_cr); + } + + #[test] + fn test_line_char_to_index_edge_cases() { + let source = "a\nb\nc"; + + // Test beyond end of string + let result = line_char_to_index(source, 10, 0); + assert_eq!(result, source.chars().count() as u32); + + // Test beyond end of line + let result = line_char_to_index(source, 0, 10); + assert_eq!(result, source.chars().count() as u32); + } +} diff --git a/crates/rustowl/tests/rustowlc_integration.rs b/crates/rustowl/tests/rustowlc_integration.rs new file mode 100644 index 00000000..6e7838b6 --- /dev/null +++ b/crates/rustowl/tests/rustowlc_integration.rs @@ -0,0 +1,196 @@ +use std::process::Command; + +#[test] +fn rustowlc_emits_workspace_json_for_simple_crate() { + let temp = tempfile::tempdir().expect("tempdir"); + let crate_dir = temp.path(); + + // Keep the directory around on failure for debugging. + eprintln!("rustowlc integration temp crate: {}", crate_dir.display()); + + std::fs::write( + crate_dir.join("Cargo.toml"), + r#"[package] +name = "rustowlc_integ" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/lib.rs" +"#, + ) + .unwrap(); + + std::fs::create_dir_all(crate_dir.join("src")).unwrap(); + std::fs::write( + crate_dir.join("src/lib.rs"), + r#"pub fn foo() -> i32 { + let x = 1; + x + 1 +} +"#, + ) + .unwrap(); + + // Prefer the instrumented rustowlc that `cargo llvm-cov` builds under + // `target/llvm-cov-target`. Fall back to whatever `toolchain` resolves. + let exe = std::env::consts::EXE_SUFFIX; + + let manifest_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let workspace_root = manifest_dir + .ancestors() + .nth(2) + .map(|p| p.to_path_buf()) + .unwrap_or(manifest_dir.clone()); + + // `cargo llvm-cov` does *not* propagate `CARGO_TARGET_DIR` into the test process. + // So if we want the instrumented `rustowlc`, we must probe the well-known location first. 
+ let target_dir = std::env::var_os("CARGO_TARGET_DIR") + .map(std::path::PathBuf::from) + .unwrap_or_else(|| workspace_root.join("target")); + let instrumented_target_dir = workspace_root.join("target/llvm-cov-target"); + + let rustowlc_path = instrumented_target_dir.join(format!("debug/rustowlc{exe}")); + let rustowlc_path = if rustowlc_path.is_file() { + rustowlc_path + } else { + let rustowlc_path = instrumented_target_dir.join(format!("release/rustowlc{exe}")); + if rustowlc_path.is_file() { + rustowlc_path + } else { + let rustowlc_path = target_dir.join(format!("debug/rustowlc{exe}")); + if rustowlc_path.is_file() { + rustowlc_path + } else { + target_dir.join(format!("release/rustowlc{exe}")) + } + } + }; + assert!( + rustowlc_path.is_file(), + "missing rustowlc at {}", + rustowlc_path.display() + ); + + // Drive rustc via cargo so it behaves like real usage. + // We explicitly disable incremental compilation to avoid artifacts affecting output. + // Ensure sccache doesn't insert itself in front of our wrapper. + let mut cmd = Command::new("cargo"); + cmd.arg("clean") + .env_remove("RUSTC_WRAPPER") + .env_remove("SCCACHE") + .env_remove("CARGO_BUILD_RUSTC_WRAPPER") + .env_remove("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER") + .env("CARGO_BUILD_RUSTC_WRAPPER", "") + .current_dir(crate_dir); + let clean_out = cmd.output().expect("cargo clean"); + assert!(clean_out.status.success()); + + let sysroot = std::process::Command::new("rustc") + .args(["--print", "sysroot"]) + .output() + .expect("rustc --print sysroot") + .stdout; + let sysroot = String::from_utf8_lossy(&sysroot).trim().to_string(); + + // If we're running under `cargo llvm-cov`, `CARGO_TARGET_DIR` points at the instrumented + // target directory we want to write `.profraw` files into. 
+ let llvm_profile_dir = std::env::var_os("CARGO_TARGET_DIR") + .map(std::path::PathBuf::from) + .unwrap_or_else(|| workspace_root.join("target/llvm-cov-target")); + std::fs::create_dir_all(&llvm_profile_dir).unwrap(); + + // Use `%p` to avoid collisions across processes. `%m` is the binary name. + let llvm_profile_file = llvm_profile_dir.join("rustowlc-integration-%m-%p.profraw"); + + // Use an absolute path outside of the temp crate to avoid any target-dir sandboxing. + let output_path = std::env::temp_dir().join(format!( + "rustowl_output_{}.jsonl", + std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_nanos() + )); + let _ = std::fs::remove_file(&output_path); + + let rustc_path = std::process::Command::new("rustc") + .args(["--print", "sysroot"]) // just to verify rustc exists + .output() + .expect("rustc exists"); + drop(rustc_path); + + let mut cmd = Command::new("cargo"); + cmd.arg("check") + .arg("--release") + // Ensure we compile the workspace crate itself (not just deps). + .arg("--lib") + // Make cargo invoke: `rustowlc rustc ...` so `argv0 == argv1` and analysis runs. + .env( + "RUSTC", + std::process::Command::new("rustc") + .arg("--print") + .arg("rustc") + .output() + .ok() + .and_then(|o| String::from_utf8(o.stdout).ok()) + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .unwrap_or_else(|| "rustc".to_string()), + ) + .env("RUSTC_WORKSPACE_WRAPPER", &rustowlc_path) + .env("CARGO_INCREMENTAL", "0") + .env("RUSTOWL_OUTPUT_PATH", &output_path) + // Ensure coverage from the rustowlc subprocess is captured. + .env("LLVM_PROFILE_FILE", &llvm_profile_file) + // rustowlc depends on rustc private dylibs. + .env("LD_LIBRARY_PATH", format!("{}/lib", sysroot)) + // Ensure no outer wrapper like sccache interferes. 
+ .env_remove("RUSTC_WRAPPER") + .env_remove("SCCACHE") + .env_remove("CARGO_BUILD_RUSTC_WRAPPER") + .env_remove("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER") + .env("CARGO_BUILD_RUSTC_WRAPPER", "") + .current_dir(crate_dir); + + let output = cmd.output().expect("run cargo check"); + + assert!( + output.status.success(), + "cargo failed: status={:?}\nstdout:\n{}\nstderr:\n{}", + output.status, + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + // Cargo may suppress compiler stdout. We instead ask rustowlc to append JSON lines to a file. + // If we didn't run analysis, the file won't exist. + assert!( + output_path.is_file(), + "expected rustowl output file at {}; crate dir entries: {:?}; /tmp entries include output?={}", + output_path.display(), + std::fs::read_dir(crate_dir) + .unwrap() + .flatten() + .map(|e| e.path()) + .collect::>(), + output_path.exists() + ); + + let output_contents = std::fs::read_to_string(&output_path).expect("read rustowl output file"); + assert!( + !output_contents.trim().is_empty(), + "expected rustowl output to be non-empty" + ); + assert!( + output_contents.contains("\"rustowlc_integ\"") + || output_contents.contains("rustowlc_integ"), + "expected crate name in output" + ); + // Windows emits backslashes and the JSON contains escaped `\\`. 
+ assert!( + output_contents.contains("/src/lib.rs") + || output_contents.contains("\\\\src\\\\lib.rs") + || output_contents.contains("src/lib.rs"), + "expected output to mention src/lib.rs; output was:\n{output_contents}" + ); +} diff --git a/crates/xtask/Cargo.toml b/crates/xtask/Cargo.toml new file mode 100644 index 00000000..faeec8a5 --- /dev/null +++ b/crates/xtask/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "xtask" +version = "0.0.0" +edition.workspace = true +publish = false +license.workspace = true + +[dependencies] +anyhow.workspace = true +clap = { workspace = true, features = ["derive"] } +open = "5" +regex.workspace = true +serde.workspace = true +serde_json.workspace = true +tempfile.workspace = true +tokio = { workspace = true, features = ["process", "rt-multi-thread", "macros", "fs", "io-util"] } +reqwest = { workspace = true } +flate2.workspace = true +tar.workspace = true +jiff.workspace = true diff --git a/crates/xtask/src/commands/bench.rs b/crates/xtask/src/commands/bench.rs new file mode 100644 index 00000000..30593bad --- /dev/null +++ b/crates/xtask/src/commands/bench.rs @@ -0,0 +1,475 @@ +use anyhow::{Context, Result, anyhow}; +use clap::Parser; +use open; +use regex::Regex; +use serde::{Deserialize, Serialize}; +use std::fmt::Write as _; +use std::{collections::BTreeMap, path::PathBuf}; + +use crate::util::{Cmd, percent_change, repo_root, write_string}; + +#[derive(Parser, Debug)] +#[command( + about = "Run divan benches and track performance baselines", + long_about = "Runs `cargo bench -p rustowl` under the pinned toolchain wrapper. 
+ +Modes: +- default: run benchmarks and report parsed results +- `--save `: save results to `baselines/performance//` +- `--load `: compare against a saved baseline and fail on regressions + +Options: +- `--bench `: restrict which benches run (repeatable) +- `--clean`: `cargo clean` before benchmarking +- `--quiet`: pass `--quiet` to `cargo bench` +- `--open`: open the generated summary report" +)] +pub struct Args { + /// Save current benchmark results as baseline (directory name) + #[arg(long, value_name = "NAME")] + save: Option, + + /// Load baseline and compare current results against it + #[arg(long, value_name = "NAME")] + load: Option, + + /// Regression threshold percent (e.g. 5) + #[arg(long, default_value_t = 5.0, value_name = "PERCENT")] + threshold: f64, + + /// Clean build artifacts before benchmarking + #[arg(long)] + clean: bool, + + /// Repeat `--bench ` to restrict benches + #[arg(long = "bench", value_name = "NAME")] + benches: Vec, + + /// Emit less output; intended for CI + #[arg(long)] + quiet: bool, + + /// Open the generated benchmark summary report + #[arg(long)] + open: bool, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct BaselineFile { + meta: Meta, + benches: BTreeMap, + analysis_seconds: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct Meta { + git_sha: Option, + host: Option, + rustc: Option, +} + +pub async fn run(args: Args) -> Result<()> { + let root = repo_root()?; + + if args.save.is_some() && args.load.is_some() { + return Err(anyhow!("--save and --load are mutually exclusive")); + } + + if args.clean { + Cmd::new("cargo").args(["clean"]).cwd(&root).run().await?; + } + + // Run divan benches via cargo bench. 
+ let mut cmd = Cmd::new("cargo").args(["xtask", "toolchain", "cargo", "bench", "-p", "rustowl"]); + if !args.benches.is_empty() { + for b in &args.benches { + cmd = cmd.args(["--bench", b]); + } + } else { + cmd = cmd.args(["--benches"]); + } + + if args.quiet { + cmd = cmd.arg("--quiet"); + } + + let output = cmd.cwd(&root).output().await.context("run cargo bench")?; + let out_str = format!( + "{}{}", + String::from_utf8_lossy(&output.stdout), + String::from_utf8_lossy(&output.stderr) + ); + + // When parsing fails, capturing raw output is crucial for diagnosing format changes. + if args.save.is_none() && args.load.is_none() && !args.quiet { + write_string(root.join("target/xtask/bench_last.log"), &out_str).ok(); + } + + if !output.status.success() { + return Err(anyhow!("bench command failed")); + } + + let parsed = parse_divan_output(&out_str).context("parse divan output")?; + + // The legacy script timed `./target/release/rustowl check `. + // That measurement is far noisier than microbench timings and caused flaky regressions. + // For the Divan migration, we record it as metadata only by default. 
+ let analysis_time = None; + + let baseline_dir = root.join("baselines/performance"); + + if let Some(name) = args.save { + let dir = baseline_dir.join(&name); + std::fs::create_dir_all(&dir).with_context(|| format!("create {}", dir.display()))?; + + write_string(dir.join("bench.log"), &out_str)?; + + let baseline = BaselineFile { + meta: Meta { + git_sha: git_rev_parse(&root).await.ok(), + host: rustc_host().await.ok(), + rustc: rustc_version().await.ok(), + }, + benches: parsed, + analysis_seconds: analysis_time, + }; + + let json = serde_json::to_string_pretty(&baseline).context("serialize baseline")?; + write_string(dir.join("baseline.json"), &(json + "\n"))?; + if let Some(secs) = analysis_time { + write_string(dir.join("analysis_time.txt"), &format!("{secs}\n"))?; + } + + let summary = build_summary_markdown(&baseline, None, args.threshold); + let summary_path = dir.join("summary.md"); + write_string(&summary_path, &summary)?; + + if args.open { + let _ = open::that(&summary_path); + } + + Ok(()) + } else if let Some(name) = args.load { + let dir = baseline_dir.join(&name); + let baseline_path = dir.join("baseline.json"); + let baseline: BaselineFile = serde_json::from_str( + &std::fs::read_to_string(&baseline_path) + .with_context(|| format!("read {}", baseline_path.display()))?, + ) + .context("parse baseline")?; + + let cmp = compare(&baseline, &parsed, analysis_time, args.threshold)?; + let summary = build_summary_markdown(&baseline, Some(&cmp), args.threshold); + let summary_path = dir.join("summary.md"); + write_string(&summary_path, &summary)?; + + if args.open { + let _ = open::that(&summary_path); + } + + Ok(()) + } else { + // Strict mode: parse and report. 
+ println!("Parsed {} benches.", parsed.len()); + + let cur = BaselineFile { + meta: Meta { + git_sha: git_rev_parse(&root).await.ok(), + host: rustc_host().await.ok(), + rustc: rustc_version().await.ok(), + }, + benches: parsed, + analysis_seconds: None, + }; + + let summary_path = root.join("target/xtask/bench_summary.md"); + std::fs::create_dir_all(summary_path.parent().unwrap()) + .with_context(|| format!("create {}", summary_path.parent().unwrap().display()))?; + write_string( + &summary_path, + &build_summary_markdown(&cur, None, args.threshold), + )?; + + if args.open { + let _ = open::that(&summary_path); + } + + Ok(()) + } +} + +#[derive(Debug, Clone)] +struct CompareResult { + benches: Vec, + analysis: Option, + failed: bool, +} + +#[derive(Debug, Clone)] +struct BenchCompare { + name: String, + baseline: f64, + current: f64, + change_pct: Option, +} + +fn compare( + baseline: &BaselineFile, + current: &BTreeMap, + analysis_time: Option, + threshold: f64, +) -> Result { + let mut failed = false; + let mut bench_rows = Vec::new(); + + for (name, base) in &baseline.benches { + let Some(cur) = current.get(name) else { + return Err(anyhow!("missing benchmark in current run: {name}")); + }; + + let change = percent_change(*base, *cur); + if let Some(pct) = change { + println!("{name}: {base:.6} -> {cur:.6} ({pct:.2}%)"); + if pct > threshold { + failed = true; + } + } + + bench_rows.push(BenchCompare { + name: name.to_string(), + baseline: *base, + current: *cur, + change_pct: change, + }); + } + + let mut analysis_row = None; + if let (Some(base_analysis), Some(cur_analysis)) = (baseline.analysis_seconds, analysis_time) { + let change = percent_change(base_analysis, cur_analysis); + if let Some(pct) = change { + println!("analysis: {base_analysis:.3}s -> {cur_analysis:.3}s ({pct:.2}%)"); + if pct > threshold { + failed = true; + } + } else { + println!("analysis: baseline {base_analysis:.3}s current {cur_analysis:.3}s"); + if cur_analysis > 0.0 { + failed = 
true; + } + } + + analysis_row = Some(BenchCompare { + name: "analysis".to_string(), + baseline: base_analysis, + current: cur_analysis, + change_pct: change, + }); + } + + let res = CompareResult { + benches: bench_rows, + analysis: analysis_row, + failed, + }; + + if res.failed { + Err(anyhow!("benchmark regression beyond threshold")) + } else { + Ok(res) + } +} + +fn build_summary_markdown( + current: &BaselineFile, + compare: Option<&CompareResult>, + threshold: f64, +) -> String { + let mut out = String::new(); + + let _ = writeln!(&mut out, "# RustOwl Benchmark Summary"); + let _ = writeln!(&mut out); + + if let Some(rustc) = ¤t.meta.rustc { + let _ = writeln!(&mut out, "- rustc: {rustc}"); + } + if let Some(host) = ¤t.meta.host { + let _ = writeln!(&mut out, "- host: {host}"); + } + if let Some(sha) = ¤t.meta.git_sha { + let _ = writeln!(&mut out, "- git: {sha}"); + } + let _ = writeln!(&mut out, "- threshold: {threshold:.2}%"); + let _ = writeln!(&mut out); + + if let Some(cmp) = compare { + let _ = writeln!(&mut out, "## Comparison"); + let _ = writeln!( + &mut out, + "- status: {}", + if cmp.failed { "failed" } else { "ok" } + ); + let _ = writeln!(&mut out); + + let _ = writeln!( + &mut out, + "| Benchmark | Baseline (s) | Current (s) | Change |" + ); + let _ = writeln!(&mut out, "|---|---:|---:|---:|"); + for row in &cmp.benches { + let change = row + .change_pct + .map(|v| format!("{v:.2}%")) + .unwrap_or_else(|| "n/a".to_string()); + let _ = writeln!( + &mut out, + "| {} | {:.6} | {:.6} | {} |", + row.name, row.baseline, row.current, change + ); + } + if let Some(row) = &cmp.analysis { + let change = row + .change_pct + .map(|v| format!("{v:.2}%")) + .unwrap_or_else(|| "n/a".to_string()); + let _ = writeln!( + &mut out, + "| {} | {:.6} | {:.6} | {} |", + row.name, row.baseline, row.current, change + ); + } + + let _ = writeln!(&mut out); + } + + let _ = writeln!(&mut out, "## Current Results"); + let _ = writeln!(&mut out, "| Benchmark | Seconds |"); 
+ let _ = writeln!(&mut out, "|---|---:|"); + for (name, secs) in ¤t.benches { + let _ = writeln!(&mut out, "| {name} | {secs:.6} |"); + } + if let Some(secs) = current.analysis_seconds { + let _ = writeln!(&mut out, "| analysis | {secs:.6} |"); + } + + out +} + +fn parse_divan_output(output: &str) -> Result> { + // Current divan output for our benches is a table like: + // "│ ├─ default 6.931 ms │ ... │ mean 7.457 ms │ ..." + // To keep this robust, we parse any row that contains a benchmark name and a "mean" value. + // The key becomes "/" (e.g. "rustowl_check/default"). + let re = Regex::new( + r"^\s*[│|]\s*[├╰]─\s*(?P[A-Za-z0-9_\-]+)\s+(?P[0-9]+(?:\.[0-9]+)?)\s*(?Pns|µs|us|ms|s)\s*[│|]\s*(?P[0-9]+(?:\.[0-9]+)?)\s*(?Pns|µs|us|ms|s)\s*[│|]\s*(?P[0-9]+(?:\.[0-9]+)?)\s*(?Pns|µs|us|ms|s)\s*[│|]\s*(?P[0-9]+(?:\.[0-9]+)?)\s*(?Pns|µs|us|ms|s)\b", + ) + .context("compile regex")?; + + fn to_secs(val: f64, unit: &str) -> Option { + Some(match unit { + "ns" => val / 1_000_000_000.0, + "us" | "µs" => val / 1_000_000.0, + "ms" => val / 1_000.0, + "s" => val, + _ => return None, + }) + } + + let mut map = BTreeMap::new(); + let mut current_group: Option = None; + + for raw in output.lines() { + let line = raw.trim_end(); + let trimmed = line.trim(); + if trimmed.is_empty() { + continue; + } + + // Group headers look like: "├─ rustowl_check" or "╰─ rustowl_comprehensive". + if let Some(rest) = trimmed + .strip_prefix("├─ ") + .or_else(|| trimmed.strip_prefix("╰─ ")) + { + current_group = Some(rest.split_whitespace().next().unwrap_or("").to_string()); + continue; + } + // Some output lines include the left border '│' before the group marker. + // Only treat them as group headers if they don't have timing columns. 
+ if trimmed.matches('│').count() < 2 { + if let Some(rest) = trimmed + .strip_prefix("│ ├─ ") + .or_else(|| trimmed.strip_prefix("│ ╰─ ")) + { + current_group = Some(rest.trim().to_string()); + continue; + } + if let Some(rest) = trimmed.strip_prefix(" ╰─ ") { + current_group = Some(rest.trim().to_string()); + continue; + } + } + + let Some(caps) = re.captures(trimmed) else { + continue; + }; + + let name = caps.name("name").unwrap().as_str().trim().to_string(); + let mean_val: f64 = caps + .name("mean") + .unwrap() + .as_str() + .parse() + .context("parse mean")?; + let mean_unit = caps.name("mean_unit").unwrap().as_str(); + let Some(secs) = to_secs(mean_val, mean_unit) else { + continue; + }; + + let key = if let Some(group) = ¤t_group { + format!("{group}/{name}") + } else { + name + }; + + map.insert(key, secs); + } + + if map.is_empty() { + return Err(anyhow!("could not find any divan timing lines")); + } + + Ok(map) +} + +async fn git_rev_parse(root: &PathBuf) -> Result { + crate::util::ensure_tool("git")?; + let out = Cmd::new("git") + .args(["rev-parse", "HEAD"]) + .cwd(root) + .output() + .await?; + if !out.status.success() { + return Err(anyhow!("git rev-parse failed")); + } + Ok(String::from_utf8_lossy(&out.stdout).trim().to_string()) +} + +async fn rustc_version() -> Result { + let out = Cmd::new("rustc").args(["--version"]).output().await?; + if !out.status.success() { + return Err(anyhow!("rustc --version failed")); + } + Ok(String::from_utf8_lossy(&out.stdout).trim().to_string()) +} + +async fn rustc_host() -> Result { + let out = Cmd::new("rustc").args(["-vV"]).output().await?; + if !out.status.success() { + return Err(anyhow!("rustc -vV failed")); + } + for line in String::from_utf8_lossy(&out.stdout).lines() { + if let Some(host) = line.strip_prefix("host: ") { + return Ok(host.trim().to_string()); + } + } + Err(anyhow!("host line not found")) +} diff --git a/crates/xtask/src/commands/bump.rs b/crates/xtask/src/commands/bump.rs new file mode 
100644 index 00000000..8159aa64 --- /dev/null +++ b/crates/xtask/src/commands/bump.rs @@ -0,0 +1,132 @@ +use anyhow::{Context, Result, anyhow}; +use clap::Parser; +use serde_json::Value; +use std::path::PathBuf; + +use crate::util::{Cmd, ensure_tool, read_to_string, repo_root, write_string}; + +#[derive(Parser, Debug)] +#[command( + about = "Bump versions and create a git tag", + long_about = "Updates version fields for a release and creates an annotated git tag. + +What gets updated: +- `crates/rustowl/Cargo.toml` version +- `vscode/package.json` version (if present) +- AUR PKGBUILD files (if present and not a prerelease) + +Then runs: `git tag `. + +Example: + cargo xtask bump v1.0.0" +)] +pub struct Args { + /// Version tag like `v1.2.3` (must start with 'v') + #[arg(value_name = "VERSION")] + version: String, +} + +pub async fn run(args: Args) -> Result<()> { + let root = repo_root()?; + ensure_tool("git")?; + + let (version_tag, version) = parse_version(&args.version)?; + let is_prerelease = is_prerelease(&version); + + update_rustowl_cargo_toml(&root.join("crates/rustowl/Cargo.toml"), &version)?; + + let vscode_pkg = root.join("vscode/package.json"); + if vscode_pkg.is_file() { + update_vscode_package_json(&vscode_pkg, &version)?; + } + + if !is_prerelease { + let aur_pkgbuild = root.join("aur/PKGBUILD"); + if aur_pkgbuild.is_file() { + update_pkgbuild(&aur_pkgbuild, &version)?; + } + let aur_pkgbuild_bin = root.join("aur/PKGBUILD-BIN"); + if aur_pkgbuild_bin.is_file() { + update_pkgbuild(&aur_pkgbuild_bin, &version)?; + } + } + + Cmd::new("git") + .args(["tag", &version_tag]) + .cwd(&root) + .run() + .await + .context("git tag")?; + + Ok(()) +} + +fn parse_version(input: &str) -> Result<(String, String)> { + if !input.starts_with('v') { + return Err(anyhow!("version must start with 'v' (e.g. 
v0.3.1)")); + } + let ver = input.trim_start_matches('v').to_string(); + if ver.is_empty() { + return Err(anyhow!("invalid version")); + } + Ok((input.to_string(), ver)) +} + +fn is_prerelease(version: &str) -> bool { + let lower = version.to_ascii_lowercase(); + ["alpha", "beta", "rc", "dev", "pre", "snapshot"] + .iter() + .any(|p| lower.contains(p)) +} + +fn update_rustowl_cargo_toml(path: &PathBuf, version: &str) -> Result<()> { + let original = read_to_string(path)?; + let mut out = String::new(); + let mut replaced = false; + + for line in original.lines() { + if !replaced && line.trim_start().starts_with("version =") { + out.push_str(&format!("version = \"{}\"\n", version)); + replaced = true; + } else { + out.push_str(line); + out.push('\n'); + } + } + + if !replaced { + return Err(anyhow!("did not find version field in {}", path.display())); + } + + write_string(path, &out)?; + Ok(()) +} + +fn update_vscode_package_json(path: &PathBuf, version: &str) -> Result<()> { + let content = read_to_string(path)?; + let mut json: Value = serde_json::from_str(&content).context("parse vscode/package.json")?; + json["version"] = Value::String(version.to_string()); + let formatted = serde_json::to_string_pretty(&json).context("serialize vscode/package.json")?; + write_string(path, &(formatted + "\n"))?; + Ok(()) +} + +fn update_pkgbuild(path: &PathBuf, version: &str) -> Result<()> { + let original = read_to_string(path)?; + let mut out = String::new(); + let mut replaced = false; + for line in original.lines() { + if line.starts_with("pkgver=") { + out.push_str(&format!("pkgver={}\n", version)); + replaced = true; + } else { + out.push_str(line); + out.push('\n'); + } + } + if !replaced { + return Err(anyhow!("did not find pkgver= in {}", path.display())); + } + write_string(path, &out)?; + Ok(()) +} diff --git a/crates/xtask/src/commands/dev_checks.rs b/crates/xtask/src/commands/dev_checks.rs new file mode 100644 index 00000000..07542ceb --- /dev/null +++ 
b/crates/xtask/src/commands/dev_checks.rs @@ -0,0 +1,179 @@ +use anyhow::{Context, Result, anyhow}; +use clap::Parser; + +use crate::util::{Cmd, read_to_string, repo_root}; + +#[derive(Parser, Debug)] +#[command( + about = "Run developer checks (fmt, clippy, build, tests)", + long_about = "Runs the project's standard developer quality checks: +- rustfmt (optionally fix) +- clippy (all targets, all features, workspace, -D warnings) +- stable rustc version check (>= `.rust-version-stable`) +- release build via `cargo xtask toolchain cargo build --release` +- a basic `cargo test --lib --bins` +- optional VSCode extension checks (if `vscode/` exists and `pnpm` is installed)" +)] +pub struct Args { + /// Automatically fix issues where possible + #[arg(short, long)] + fix: bool, +} + +pub async fn run(args: Args) -> Result<()> { + let root = repo_root()?; + + if args.fix { + Cmd::new("cargo").arg("fmt").cwd(&root).run().await?; + } else { + Cmd::new("cargo") + .args(["fmt", "--check", "--all"]) + .cwd(&root) + .run() + .await?; + } + + if args.fix { + // Best-effort: clippy --fix can fail on some toolchains/configs. + let _ = Cmd::new("cargo") + .args(["clippy", "--fix", "--allow-dirty", "--allow-staged"]) + .cwd(&root) + .run() + .await; + } + + Cmd::new("cargo") + .args([ + "clippy", + "--all-targets", + "--all-features", + "--workspace", + "--", + "-D", + "warnings", + ]) + .cwd(&root) + .run() + .await + .context("clippy")?; + + check_stable_rust_min_version(&root).await?; + + // Build (release) using the custom toolchain wrapper. + Cmd::new("cargo") + .args(["xtask", "toolchain", "cargo", "build", "--release"]) + .cwd(&root) + .run() + .await + .context("build")?; + + // Tests: keep parity with the previous script (run basic tests; project may have none). 
+ let output = Cmd::new("cargo") + .args(["test", "--lib", "--bins"]) + .cwd(&root) + .output() + .await + .context("cargo test")?; + + if !output.status.success() { + return Err(anyhow!("cargo test failed")); + } + + // VSCode checks, only if pnpm exists + if root.join("vscode").is_dir() && crate::util::which("pnpm").is_some() { + let vscode = root.join("vscode"); + if !vscode.join("node_modules").is_dir() { + Cmd::new("pnpm") + .args(["install", "--frozen-lockfile"]) + .cwd(&vscode) + .run() + .await + .context("pnpm install")?; + } + if args.fix { + let _ = Cmd::new("pnpm") + .args(["prettier", "--write", "src"]) + .cwd(&vscode) + .run() + .await; + } else { + Cmd::new("pnpm") + .args(["prettier", "--check", "src"]) + .cwd(&vscode) + .run() + .await + .context("pnpm prettier")?; + } + Cmd::new("pnpm") + .args(["lint"]) + .cwd(&vscode) + .run() + .await + .context("pnpm lint")?; + Cmd::new("pnpm") + .args(["check-types"]) + .cwd(&vscode) + .run() + .await + .context("pnpm check-types")?; + } + + Ok(()) +} + +async fn check_stable_rust_min_version(root: &std::path::Path) -> Result<()> { + // Parity with `scripts/dev-checks.sh`: require stable rustc >= `.rust-version-stable`. + // This avoids surprising compiler errors when running release builds. 
+ let pinned = + read_to_string(root.join(".rust-version-stable")).context("read .rust-version-stable")?; + let pinned = pinned.trim(); + if pinned.is_empty() { + return Ok(()); + } + + let output = Cmd::new("rustc") + .args(["--version"]) + .cwd(root) + .output() + .await + .context("rustc --version")?; + + let version_str = String::from_utf8_lossy(&output.stdout); + let mut it = version_str.split_whitespace(); + let _rustc = it.next(); + let current = it.next().unwrap_or("").trim(); + + if current.is_empty() { + return Err(anyhow!("could not parse rustc version from: {version_str}")); + } + + if compares_ge_semver(current, pinned) { + Ok(()) + } else { + Err(anyhow!( + "rustc {current} is below required stable {pinned} (from .rust-version-stable)" + )) + } +} + +fn compares_ge_semver(current: &str, required: &str) -> bool { + // Minimal semver (x.y.z) comparison. Keep it local to xtask to avoid adding deps. + // Accept inputs like `1.92.0` and `1.94.0-nightly`. + fn parse(v: &str) -> Option<(u64, u64, u64)> { + let v = v.split_once('-').map(|(a, _)| a).unwrap_or(v); + let mut it = v.split('.'); + Some(( + it.next()?.parse().ok()?, + it.next()?.parse().ok()?, + it.next()?.parse().ok()?, + )) + } + + let Some(c) = parse(current) else { + return false; + }; + let Some(r) = parse(required) else { + return false; + }; + c >= r +} diff --git a/crates/xtask/src/commands/mod.rs b/crates/xtask/src/commands/mod.rs new file mode 100644 index 00000000..4944db2e --- /dev/null +++ b/crates/xtask/src/commands/mod.rs @@ -0,0 +1,7 @@ +pub mod bench; +pub mod bump; +pub mod dev_checks; +pub mod nvim_tests; +pub mod security; +pub mod size_check; +pub mod toolchain; diff --git a/crates/xtask/src/commands/nvim_tests.rs b/crates/xtask/src/commands/nvim_tests.rs new file mode 100644 index 00000000..411cad57 --- /dev/null +++ b/crates/xtask/src/commands/nvim_tests.rs @@ -0,0 +1,51 @@ +use anyhow::{Context, Result, anyhow}; +use clap::Parser; + +use crate::util::{Cmd, ensure_tool, 
repo_root}; + +#[derive(Parser, Debug)] +#[command( + about = "Run Neovim integration tests", + long_about = "Runs the tests in `nvim-tests/` using a headless Neovim instance. + +Requirements: +- `nvim` must be installed and discoverable on PATH. + +This uses the MiniTest-based harness via `nvim-tests/minimal_init.lua`." +)] +pub struct Args {} + +pub async fn run(_args: Args) -> Result<()> { + ensure_tool("nvim")?; + let root = repo_root()?; + + let output = Cmd::new("nvim") + .args([ + "--headless", + "--noplugin", + "-u", + "./nvim-tests/minimal_init.lua", + "-c", + "lua MiniTest.run()", + "-c", + "qa", + ]) + .cwd(&root) + .output() + .await + .context("run nvim tests")?; + + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + print!("{stdout}"); + eprint!("{stderr}"); + + if output.status.success() + && (stdout.contains("Fails (0) and Notes (0)") + || stderr.contains("Fails (0) and Notes (0)")) + { + Ok(()) + } else { + Err(anyhow!("neovim tests failed")) + } +} diff --git a/crates/xtask/src/commands/security.rs b/crates/xtask/src/commands/security.rs new file mode 100644 index 00000000..54247231 --- /dev/null +++ b/crates/xtask/src/commands/security.rs @@ -0,0 +1,800 @@ +use anyhow::{Context, Result, anyhow}; +use clap::Parser; +use std::{ + fmt::Write as _, + path::{Path, PathBuf}, +}; + +use crate::util::{Cmd, OsKind, is_ci, os_kind, repo_root, sudo_install, which, write_string}; + +async fn instruments_available(root: &Path) -> bool { + if which("instruments").is_none() { + return false; + } + + // Match the shell script: instruments exists, but can be non-functional without Xcode setup. 
+ Cmd::new("timeout") + .args(["10s", "instruments", "-help"]) + .cwd(root) + .output() + .await + .map(|o| o.status.success()) + .unwrap_or(false) +} + +#[derive(Parser, Debug)] +#[command( + about = "Run security-oriented checks", + long_about = "Runs a suite of security and correctness checks and writes logs to `security-logs/`. + +Modes: +- default: run configured checks and write a summary Markdown file +- `--check`: print tool availability and exit +- `--install`: try installing missing tools (interactive mode) +- `--ci`: force CI mode (enables auto-install + verbose output) + +Checks include: +- `cargo deny check` (unless `--no-deny`) +- `cargo shear` (optional) +- `cargo miri` (optional; runs tests under Miri) +- valgrind (optional; platform-dependent) + +In CI, this command can auto-install missing cargo tools and some OS packages." +)] +pub struct Args { + /// Only check tool availability and exit (no tests) + #[arg(long)] + check: bool, + + /// Install missing tools in interactive mode + #[arg(long)] + install: bool, + + /// Force CI mode (enables auto-install and verbose logging) + #[arg(long)] + ci: bool, + + /// Disable auto-installation (even in CI mode) + #[arg(long)] + no_auto_install: bool, + + /// Skip Miri tests + #[arg(long = "no-miri")] + no_miri: bool, + + /// Skip valgrind checks + #[arg(long = "no-valgrind")] + no_valgrind: bool, + + /// Force-enable Valgrind even on unsupported platforms (e.g. 
macOS) + #[arg(long = "force-valgrind")] + force_valgrind: bool, + + /// Skip dependency vulnerabilities check (cargo-deny) + #[arg(long = "no-deny")] + no_deny: bool, + + /// Skip unused dependency scan (cargo-shear) + #[arg(long = "no-shear")] + no_shear: bool, + + /// Skip macOS Instruments checks + #[arg(long = "no-instruments")] + no_instruments: bool, + + /// Force-enable Instruments checks on macOS + #[arg(long = "force-instruments")] + force_instruments: bool, + + /// Override MIRIFLAGS (default matches legacy script) + #[arg( + long, + value_name = "FLAGS", + default_value = "-Zmiri-disable-isolation -Zmiri-permissive-provenance" + )] + miri_flags: String, + + /// Override RUSTFLAGS for Miri (default matches legacy script) + #[arg(long, value_name = "FLAGS", default_value = "--cfg miri")] + miri_rustflags: String, +} + +pub async fn run(args: Args) -> Result<()> { + let root = repo_root()?; + let logs_dir = root.join("security-logs"); + + let ci_mode = args.ci || is_ci(); + let auto_install = !args.no_auto_install && (args.install || ci_mode); + + // Keep flags for CLI parity with the legacy script. + let _ = args.no_instruments; + + if ci_mode { + eprintln!( + "CI detected (auto-install: {})", + if auto_install { "enabled" } else { "disabled" } + ); + } + + if args.install && !auto_install { + // This can happen if `--install` and `--no-auto-install` are both set. + return Err(anyhow!("--install conflicts with --no-auto-install")); + } + + // Keep parity with the shell scripts: require stable rustc >= .rust-version-stable. + check_stable_rust_min_version(&root).await?; + + // Auto-configure defaults based on platform (mirrors scripts/security.sh). + // + // Rule of thumb: + // - explicit user flags win (e.g. 
`--no-*`), unless the user also `--force-*` + // - "force" only affects auto-config defaults; it doesn't bypass missing tools + let mut args = args; + match os_kind() { + OsKind::Linux => { + // Linux: keep default behavior (Miri + Valgrind are allowed). + } + OsKind::Macos => { + // macOS: legacy script disabled valgrind, instruments, and TSAN by default. + if !args.force_valgrind { + args.no_valgrind = true; + } + + // Instruments exists only on macOS, but is off by default in the script. + if !args.force_instruments { + args.no_instruments = true; + } + } + _ => { + // Unknown platform: be conservative. + if !args.force_valgrind { + args.no_valgrind = true; + } + if !args.force_instruments { + args.no_instruments = true; + } + + // Also disable nightly-dependent features on unknown platforms. + args.no_miri = true; + } + } + + // Apply force overrides last (so they reliably undo auto-config). + if args.force_valgrind { + args.no_valgrind = false; + } + if args.force_instruments { + args.no_instruments = false; + } + + if args.check { + print_tool_status(&root, ci_mode).await?; + return Ok(()); + } + + println!("RustOwl Security & Memory Safety Testing"); + println!("========================================="); + println!(); + + let mut summary = String::new(); + writeln!(&mut summary, "# Security Testing Summary")?; + writeln!(&mut summary)?; + writeln!(&mut summary, "Generated by `cargo xtask security`.")?; + writeln!(&mut summary)?; + + let mut overall_ok = true; + + // cargo-deny always runs unless explicitly disabled. + // CI policy: security.yml passes `--no-deny` to avoid duplicate cost. 
+ if !args.no_deny { + ensure_cargo_tool("cargo-deny", "cargo-deny", auto_install).await?; + println!("\n== cargo-deny =="); + let (ok, out) = run_and_capture( + &root, + "cargo deny check", + Cmd::new("cargo").args(["deny", "check"]).cwd(&root), + ) + .await; + write_string(logs_dir.join("cargo-deny.log"), &out)?; + overall_ok &= ok; + append_step( + &mut summary, + "cargo deny", + ok, + Some("security-logs/cargo-deny.log"), + ); + } else { + append_step(&mut summary, "cargo deny", true, Some("skipped")); + } + + if !args.no_shear { + // `cargo shear` is used to detect unused dependencies. + ensure_cargo_tool("cargo-shear", "cargo-shear", auto_install).await?; + println!("\n== cargo-shear =="); + let (ok, out) = run_and_capture( + &root, + "cargo shear", + Cmd::new("cargo").args(["shear"]).cwd(&root), + ) + .await; + write_string(logs_dir.join("cargo-shear.log"), &out)?; + overall_ok &= ok; + append_step( + &mut summary, + "cargo shear", + ok, + Some("security-logs/cargo-shear.log"), + ); + } else { + append_step(&mut summary, "cargo shear", true, Some("skipped")); + } + + // We don't run nextest by default in security. We still ensure it's installed because Miri can + // use it as a faster test runner (via `cargo miri nextest`). + ensure_cargo_tool("cargo-nextest", "cargo-nextest", auto_install).await?; + append_step( + &mut summary, + "cargo nextest", + true, + Some("available (used by miri)"), + ); + + if !args.no_miri { + // Miri requires nightly. + ensure_miri(auto_install).await?; + + println!("\n== miri =="); + // Phase 1: unit tests under Miri. + // Legacy script: use `miri nextest` when available, else fall back to `miri test`. 
+ let (ok_unit, out_unit) = { + let nextest_available = Cmd::new("cargo") + .args(["nextest", "--version"]) + .cwd(&root) + .output() + .await + .map(|o| o.status.success()) + .unwrap_or(false); + + if nextest_available { + run_and_capture( + &root, + "miri unit tests (nextest)", + Cmd::new("cargo") + .args([ + "xtask", + "toolchain", + "rustup", + "run", + "nightly", + "cargo", + "miri", + "nextest", + "run", + "--lib", + "-p", + "rustowl", + ]) + .cwd(&root) + .env("MIRIFLAGS", &args.miri_flags) + .env("RUSTFLAGS", &args.miri_rustflags), + ) + .await + } else { + run_and_capture( + &root, + "miri unit tests (cargo test)", + Cmd::new("cargo") + .args([ + "xtask", + "toolchain", + "rustup", + "run", + "nightly", + "cargo", + "miri", + "test", + "--lib", + "-p", + "rustowl", + ]) + .cwd(&root) + .env("MIRIFLAGS", &args.miri_flags) + .env("RUSTFLAGS", &args.miri_rustflags), + ) + .await + } + }; + write_string(logs_dir.join("miri_unit_tests.log"), &out_unit)?; + overall_ok &= ok_unit; + append_step( + &mut summary, + "miri unit tests", + ok_unit, + Some("security-logs/miri_unit_tests.log"), + ); + + append_step( + &mut summary, + "miri rustowl run", + true, + Some("skipped (removed; proc-spawn makes it unreliable)"), + ); + } else { + append_step(&mut summary, "miri", true, Some("skipped")); + } + + if !args.no_instruments { + if os_kind() != OsKind::Macos { + append_step( + &mut summary, + "instruments", + true, + Some("skipped (non-macOS)"), + ); + } else if !instruments_available(&root).await { + append_step( + &mut summary, + "instruments", + false, + Some("missing or not functional; try Xcode setup"), + ); + overall_ok = false; + } else { + // Minimal sanity check: ensure `instruments -help` works. 
+ let (ok, out) = run_and_capture( + &root, + "instruments -help", + Cmd::new("timeout") + .args(["10s", "instruments", "-help"]) + .cwd(&root), + ) + .await; + write_string(logs_dir.join("instruments.log"), &out)?; + overall_ok &= ok; + append_step( + &mut summary, + "instruments", + ok, + Some("security-logs/instruments.log"), + ); + } + } else { + append_step(&mut summary, "instruments", true, Some("skipped")); + } + + // Legacy script behavior: valgrind is only considered on Linux, unless forced. + if !args.no_valgrind && (args.force_valgrind || os_kind() == OsKind::Linux) { + ensure_valgrind(auto_install).await?; + + println!("\n== valgrind =="); + let (build_ok, build_out) = run_and_capture( + &root, + "build rustowl (system allocator)", + Cmd::new("cargo") + .args([ + "xtask", + "toolchain", + "cargo", + "build", + "--release", + "--no-default-features", + "-p", + "rustowl", + ]) + .cwd(&root), + ) + .await; + write_string(logs_dir.join("build-rustowl.log"), &build_out)?; + overall_ok &= build_ok; + append_step( + &mut summary, + "build rustowl (release, system allocator)", + build_ok, + Some("security-logs/build-rustowl.log"), + ); + + let bin = if root.join("target/release/rustowl.exe").is_file() { + "./target/release/rustowl.exe" + } else { + "./target/release/rustowl" + }; + + let suppressions_path = root.join(".valgrind-suppressions"); + let suppressions = if suppressions_path.is_file() { + Some(".valgrind-suppressions") + } else { + None + }; + + let mut args = vec![ + "--tool=memcheck", + "--leak-check=full", + "--show-leak-kinds=all", + "--track-origins=yes", + ]; + let suppressions_flag; + if let Some(s) = suppressions { + suppressions_flag = format!("--suppressions={s}"); + args.push(&suppressions_flag); + } + args.push(bin); + if root.join("./perf-tests/dummy-package").is_dir() { + args.push("check"); + args.push("./perf-tests/dummy-package"); + } else { + args.push("--help"); + } + + let (ok, out) = run_and_capture( + &root, + "valgrind", + 
Cmd::new("valgrind") + .args(args) + .cwd(&root) + .env("RUST_BACKTRACE", "1"), + ) + .await; + write_string(logs_dir.join("valgrind.log"), &out)?; + + // Valgrind output is useful, but the exit code can vary by configuration. + // Use the log as the source of truth. + append_step( + &mut summary, + "valgrind", + ok, + Some("security-logs/valgrind.log"), + ); + + // Keep overall status independent of valgrind step. + } else { + append_step(&mut summary, "valgrind", true, Some("skipped")); + } + + let summary_name = format!("security_summary_{}.md", timestamp()); + let summary_path = logs_dir.join(summary_name); + write_string(&summary_path, &summary)?; + + if !overall_ok { + return Err(anyhow!( + "one or more security checks failed; see {}", + summary_path.display() + )); + } + + Ok(()) +} + +fn append_step(summary: &mut String, name: &str, ok: bool, log: Option<&str>) { + let _ = writeln!(summary, "## {name}"); + let _ = writeln!(summary, "- status: {}", if ok { "ok" } else { "failed" }); + if let Some(log) = log { + let _ = writeln!(summary, "- log: {log}"); + } + let _ = writeln!(summary); +} + +async fn run_and_capture(root: &Path, name: &str, cmd: Cmd) -> (bool, String) { + println!("Running: {name}"); + + match cmd.output().await { + Ok(out) => { + let mut s = String::new(); + s.push_str("stdout:\n"); + s.push_str(&String::from_utf8_lossy(&out.stdout)); + s.push_str("\n\nstderr:\n"); + s.push_str(&String::from_utf8_lossy(&out.stderr)); + s.push('\n'); + ( + out.status.success(), + format!("cwd: {}\n\n{}", root.display(), s), + ) + } + Err(err) => (false, format!("cwd: {}\nerror: {err:#}\n", root.display())), + } +} + +fn timestamp() -> String { + use std::time::{SystemTime, UNIX_EPOCH}; + let secs = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_secs(); + secs.to_string() +} + +async fn print_tool_status(root: &PathBuf, ci_mode: bool) -> Result<()> { + let host = os_kind(); + + println!("Tool Availability Summary"); + 
println!("================================"); + println!(); + + println!("platform: {:?}", host); + println!("ci: {}", ci_mode); + println!(); + + let cargo_deny = which("cargo-deny").is_some(); + let cargo_shear = which("cargo-shear").is_some(); + let cargo_nextest = Cmd::new("cargo") + .args(["nextest", "--version"]) + .cwd(root) + .output() + .await + .map(|o| o.status.success()) + .unwrap_or(false); + + let has_miri = Cmd::new("rustup") + .args(["component", "list", "--installed"]) + .output() + .await + .map(|out| String::from_utf8_lossy(&out.stdout).contains("miri")) + .unwrap_or(false); + + let has_valgrind = which("valgrind").is_some(); + let has_instruments = if host == OsKind::Macos { + instruments_available(root).await + } else { + false + }; + + println!("Security Tools:"); + println!( + " cargo-deny: {}", + if cargo_deny { "yes" } else { "no" } + ); + println!( + " cargo-shear: {}", + if cargo_shear { "yes" } else { "no" } + ); + println!( + " cargo-nextest: {}", + if cargo_nextest { "yes" } else { "no" } + ); + println!(" miri component: {}", if has_miri { "yes" } else { "no" }); + if host == OsKind::Linux { + println!( + " valgrind: {}", + if has_valgrind { "yes" } else { "no" } + ); + } + if host == OsKind::Macos { + println!( + " instruments: {}", + if has_instruments { "yes" } else { "no" } + ); + } + + println!(); + + let active_toolchain = Cmd::new("rustup") + .args(["show", "active-toolchain"]) + .cwd(root) + .output() + .await + .ok() + .map(|o| String::from_utf8_lossy(&o.stdout).to_string()) + .unwrap_or_else(|| "unknown".to_string()); + let active_toolchain = active_toolchain + .split_whitespace() + .next() + .unwrap_or("unknown"); + + println!("Advanced Features:"); + if active_toolchain.contains("nightly") { + println!(" nightly toolchain: yes ({active_toolchain})"); + } else { + println!(" nightly toolchain: no ({active_toolchain})"); + } + + println!(); + println!("Defaults (after auto-config):"); + + // Re-run the same auto-config logic 
used by `run()` so `--check` output matches. + let mut defaults = Args { + check: false, + install: false, + ci: ci_mode, + no_auto_install: false, + no_miri: false, + no_valgrind: false, + force_valgrind: false, + no_deny: false, + no_shear: false, + no_instruments: false, + force_instruments: false, + miri_flags: "-Zmiri-disable-isolation -Zmiri-permissive-provenance".to_string(), + miri_rustflags: "--cfg miri".to_string(), + }; + + match host { + OsKind::Linux => { + // Linux keeps everything enabled by default. + } + OsKind::Macos => { + defaults.no_valgrind = true; + defaults.no_instruments = true; + } + _ => { + defaults.no_valgrind = true; + defaults.no_instruments = true; + defaults.no_miri = true; + } + } + + println!( + " deny: {}", + if defaults.no_deny { "off" } else { "on" } + ); + println!( + " shear: {}", + if defaults.no_shear { "off" } else { "on" } + ); + println!( + " miri: {}", + if defaults.no_miri { "off" } else { "on" } + ); + println!( + " valgrind: {}", + if defaults.no_valgrind { "off" } else { "on" } + ); + println!( + " instruments: {}", + if defaults.no_instruments { "off" } else { "on" } + ); + + Ok(()) +} + +async fn check_stable_rust_min_version(root: &PathBuf) -> Result<()> { + let pinned = crate::util::read_to_string(root.join(".rust-version-stable"))?; + let pinned = pinned.trim(); + if pinned.is_empty() { + return Ok(()); + } + + let output = Cmd::new("rustc") + .args(["--version"]) + .cwd(root) + .output() + .await?; + let stdout = String::from_utf8_lossy(&output.stdout); + let current = stdout.split_whitespace().nth(1).unwrap_or("").trim(); + if current.is_empty() { + return Err(anyhow!("could not parse rustc version from: {stdout}")); + } + + if compares_ge_semver(current, pinned) { + Ok(()) + } else { + Err(anyhow!( + "rustc {current} is below required stable {pinned} (from .rust-version-stable)" + )) + } +} + +fn compares_ge_semver(current: &str, required: &str) -> bool { + fn parse(v: &str) -> Option<(u64, u64, u64)> { + 
let v = v.split_once('-').map(|(a, _)| a).unwrap_or(v); + let mut it = v.split('.'); + Some(( + it.next()?.parse().ok()?, + it.next()?.parse().ok()?, + it.next()?.parse().ok()?, + )) + } + + let Some(c) = parse(current) else { + return false; + }; + let Some(r) = parse(required) else { + return false; + }; + c >= r +} + +async fn ensure_cargo_tool(bin: &str, crate_name: &str, auto_install: bool) -> Result<()> { + if which(bin).is_some() { + return Ok(()); + } + + if !auto_install { + return Err(anyhow!( + "required tool `{bin}` not found; install it with `cargo binstall {crate_name}` (recommended) or `cargo install {crate_name}`" + )); + } + + ensure_cargo_binstall().await?; + + // Prefer binstall so CI doesn't build crates from source. + if let Err(err) = Cmd::new("cargo") + .args(["binstall", "-y", crate_name]) + .run() + .await + { + // Fall back to source install if no prebuilt package is available. + eprintln!("[security] cargo binstall failed for {crate_name}: {err:#}"); + Cmd::new("cargo") + .args(["install", crate_name]) + .run() + .await + .with_context(|| format!("install {crate_name}"))?; + } + + if which(bin).is_none() { + return Err(anyhow!("tool {bin} still not found after install")); + } + + Ok(()) +} + +async fn ensure_cargo_binstall() -> Result<()> { + if Cmd::new("cargo") + .args(["binstall", "--version"]) + .output() + .await + .map(|o| o.status.success()) + .unwrap_or(false) + { + return Ok(()); + } + + // Using `cargo install` here is fine: this happens once, and enables fast installs for other tools. + Cmd::new("cargo") + .args(["install", "cargo-binstall"]) + .run() + .await + .context("install cargo-binstall")?; + + Ok(()) +} + +async fn ensure_miri(auto_install: bool) -> Result<()> { + // If it's already installed, keep this cheap. 
+ let installed = Cmd::new("rustup") + .args(["component", "list", "--installed"]) + .output() + .await + .map(|out| String::from_utf8_lossy(&out.stdout).contains("miri")) + .unwrap_or(false); + + if installed { + return Ok(()); + } + + if !auto_install { + return Err(anyhow!( + "miri component is not installed; run `rustup component add miri --toolchain nightly`" + )); + } + + Cmd::new("rustup") + .args(["component", "add", "miri", "--toolchain", "nightly"]) + .run() + .await + .context("rustup component add miri")?; + Ok(()) +} + +async fn ensure_valgrind(auto_install: bool) -> Result<()> { + if which("valgrind").is_some() { + return Ok(()); + } + + if !auto_install { + return Err(anyhow!( + "valgrind not found; install it via your system package manager" + )); + } + + match os_kind() { + OsKind::Linux => sudo_install(&["valgrind"]).await?, + // The legacy script attempted this, but valgrind is generally unreliable on macOS. + // We keep the behavior behind `auto_install` for parity. + OsKind::Macos => sudo_install(&["valgrind"]).await?, + _ => return Err(anyhow!("valgrind unsupported on this OS")), + } + if which("valgrind").is_none() { + return Err(anyhow!("valgrind not found after install")); + } + Ok(()) +} diff --git a/crates/xtask/src/commands/size_check.rs b/crates/xtask/src/commands/size_check.rs new file mode 100644 index 00000000..ed9c9c90 --- /dev/null +++ b/crates/xtask/src/commands/size_check.rs @@ -0,0 +1,201 @@ +use anyhow::{Context, Result, anyhow}; +use clap::{Parser, Subcommand}; +use std::path::PathBuf; + +use jiff::{Unit, Zoned}; + +use crate::util::{Cmd, format_bytes, percent_change, repo_root, write_string}; + +const DEFAULT_THRESHOLD_PCT: f64 = 10.0; + +#[derive(Parser, Debug)] +#[command( + about = "Track release binary sizes and regressions", + long_about = "Builds release binaries (via `xtask toolchain`) and reports their sizes. 
+
+Subcommands:
+- check (default): print current sizes
+- baseline: write `baselines/size_baseline.txt`
+- compare: compare current sizes to baseline and fail if over threshold
+- clean: remove the baseline file",
+    args_conflicts_with_subcommands = false,
+    subcommand_precedence_over_arg = false
+)]
+pub struct Args {
+    /// Subcommand to run (defaults to `check`)
+    #[command(subcommand)]
+    command: Option<Command>,
+
+    /// Fail if size increases beyond this percent (compare mode)
+    #[arg(short, long, default_value_t = DEFAULT_THRESHOLD_PCT)]
+    threshold: f64,
+}
+
+#[derive(Subcommand, Debug, Clone, Copy)]
+enum Command {
+    /// Print current release binary sizes
+    Check(VerbosityArgs),
+
+    /// Write `baselines/size_baseline.txt` from current sizes
+    Baseline(VerbosityArgs),
+
+    /// Compare current sizes to the baseline
+    Compare(VerbosityArgs),
+
+    /// Remove the baseline file
+    Clean,
+}
+
+#[derive(Parser, Debug, Clone, Copy)]
+struct VerbosityArgs {
+    /// Show a more verbose, table-style output
+    #[arg(short, long)]
+    verbose: bool,
+}
+
+pub async fn run(args: Args) -> Result<()> {
+    let root = repo_root()?;
+    let baseline_path = root.join("baselines/size_baseline.txt");
+
+    match args
+        .command
+        .unwrap_or(Command::Check(VerbosityArgs { verbose: false }))
+    {
+        Command::Check(verbosity) => {
+            let sizes = ensure_built_and_get_sizes(&root).await?;
+            if verbosity.verbose {
+                print_size_table(&sizes);
+            } else {
+                for (name, bytes) in &sizes {
+                    println!("{name}: {bytes} ({})", format_bytes(*bytes));
+                }
+            }
+        }
+        Command::Baseline(verbosity) => {
+            let sizes = ensure_built_and_get_sizes(&root).await?;
+            let mut out = String::new();
+            out.push_str("# RustOwl Binary Size Baseline\n");
+            out.push_str(&format!("# Generated on {}\n", timestamp_utc()));
+            out.push_str("# Format: binary_name:size_in_bytes\n");
+            for (name, bytes) in &sizes {
+                out.push_str(&format!("{name}:{bytes}\n"));
+            }
+            write_string(&baseline_path, &out)?;
+            println!("Wrote baseline: {}",
baseline_path.display());
+            if verbosity.verbose {
+                print_size_table(&sizes);
+            }
+        }
+        Command::Clean => {
+            if baseline_path.is_file() {
+                std::fs::remove_file(&baseline_path)
+                    .with_context(|| format!("remove {}", baseline_path.display()))?;
+            }
+        }
+        Command::Compare(verbosity) => {
+            let baseline = read_baseline(&baseline_path)?;
+            let current = ensure_built_and_get_sizes(&root).await?;
+
+            let mut failed = false;
+            for (name, cur) in &current {
+                let Some(base) = baseline.get(name) else {
+                    eprintln!("warning: no baseline for {name}");
+                    continue;
+                };
+                let change = percent_change(*base as f64, *cur as f64);
+                let diff = *cur as i64 - *base as i64;
+                let diff_str = if diff >= 0 {
+                    format!("+{}", format_bytes(diff as u64))
+                } else {
+                    format!("-{}", format_bytes((-diff) as u64))
+                };
+                match change {
+                    None => println!("{name}: baseline 0, current {cur}"),
+                    Some(pct) => {
+                        println!(
+                            "{name}: {} -> {} ({diff_str}, {pct:.1}%)",
+                            format_bytes(*base),
+                            format_bytes(*cur)
+                        );
+                        if pct > args.threshold {
+                            failed = true;
+                        }
+                    }
+                }
+            }
+
+            if failed {
+                return Err(anyhow!("binary size regression beyond threshold"));
+            }
+
+            if verbosity.verbose {
+                print_size_table(&current);
+            }
+        }
+    }
+
+    Ok(())
+}
+
+async fn ensure_built_and_get_sizes(root: &PathBuf) -> Result<Vec<(String, u64)>> {
+    let bins = [
+        ("rustowl".to_string(), root.join("target/release/rustowl")),
+        ("rustowlc".to_string(), root.join("target/release/rustowlc")),
+    ];
+
+    let need_build = bins.iter().any(|(_, p)| !p.is_file());
+    if need_build {
+        Cmd::new("cargo")
+            .args(["xtask", "toolchain", "cargo", "build", "--release"])
+            .cwd(root)
+            .run()
+            .await
+            .context("build release")?;
+    }
+
+    bins.into_iter()
+        .map(|(name, path)| {
+            let size = std::fs::metadata(&path)
+                .with_context(|| format!("metadata {}", path.display()))?
+                .len();
+            Ok((name, size))
+        })
+        .collect()
+}
+
+fn print_size_table(sizes: &[(String, u64)]) {
+    println!("\n{:<20} {:>12} {:>12}", "Binary", "Bytes", "Formatted");
+    println!("{:<20} {:>12} {:>12}", "------", "-----", "---------");
+    for (name, bytes) in sizes {
+        println!("{:<20} {:>12} {:>12}", name, bytes, format_bytes(*bytes));
+    }
+    println!();
+}
+
+fn timestamp_utc() -> String {
+    Zoned::now()
+        .in_tz("UTC")
+        .ok()
+        .and_then(|z| z.round(Unit::Second).ok())
+        .map(|z| z.strftime("%Y-%m-%d %H:%M:%S UTC").to_string())
+        .unwrap_or_else(|| "unknown".to_string())
+}
+
+fn read_baseline(path: &PathBuf) -> Result<std::collections::HashMap<String, u64>> {
+    let content = std::fs::read_to_string(path)
+        .with_context(|| format!("read baseline {}", path.display()))?;
+    let mut map = std::collections::HashMap::new();
+    for line in content.lines() {
+        let line = line.trim();
+        if line.is_empty() || line.starts_with('#') {
+            continue;
+        }
+        let Some((name, size)) = line.split_once(':') else {
+            continue;
+        };
+        if let Ok(parsed) = size.trim().parse::<u64>() {
+            map.insert(name.trim().to_string(), parsed);
+        }
+    }
+    Ok(map)
+}
diff --git a/crates/xtask/src/commands/toolchain.rs b/crates/xtask/src/commands/toolchain.rs
new file mode 100644
index 00000000..cfeaaa1a
--- /dev/null
+++ b/crates/xtask/src/commands/toolchain.rs
@@ -0,0 +1,236 @@
+use anyhow::{Context, Result, anyhow};
+use clap::Parser;
+use flate2::read::GzDecoder;
+use std::{
+    ffi::OsString,
+    path::{Path, PathBuf},
+};
+use tar::Archive;
+use tempfile::TempDir;
+
+use crate::util::{Cmd, read_to_string, repo_root, which};
+
+#[derive(Parser, Debug)]
+#[command(
+    about = "Run a command using RustOwl's pinned toolchain/sysroot",
+    long_about = "Runs any command with RustOwl's pinned Rust toolchain available on PATH.
+
+This command downloads a minimal sysroot (rustc, rust-std, cargo, rustc-dev, llvm-tools)
+into `~/.rustowl/sysroot/<channel>-<host>/` (or `$SYSROOT` if set) and then executes
+the requested command with that sysroot's `bin/` prepended to PATH.
+
+Common usage is wrapping `cargo` so CI and local tooling use the same compiler bits.
+
+Examples:
+    cargo xtask toolchain cargo build --release
+    cargo xtask toolchain cargo test -p rustowl
+    cargo xtask toolchain cargo +nightly miri test -p rustowl"
+)]
+pub struct Args {
+    /// Command (and args) to execute under the RustOwl sysroot
+    #[arg(trailing_var_arg = true, required = true, value_name = "CMD")]
+    cmd: Vec<OsString>,
+}
+
+pub async fn run(args: Args) -> Result<()> {
+    let root = repo_root()?;
+
+    let channel = read_toolchain_channel(&root)?;
+    let host = host_tuple()?;
+    let toolchain = format!("{}-{}", channel, host);
+
+    let sysroot = match std::env::var_os("SYSROOT") {
+        Some(s) => PathBuf::from(s),
+        None => {
+            let home = std::env::var_os("HOME").ok_or_else(|| anyhow!("HOME not set"))?;
+            PathBuf::from(home)
+                .join(".rustowl/sysroot")
+                .join(&toolchain)
+        }
+    };
+
+    ensure_sysroot(&sysroot, &toolchain).await?;
+
+    let mut iter = args.cmd.into_iter();
+    let program = iter
+        .next()
+        .ok_or_else(|| anyhow!("missing command"))?
+        .to_string_lossy()
+        .to_string();
+    let cmd_args: Vec<String> = iter.map(|s| s.to_string_lossy().to_string()).collect();
+
+    let path = sysroot.join("bin");
+    let existing = std::env::var("PATH").unwrap_or_default();
+    let new_path = format!("{}:{}", path.display(), existing);
+
+    Cmd::new(program)
+        .args(cmd_args)
+        .cwd(&root)
+        .env("PATH", new_path)
+        .env("RUSTC_BOOTSTRAP", "rustowlc")
+        .run()
+        .await
+}
+
+fn read_toolchain_channel(root: &Path) -> Result<String> {
+    let pinned_stable = root.join(".rust-version-stable");
+    if pinned_stable.is_file() {
+        return Ok(read_to_string(&pinned_stable)?
+            .lines()
+            .next()
+            .unwrap_or("")
+            .trim()
+            .to_string());
+    }
+
+    Err(anyhow!(
+        "could not locate pinned stable toolchain version (expected .rust-version-stable)"
+    ))
+}
+
+fn host_tuple() -> Result<String> {
+    let os = if cfg!(target_os = "linux") {
+        "unknown-linux-gnu"
+    } else if cfg!(target_os = "macos") {
+        "apple-darwin"
+    } else if cfg!(target_os = "windows") {
+        "pc-windows-msvc"
+    } else {
+        return Err(anyhow!("unsupported OS"));
+    };
+
+    let hint = std::env::var("RUNNER_ARCH")
+        .ok()
+        .or_else(|| std::env::var("PROCESSOR_ARCHITEW6432").ok())
+        .or_else(|| std::env::var("PROCESSOR_ARCHITECTURE").ok())
+        .or_else(|| std::env::var("MSYSTEM_CARCH").ok());
+
+    let arch = match hint.as_deref() {
+        Some("ARM64") | Some("arm64") | Some("aarch64") => "aarch64",
+        Some("AMD64") | Some("X64") | Some("amd64") | Some("x86_64") => "x86_64",
+        _ => {
+            let arch = std::env::consts::ARCH;
+            match arch {
+                "aarch64" => "aarch64",
+                "x86_64" => "x86_64",
+                other => return Err(anyhow!("unsupported architecture: {other}")),
+            }
+        }
+    };
+
+    Ok(format!("{arch}-{os}"))
+}
+
+async fn ensure_sysroot(sysroot: &Path, toolchain: &str) -> Result<()> {
+    if sysroot.is_dir() {
+        return Ok(());
+    }
+
+    std::fs::create_dir_all(sysroot)
+        .with_context(|| format!("create sysroot {}", sysroot.display()))?;
+
+    let components = ["rustc", "rust-std", "cargo", "rustc-dev", "llvm-tools"];
+
+    // Download/install in parallel (matches legacy shell script behavior).
+ let mut tasks = Vec::new(); + for component in components { + tasks.push(tokio::spawn(install_component( + component, + sysroot.to_path_buf(), + toolchain.to_string(), + ))); + } + + for t in tasks { + t.await.context("join toolchain installer")??; + } + + Ok(()) +} + +async fn install_component(component: &str, sysroot: PathBuf, toolchain: String) -> Result<()> { + let dist_base = "https://static.rust-lang.org/dist"; + let url = format!("{dist_base}/{component}-{toolchain}.tar.gz"); + eprintln!("Downloading {url}"); + + let resp = reqwest::get(&url) + .await + .with_context(|| format!("GET {url}"))?; + + if resp.status() == reqwest::StatusCode::NOT_FOUND { + return Err(anyhow!( + "toolchain artifact not found (404): {url}\n\ +This usually means the pinned nightly ({toolchain}) is no longer available on static.rust-lang.org (cleanup/retention).\n\ +Fix by updating `rust-toolchain.toml` to an existing nightly date or set `$SYSROOT` to a pre-downloaded sysroot." + )); + } + + let bytes = resp + .error_for_status() + .with_context(|| format!("HTTP {url}"))? + .bytes() + .await + .with_context(|| format!("read body {url}"))?; + + let temp = TempDir::new().context("tempdir")?; + let tar = GzDecoder::new(bytes.as_ref()); + let mut archive = Archive::new(tar); + archive.unpack(temp.path()).context("unpack")?; + + let component_dir = format!("{component}-{toolchain}"); + let base = temp.path().join(&component_dir); + let components_file = base.join("components"); + let comps = std::fs::read_to_string(&components_file) + .with_context(|| format!("read {}", components_file.display()))?; + + for entry in comps.lines().filter(|l| !l.trim().is_empty()) { + let com_base = base.join(entry.trim()); + let files_dir = com_base; + if !files_dir.is_dir() { + continue; + } + // Mirror the old script: move all files into sysroot. + for path in walk_files(&files_dir)? 
{
+            let rel = path.strip_prefix(&files_dir).unwrap();
+            let dest = sysroot.join(rel);
+            if let Some(p) = dest.parent() {
+                std::fs::create_dir_all(p).with_context(|| format!("mkdir {}", p.display()))?;
+            }
+            std::fs::rename(&path, &dest).or_else(|_| {
+                std::fs::copy(&path, &dest)
+                    .map(|_| ())
+                    .with_context(|| format!("copy {}", path.display()))
+            })?;
+        }
+    }
+
+    Ok(())
+}
+
+fn walk_files(dir: &Path) -> Result<Vec<PathBuf>> {
+    let mut out = Vec::new();
+    walk_files_inner(dir, &mut out)?;
+    Ok(out)
+}
+
+fn walk_files_inner(dir: &Path, out: &mut Vec<PathBuf>) -> Result<()> {
+    for entry in std::fs::read_dir(dir).with_context(|| format!("read_dir {}", dir.display()))? {
+        let entry = entry.context("read_dir entry")?;
+        let path = entry.path();
+        let ty = entry.file_type().context("file_type")?;
+        if ty.is_dir() {
+            walk_files_inner(&path, out)?;
+        } else if ty.is_file() {
+            out.push(path);
+        }
+    }
+    Ok(())
+}
+
+#[allow(dead_code)]
+fn ensure_git() -> Result<()> {
+    if which("git").is_none() {
+        return Err(anyhow!("git not found"));
+    }
+    Ok(())
+}
diff --git a/crates/xtask/src/main.rs b/crates/xtask/src/main.rs
new file mode 100644
index 00000000..115204d9
--- /dev/null
+++ b/crates/xtask/src/main.rs
@@ -0,0 +1,54 @@
+use anyhow::{Context, Result};
+use clap::{Parser, Subcommand};
+
+mod commands;
+mod util;
+
+#[derive(Parser, Debug)]
+#[command(author, version, about = "Project maintenance commands")]
+#[command(propagate_version = true)]
+#[command(disable_version_flag = true)]
+struct Cli {
+    #[command(subcommand)]
+    command: Command,
+}
+
+#[derive(Subcommand, Debug)]
+enum Command {
+    /// Run a command under a pinned Rust sysroot
+    Toolchain(commands::toolchain::Args),
+
+    /// Run formatting, linting, build and smoke tests
+    DevChecks(commands::dev_checks::Args),
+
+    /// Track release binary sizes and regressions
+    SizeCheck(commands::size_check::Args),
+
+    /// Run Neovim-based integration tests
+    NvimTests(commands::nvim_tests::Args),
+
+    /// Prepare a release tag and bump
versions
+    Bump(commands::bump::Args),
+
+    /// Run performance benchmarks and compare baselines
+    Bench(commands::bench::Args),
+
+    /// Run security-oriented checks (audit, miri, etc.)
+    Security(commands::security::Args),
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    let cli = Cli::parse();
+
+    match cli.command {
+        Command::Toolchain(args) => commands::toolchain::run(args).await,
+        Command::DevChecks(args) => commands::dev_checks::run(args).await,
+        Command::SizeCheck(args) => commands::size_check::run(args).await,
+        Command::NvimTests(args) => commands::nvim_tests::run(args).await,
+        Command::Bump(args) => commands::bump::run(args).await,
+        Command::Bench(args) => commands::bench::run(args).await,
+        Command::Security(args) => commands::security::run(args).await,
+    }
+    .context("xtask failed")
+}
diff --git a/crates/xtask/src/util.rs b/crates/xtask/src/util.rs
new file mode 100644
index 00000000..c9a2a498
--- /dev/null
+++ b/crates/xtask/src/util.rs
@@ -0,0 +1,301 @@
+use anyhow::{Context, Result, anyhow};
+use std::{
+    ffi::OsStr,
+    path::{Path, PathBuf},
+    process::Stdio,
+};
+use tokio::process::Command;
+
+pub fn repo_root() -> Result<PathBuf> {
+    let mut dir = std::env::current_dir().context("get current dir")?;
+    loop {
+        if dir.join("Cargo.toml").is_file() && dir.join("crates").is_dir() {
+            return Ok(dir);
+        }
+        if !dir.pop() {
+            break;
+        }
+    }
+    Err(anyhow!("could not locate repo root"))
+}
+
+pub fn is_ci() -> bool {
+    std::env::var_os("CI").is_some() || std::env::var_os("GITHUB_ACTIONS").is_some()
+}
+
+pub fn which<S: AsRef<OsStr>>(tool: S) -> Option<PathBuf> {
+    let tool = tool.as_ref();
+    let paths = std::env::var_os("PATH")?;
+    for path in std::env::split_paths(&paths) {
+        let candidate = path.join(tool);
+        if candidate.is_file() {
+            return Some(candidate);
+        }
+        #[cfg(windows)]
+        {
+            let candidate_exe = path.join(format!("{}.exe", tool.to_string_lossy()));
+            if candidate_exe.is_file() {
+                return Some(candidate_exe);
+            }
+        }
+    }
+    None
+}
+
+#[derive(Clone, Debug)]
+pub
struct Cmd {
+    pub program: String,
+    pub args: Vec<String>,
+    pub cwd: Option<PathBuf>,
+    pub env: Vec<(String, String)>,
+}
+
+impl Cmd {
+    pub fn new(program: impl Into<String>) -> Self {
+        Self {
+            program: program.into(),
+            args: Vec::new(),
+            cwd: None,
+            env: Vec::new(),
+        }
+    }
+
+    pub fn arg(mut self, arg: impl Into<String>) -> Self {
+        self.args.push(arg.into());
+        self
+    }
+
+    pub fn args<I, S>(mut self, args: I) -> Self
+    where
+        I: IntoIterator<Item = S>,
+        S: Into<String>,
+    {
+        self.args.extend(args.into_iter().map(Into::into));
+        self
+    }
+
+    pub fn cwd(mut self, cwd: impl Into<PathBuf>) -> Self {
+        self.cwd = Some(cwd.into());
+        self
+    }
+
+    pub fn env(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
+        self.env.push((key.into(), value.into()));
+        self
+    }
+
+    pub async fn run(self) -> Result<()> {
+        run_cmd(self, false).await
+    }
+
+    pub async fn output(self) -> Result<std::process::Output> {
+        let mut cmd = Command::new(&self.program);
+        cmd.args(&self.args);
+        if let Some(cwd) = &self.cwd {
+            cmd.current_dir(cwd);
+        }
+        for (k, v) in &self.env {
+            cmd.env(k, v);
+        }
+        cmd.stdin(Stdio::null());
+        cmd.output()
+            .await
+            .with_context(|| format!("run {}", display_cmd(&self.program, &self.args)))
+    }
+}
+
+async fn run_cmd(cmd: Cmd, quiet: bool) -> Result<()> {
+    let mut c = Command::new(&cmd.program);
+    c.args(&cmd.args);
+    if let Some(cwd) = &cmd.cwd {
+        c.current_dir(cwd);
+    }
+    for (k, v) in &cmd.env {
+        c.env(k, v);
+    }
+    c.stdin(Stdio::null());
+
+    if quiet {
+        c.stdout(Stdio::null());
+        c.stderr(Stdio::null());
+    } else {
+        c.stdout(Stdio::inherit());
+        c.stderr(Stdio::inherit());
+    }
+
+    let status = c
+        .status()
+        .await
+        .with_context(|| format!("run {}", display_cmd(&cmd.program, &cmd.args)))?;
+    if !status.success() {
+        return Err(anyhow!(
+            "command failed ({}): {}",
+            status,
+            display_cmd(&cmd.program, &cmd.args)
+        ));
+    }
+    Ok(())
+}
+
+pub fn display_cmd(program: &str, args: &[String]) -> String {
+    let mut s = program.to_string();
+    for a in args {
+        s.push(' ');
+        s.push_str(&shell_escape(a));
+    }
+    s
+}
+
+fn
shell_escape(s: &str) -> String {
+    if s.is_empty() {
+        return "''".to_string();
+    }
+    if s.chars()
+        .all(|c| c.is_ascii_alphanumeric() || "-._/:".contains(c))
+    {
+        return s.to_string();
+    }
+    let mut out = String::from("'");
+    for ch in s.chars() {
+        if ch == '\'' {
+            out.push_str("'\\''");
+        } else {
+            out.push(ch);
+        }
+    }
+    out.push('\'');
+    out
+}
+
+pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
+    std::fs::read_to_string(&path).with_context(|| format!("read {}", path.as_ref().display()))
+}
+
+pub fn write_string(path: impl AsRef<Path>, contents: &str) -> Result<()> {
+    if let Some(parent) = path.as_ref().parent() {
+        std::fs::create_dir_all(parent)
+            .with_context(|| format!("create dir {}", parent.display()))?;
+    }
+    std::fs::write(&path, contents).with_context(|| format!("write {}", path.as_ref().display()))
+}
+
+pub fn format_bytes(bytes: u64) -> String {
+    const KB: f64 = 1024.0;
+    const MB: f64 = KB * 1024.0;
+    const GB: f64 = MB * 1024.0;
+    let b = bytes as f64;
+    if b >= GB {
+        format!("{:.2}GiB", b / GB)
+    } else if b >= MB {
+        format!("{:.2}MiB", b / MB)
+    } else if b >= KB {
+        format!("{:.2}KiB", b / KB)
+    } else {
+        format!("{}B", bytes)
+    }
+}
+
+pub fn percent_change(baseline: f64, current: f64) -> Option<f64> {
+    if baseline == 0.0 {
+        return None;
+    }
+    Some(((current - baseline) / baseline) * 100.0)
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum OsKind {
+    Linux,
+    Macos,
+    Windows,
+    Other,
+}
+
+pub fn os_kind() -> OsKind {
+    if cfg!(target_os = "linux") {
+        OsKind::Linux
+    } else if cfg!(target_os = "macos") {
+        OsKind::Macos
+    } else if cfg!(target_os = "windows") {
+        OsKind::Windows
+    } else {
+        OsKind::Other
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum PackageManager {
+    Apt,
+    Dnf,
+    Yum,
+    Pacman,
+    Brew,
+}
+
+pub fn detect_package_manager() -> Option<PackageManager> {
+    if which("apt-get").is_some() {
+        return Some(PackageManager::Apt);
+    }
+    if which("dnf").is_some() {
+        return Some(PackageManager::Dnf);
+    }
+    if
which("yum").is_some() { + return Some(PackageManager::Yum); + } + if which("pacman").is_some() { + return Some(PackageManager::Pacman); + } + if which("brew").is_some() { + return Some(PackageManager::Brew); + } + None +} + +pub async fn sudo_install(pkgs: &[&str]) -> Result<()> { + let mgr = + detect_package_manager().ok_or_else(|| anyhow!("no supported package manager found"))?; + match mgr { + PackageManager::Apt => { + Cmd::new("sudo").args(["apt-get", "update"]).run().await?; + Cmd::new("sudo") + .args(["apt-get", "install", "-y"]) + .args(pkgs.iter().copied()) + .run() + .await + } + PackageManager::Dnf => { + Cmd::new("sudo") + .args(["dnf", "install", "-y"]) + .args(pkgs.iter().copied()) + .run() + .await + } + PackageManager::Yum => { + Cmd::new("sudo") + .args(["yum", "install", "-y"]) + .args(pkgs.iter().copied()) + .run() + .await + } + PackageManager::Pacman => { + Cmd::new("sudo") + .args(["pacman", "-S", "--noconfirm"]) + .args(pkgs.iter().copied()) + .run() + .await + } + PackageManager::Brew => { + Cmd::new("brew") + .args(["install"]) + .args(pkgs.iter().copied()) + .run() + .await + } + } +} + +pub fn ensure_tool(tool: &str) -> Result<()> { + if which(tool).is_none() { + return Err(anyhow!("required tool not found in PATH: {tool}")); + } + Ok(()) +} diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000..bcd6fe3e --- /dev/null +++ b/deny.toml @@ -0,0 +1,241 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +# The graph table 
configures how the dependency graph is constructed and thus +# which crates the checks are performed against +[graph] +# If 1 or more target triples (and optionally, target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + # "x86_64-unknown-linux-musl", + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +# exclude = [] +# If true, metadata will be collected with `--all-features`. 
Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = false +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false +# If set, these features will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +# features = [] + +# The output table provides options for how/if diagnostics are outputted +[output] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +# The path where the advisory databases are cloned/fetched into +# db-path = "$CARGO_HOME/advisory-dbs" +# The url(s) of the advisory databases to use +# db-urls = ["https://github.com/rustsec/advisory-db"] +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. 
+ignore = [ + # "RUSTSEC-0000-0000", + # { id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, + # "a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish + # { crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, +] +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +# git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. +allow = [ + "MIT", + "Apache-2.0", + "Zlib", + "Unicode-3.0", + "ISC", + "MPL-2.0", + "BSD-3-Clause", + "OpenSSL", + "CDLA-Permissive-2.0", + "CC0-1.0", + "MIT-0", +] +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. 
+confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + # Each entry is the crate and version constraint, and its specific allow + # list + # { allow = ["Zlib"], crate = "adler32" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +# [[licenses.clarify]] +# The package spec the clarification applies to +# crate = "ring" +# The SPDX expression for the license requirements of the crate +# expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +# license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +# { path = "LICENSE", hash = 0xbd0eed23 } +# ] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + # "https://sekretz.com/registry" +] + +# This section is considered when running `cargo deny check bans`. 
+# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "allow" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overridden by allowing/denying +# `default` on a crate-by-crate basis if desired. +workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overridden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, +] +# List of crates to deny +deny = [ + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + # { crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, +] +# List of features to allow/deny +# Each entry is the name of a crate and a version range. If version is +# not specified, all versions will be matched. 
+# [[bans.features]] +# crate = "reqwest" +# Features to not allow +# deny = ["json"] +# Features to allow +# allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +# ] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +# exact = true +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + # "ansi_term@0.11.0", + # { crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. +skip-tree = [ + # "ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies + # { crate = "ansi_term@0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. +# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. 
+allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# github.com organizations to allow git sources for +github = [] +# gitlab.com organizations to allow git sources for +gitlab = [] +# bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index 6daa69e7..fc903b7d 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -67,10 +67,10 @@ Note: Using this method is strongly discouraged officially. See [Unstable Book]( To compile `rustowlc` with stable compiler, you should set environment variable as `RUSTC_BOOTSTRAP=1`. -For example building with stable 1.89.0 Rust compiler: +For example building with stable 1.92.0 Rust compiler: ```bash -RUSTC_BOOTSTRAP=1 rustup run 1.89.0 cargo build --release +RUSTC_BOOTSTRAP=1 rustup run 1.92.0 cargo build --release ``` Note that by using normal `cargo` command RustOwl will be built with nightly compiler since there is a `rust-toolchain.toml` which specifies nightly compiler for development environment. @@ -94,10 +94,10 @@ We provide a comprehensive development checks script that validates code quality ```bash # Run all development checks -./scripts/dev-checks.sh +cargo xtask dev-checks # Run checks and automatically fix issues where possible -./scripts/dev-checks.sh --fix +cargo xtask dev-checks --fix ``` This script performs: @@ -109,21 +109,60 @@ This script performs: - Unit test execution - VS Code extension checks (formatting, linting, type checking) +### Writing Miri-Compatible Async Tests + +Miri doesn't support `#[tokio::test]` directly. 
RustOwl provides the `async_test!` macro for writing async tests that work with both regular test runs and Miri: + +```rust +use crate::async_test; + +async_test!(test_async_operation, async { + // Your async test code here + let result = some_async_function().await; + assert!(result.is_ok()); +}); +``` + +The macro creates a tokio runtime with `enable_all()` and runs the async block. See the [Miri issue](https://github.com/rust-lang/miri/issues/602#issuecomment-884019764) for background. + +> [!IMPORTANT] +> The `async_test!` macro enables tokio's IO driver, which uses platform-specific syscalls (`kqueue` on macOS, `epoll` on Linux) that Miri doesn't support. For tests that require the IO driver (e.g., LSP backend tests, networking, file system operations via `tokio::fs`), exclude the entire test module from Miri: + +```rust +// Tests requiring tokio IO driver - excluded from Miri +#[cfg(all(test, not(miri)))] +mod tests { + use crate::async_test; + + async_test!(test_with_io, async { + // Test code using tokio::fs, networking, etc. 
+ }); +} +``` + +For individual tests that cannot run under Miri, prefer `async_test!` (it automatically applies `#[cfg_attr(miri, ignore)]`): + +```rust +use crate::async_test; + +async_test!(test_requiring_external_io, async { + // Test code +}); +``` + ### Security and Memory Safety Testing Run comprehensive security analysis before submitting: ```bash # Run all available security tests -./scripts/security.sh - -# Check which security tools are available -./scripts/security.sh --check +cargo xtask security # Run specific test categories -./scripts/security.sh --no-miri # Skip Miri tests -./scripts/security.sh --no-valgrind # Skip Valgrind tests -./scripts/security.sh --no-audit # Skip cargo-audit +cargo xtask security --no-miri # Skip Miri tests +cargo xtask security --no-valgrind # Skip Valgrind tests +cargo xtask security --no-audit # Skip cargo-audit +cargo xtask security --no-machete # Skip cargo-machete ``` The security script includes: @@ -139,26 +178,25 @@ The security script includes: Validate that your changes don't introduce performance regressions: ```bash -# Run performance benchmarks -./scripts/bench.sh +# Run performance benchmarks (Divan) +cargo xtask bench # Create a baseline for comparison -./scripts/bench.sh --save my-baseline +cargo xtask bench --save my-baseline # Compare against a baseline with custom threshold -./scripts/bench.sh --load my-baseline --threshold 3% +cargo xtask bench --load my-baseline --threshold 3 -# Clean build and open HTML report -./scripts/bench.sh --clean --open +# Clean build before benchmarking +cargo xtask bench --clean ``` Performance testing features: -- Criterion benchmark integration +- Divan benchmark integration - Baseline creation and comparison - Configurable regression thresholds (default: 5%) -- Automatic test package detection -- HTML report generation +- Strict parsing (errors if output format changes) ### Binary Size Monitoring @@ -166,13 +204,13 @@ Check for binary size regressions: ```bash # Analyze 
current binary sizes -./scripts/size-check.sh +cargo xtask size-check # Compare against a saved baseline -./scripts/size-check.sh --load previous-baseline +cargo xtask size-check compare # Save current sizes as baseline -./scripts/size-check.sh --save new-baseline +cargo xtask size-check baseline ``` ### Manual Checks @@ -199,42 +237,42 @@ If the automated scripts are not available, ensure: ```bash # Create performance baseline - ./scripts/bench.sh --save before-changes + cargo xtask bench --save before-changes ``` -2. **During development**: +1. **During development**: ```bash # Run quick checks frequently - ./scripts/dev-checks.sh --fix + cargo xtask dev-checks --fix ``` -3. **Before committing**: +1. **Before committing**: + ```bash # Run comprehensive validation - ./scripts/dev-checks.sh - ./scripts/security.sh - ./scripts/bench.sh --load before-changes - ./scripts/size-check.sh + cargo xtask dev-checks + cargo xtask security + cargo xtask bench --load before-changes + cargo xtask size-check ``` ### Integration with CI -Our scripts are designed to match CI workflows: +Our `cargo xtask` commands are designed to match CI workflows: -- **`security.sh`** ↔ **`.github/workflows/security.yml`** -- **`bench.sh`** ↔ **`.github/workflows/bench-performance.yml`** -- **`dev-checks.sh`** ↔ **`.github/workflows/checks.yml`** +- **`cargo xtask security`** ↔ **`.github/workflows/security.yml`** +- **`cargo xtask bench`** ↔ *(local-only by default)* +- **`cargo xtask dev-checks`** ↔ **`.github/workflows/checks.yml`** This ensures local testing provides the same results as CI. ## Troubleshooting -### Script Permissions +### `cargo xtask` + +If `cargo xtask` is not found, ensure you are on the workspace root and have Rust installed. 
-```bash -chmod +x scripts/*.sh -``` ### Missing Tools @@ -260,7 +298,5 @@ brew install gnuplot - Check workflow logs for specific error messages - Verify `rust-toolchain.toml` compatibility -- Ensure scripts have execution permissions -- Test locally with the same script used in CI +- Test locally with the same `cargo xtask` command used in CI -For more detailed information about the scripts, see [`scripts/README.md`](../scripts/README.md). diff --git a/docs/build.md b/docs/build.md index 462be954..071256b8 100644 --- a/docs/build.md +++ b/docs/build.md @@ -13,11 +13,10 @@ On a freshly installed Ubuntu system, you need to run `apt install build-essenti ### Build RustOwl using stable toolchain -There are scripts to build the stable version of RustOwl. -`scripts/build/toolchain` sets up the RustOwl toolchain and executes command using that toolchain. +Use `cargo xtask toolchain` to set up the RustOwl sysroot and execute a command under it. ```bash -./scripts/build/toolchain cargo install --path . --locked +cargo xtask toolchain cargo install --path . --locked ``` ### Build RustOwl using custom toolchain diff --git a/docs/cache-configuration.md b/docs/cache-configuration.md new file mode 100644 index 00000000..3ec69211 --- /dev/null +++ b/docs/cache-configuration.md @@ -0,0 +1,129 @@ +# Cache Configuration + +RustOwl includes a robust incremental caching system that significantly improves analysis performance by storing and reusing previously computed results. This document explains how to configure and optimize the cache for your needs. + +## Overview + +The cache system stores analyzed MIR (Mid-level Intermediate Representation) data to avoid recomputing results for unchanged code. 
With the new robust caching implementation, you get: + +- **Intelligent cache eviction** with LRU (Least Recently Used) policy +- **Memory usage tracking** and automatic cleanup +- **File modification time validation** to ensure cache consistency +- **Comprehensive statistics** and debugging information +- **Configurable policies** via environment variables + +## Environment Variables + +### Core Cache Settings + +- **`RUSTOWL_CACHE`**: Enable/disable caching (default: enabled) + - Set to `false` or `0` to disable caching entirely + +- **`RUSTOWL_CACHE_DIR`**: Set custom cache directory + - Default (cargo workspace runs): `{target_dir}/owl/cache` + - For single-file analysis, set `RUSTOWL_CACHE_DIR` explicitly. + - Example: `export RUSTOWL_CACHE_DIR=/tmp/rustowl-cache` + +### Advanced Configuration + +- **`RUSTOWL_CACHE_MAX_ENTRIES`**: Maximum number of cache entries (default: 1000) + - Example: `export RUSTOWL_CACHE_MAX_ENTRIES=2000` + +- **`RUSTOWL_CACHE_MAX_MEMORY_MB`**: Maximum cache memory in MB (default: 100) + - Example: `export RUSTOWL_CACHE_MAX_MEMORY_MB=200` + +- **`RUSTOWL_CACHE_EVICTION`**: Cache eviction policy (default: "lru") + - Options: `lru` (Least Recently Used), `fifo` (First In First Out) + - Example: `export RUSTOWL_CACHE_EVICTION=lru` + +- **`RUSTOWL_CACHE_VALIDATE_FILES`**: Enable file modification validation (default: enabled) + - Set to `false` or `0` to disable file timestamp checking + - Example: `export RUSTOWL_CACHE_VALIDATE_FILES=false` + +## Cache Performance Tips + +### For Large Projects + +```bash +# Increase cache size for large codebases +export RUSTOWL_CACHE_MAX_ENTRIES=5000 +export RUSTOWL_CACHE_MAX_MEMORY_MB=500 +``` + +### For Development + +```bash +# Enable full validation and debugging +export RUSTOWL_CACHE_VALIDATE_FILES=true +export RUSTOWL_CACHE_EVICTION=lru +``` + +## Cache Statistics + +The cache system provides detailed statistics about performance: + +- **Hit Rate**: Percentage of cache hits vs misses +- **Memory 
Usage**: Current memory consumption +- **Evictions**: Number of entries removed due to space or memory constraints +- **Invalidations**: Number of entries removed proactively due to source file changes (mtime validation) + +These statistics are logged during analysis and when the cache is saved. + +## Cache File Format + +Cache files are stored as JSON in the cache directory with the format: + +- `{crate_name}.json` - Main cache file +- `{crate_name}.json.tmp` - Temporary file used for atomic writes + +The cache includes metadata for each entry: + +- Creation and last access timestamps +- Access count for LRU calculations +- File modification times for validation +- Memory usage estimation + +## Performance Impact + +With the robust caching system, you can expect: + +- **93% reduction** in analysis time for unchanged code +- **Intelligent memory management** to prevent memory exhaustion +- **Faster startup** due to optimized cache loading +- **Better reliability** with atomic file operations and corruption detection + +## Troubleshooting + +### Cache Not Working + +1. Check if caching is enabled: `echo $RUSTOWL_CACHE` +2. Verify cache directory permissions: `ls -la $RUSTOWL_CACHE_DIR` +3. Look for cache-related log messages during analysis + +### High Memory Usage + +1. Reduce `RUSTOWL_CACHE_MAX_MEMORY_MB` +2. Decrease `RUSTOWL_CACHE_MAX_ENTRIES` +3. Consider switching to FIFO eviction: `export RUSTOWL_CACHE_EVICTION=fifo` + +### Inconsistent Results + +1. Enable file validation: `export RUSTOWL_CACHE_VALIDATE_FILES=true` +2. Clear the cache directory to force fresh analysis +3. 
Check for file system timestamp issues + +## Example Configuration + +Here's a complete configuration for a large Rust project: + +```bash +# Enable caching with generous limits +export RUSTOWL_CACHE=true +export RUSTOWL_CACHE_DIR=/fast-ssd/rustowl-cache +export RUSTOWL_CACHE_MAX_ENTRIES=10000 +export RUSTOWL_CACHE_MAX_MEMORY_MB=1000 +export RUSTOWL_CACHE_EVICTION=lru +export RUSTOWL_CACHE_VALIDATE_FILES=true +``` + +This configuration provides maximum performance while maintaining cache consistency and reliability. diff --git a/perf-tests/dummy-package/Cargo.toml b/perf-tests/dummy-package/Cargo.toml index c13ef42e..f0c0e634 100644 --- a/perf-tests/dummy-package/Cargo.toml +++ b/perf-tests/dummy-package/Cargo.toml @@ -3,32 +3,6 @@ name = "rustowl-perf-test-dummy" version = "0.1.0" edition = "2021" -[features] -default = ["tokio"] -feature_a = ["dep:winapi"] -feature_b = ["dep:base64"] -networking = ["reqwest", "tokio"] -advanced_crypto = ["feature_b"] - -[dependencies] -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -tokio = { version = "1.0", features = ["full"], optional = true } -reqwest = { version = "0.12.18", features = ["json"], optional = true } -clap = { version = "4.5", features = ["derive"] } -anyhow = "1.0" -log = "0.4" -env_logger = "0.11.8" -chrono = { version = "0.4", features = ["serde"] } -base64 = { version = "0.22", optional = true } - -# Platform-specific dependencies -[target.'cfg(windows)'.dependencies] -winapi = { version = "0.3", features = ["winuser", "processthreadsapi"], optional = true } - -[target.'cfg(unix)'.dependencies] -libc = "0.2" - [lib] name = "rustowl_perf_test_dummy" path = "src/lib.rs" @@ -44,3 +18,29 @@ path = "examples/example_target.rs" [[bench]] name = "bench-target" path = "benches/bench_target.rs" + +[dependencies] +anyhow = "1.0" +base64 = { version = "0.22", optional = true } +chrono = { version = "0.4", features = ["serde"] } +clap = { version = "4.5", features = ["derive"] } +env_logger = 
"0.11.8" +log = "0.4" +reqwest = { version = "0.12.18", features = ["json"], optional = true } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tokio = { version = "1.0", features = ["full"], optional = true } + +[target.'cfg(unix)'.dependencies] +libc = "0.2" + +# Platform-specific dependencies +[target.'cfg(windows)'.dependencies] +winapi = { version = "0.3", features = ["processthreadsapi", "winuser"], optional = true } + +[features] +default = ["tokio"] +advanced_crypto = ["feature_b"] +feature_a = ["dep:winapi"] +feature_b = ["dep:base64"] +networking = ["reqwest", "tokio"] diff --git a/perf-tests/dummy-package/src/main.rs b/perf-tests/dummy-package/src/main.rs index 815ac2ad..d49d2b77 100644 --- a/perf-tests/dummy-package/src/main.rs +++ b/perf-tests/dummy-package/src/main.rs @@ -319,7 +319,8 @@ async fn run_feature_tests() -> Result<()> { mod tests { use super::*; - #[tokio::test] + #[cfg_attr(not(miri), tokio::test)] + #[cfg_attr(miri, test)] async fn test_data_operations() { let result = run_data_operations(10).await; // Allow this to fail since some operations are intentionally problematic diff --git a/rust-toolchain.toml b/rust-toolchain.toml index ef8ceded..0e30c52b 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "nightly-2025-06-20" +channel = "nightly-2025-12-11" components = [ "rustc", "rust-std", diff --git a/scripts/.commitlintrc.json b/scripts/.commitlintrc.json deleted file mode 100644 index baddd35e..00000000 --- a/scripts/.commitlintrc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "$schema": "./vscode/node_modules/@commitlint/config-validator/lib/commitlint.schema.json", - "extends": ["@commitlint/config-conventional"], - "formatter": "@commitlint/format" -} diff --git a/scripts/bench.sh b/scripts/bench.sh index faa5aca1..1f91e486 100755 --- a/scripts/bench.sh +++ b/scripts/bench.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Local performance benchmarking script for 
RustOwl # This script provides an easy way to run Criterion benchmarks locally # Local performance benchmarking script for development use @@ -14,15 +14,18 @@ BOLD='\033[1m' NC='\033[0m' # No Color # Configuration -BENCHMARK_NAME="rustowl_bench_simple" +BENCHMARK_NAME=( + "rustowl_bench_simple" + "line_col_bench" +) # Look for existing test packages in the repo TEST_PACKAGES=( - "./tests/fixtures" - "./benches/fixtures" - "./test-data" - "./examples" - "./perf-tests" + "./tests/fixtures" + "./benches/fixtures" + "./test-data" + "./examples" + "./perf-tests" ) # Options @@ -36,425 +39,429 @@ REGRESSION_THRESHOLD="5%" TEST_PACKAGE_PATH="" usage() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Performance Benchmarking Script for RustOwl" - echo "Runs Criterion benchmarks with comparison and regression detection capabilities" - echo "" - echo "Options:" - echo " -h, --help Show this help message" - echo " --save Save benchmark results as baseline with given name" - echo " --load Load baseline and compare current results against it" - echo " --threshold Set regression threshold (default: 5%)" - echo " --test-package Use specific test package (auto-detected if not specified)" - echo " --open Open HTML report in browser after benchmarking" - echo " --clean Clean build artifacts before benchmarking" - echo " --quiet Minimal output (for CI/automated use)" - echo "" - echo "Examples:" - echo " $0 # Run benchmarks with default settings" - echo " $0 --save main # Save results as 'main' baseline" - echo " $0 --load main --threshold 3% # Compare against 'main' with 3% threshold" - echo " $0 --clean --open # Clean build, run benchmarks, open report" - echo " $0 --save current --quiet # Save baseline quietly (for CI)" - echo "" - echo "Baseline Management:" - echo " Baselines are stored in: baselines/performance//" - echo " HTML reports are in: target/criterion/report/" - echo "" + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Performance Benchmarking Script for RustOwl" + echo 
"Runs Criterion benchmarks with comparison and regression detection capabilities" + echo "" + echo "Options:" + echo " -h, --help Show this help message" + echo " --save Save benchmark results as baseline with given name" + echo " --load Load baseline and compare current results against it" + echo " --threshold Set regression threshold (default: 5%)" + echo " --test-package Use specific test package (auto-detected if not specified)" + echo " --open Open HTML report in browser after benchmarking" + echo " --clean Clean build artifacts before benchmarking" + echo " --quiet Minimal output (for CI/automated use)" + echo "" + echo "Examples:" + echo " $0 # Run benchmarks with default settings" + echo " $0 --save main # Save results as 'main' baseline" + echo " $0 --load main --threshold 3% # Compare against 'main' with 3% threshold" + echo " $0 --clean --open # Clean build, run benchmarks, open report" + echo " $0 --save current --quiet # Save baseline quietly (for CI)" + echo "" + echo "Baseline Management:" + echo " Baselines are stored in: baselines/performance//" + echo " HTML reports are in: target/criterion/report/" + echo "" } # Parse command line arguments while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - usage - exit 0 - ;; - --save) - if [[ -z "$2" ]]; then - echo -e "${RED}Error: --save requires a baseline name${NC}" - echo "Example: $0 --save main" - exit 1 - fi - SAVE_BASELINE="$2" - shift 2 - ;; - --load) - if [[ -z "$2" ]]; then - echo -e "${RED}Error: --load requires a baseline name${NC}" - echo "Example: $0 --load main" - exit 1 - fi - LOAD_BASELINE="$2" - COMPARE_MODE=true - shift 2 - ;; - --threshold) - if [[ -z "$2" ]]; then - echo -e "${RED}Error: --threshold requires a percentage${NC}" - echo "Example: $0 --threshold 3%" - exit 1 - fi - REGRESSION_THRESHOLD="$2" - shift 2 - ;; - --test-package) - if [[ -z "$2" ]]; then - echo -e "${RED}Error: --test-package requires a path${NC}" - echo "Example: $0 --test-package ./examples/sample" - exit 1 - fi 
- TEST_PACKAGE_PATH="$2" - shift 2 - ;; - --open) - OPEN_REPORT=true - shift - ;; - --clean) - CLEAN_BUILD=true - shift - ;; - --quiet) - SHOW_OUTPUT=false - shift - ;; - baseline) - # Legacy support for CI workflow - SAVE_BASELINE="main" - SHOW_OUTPUT=false - shift - ;; - compare) - # Legacy support for CI workflow - COMPARE_MODE=true - LOAD_BASELINE="main" - shift - ;; - *) - echo -e "${RED}Unknown option: $1${NC}" - echo "Use --help for usage information" - exit 1 - ;; - esac + case $1 in + -h | --help) + usage + exit 0 + ;; + --save) + if [[ -z "$2" ]]; then + echo -e "${RED}Error: --save requires a baseline name${NC}" + echo "Example: $0 --save main" + exit 1 + fi + SAVE_BASELINE="$2" + shift 2 + ;; + --load) + if [[ -z "$2" ]]; then + echo -e "${RED}Error: --load requires a baseline name${NC}" + echo "Example: $0 --load main" + exit 1 + fi + LOAD_BASELINE="$2" + COMPARE_MODE=true + shift 2 + ;; + --threshold) + if [[ -z "$2" ]]; then + echo -e "${RED}Error: --threshold requires a percentage${NC}" + echo "Example: $0 --threshold 3%" + exit 1 + fi + REGRESSION_THRESHOLD="$2" + shift 2 + ;; + --test-package) + if [[ -z "$2" ]]; then + echo -e "${RED}Error: --test-package requires a path${NC}" + echo "Example: $0 --test-package ./examples/sample" + exit 1 + fi + TEST_PACKAGE_PATH="$2" + shift 2 + ;; + --open) + OPEN_REPORT=true + shift + ;; + --clean) + CLEAN_BUILD=true + shift + ;; + --quiet) + SHOW_OUTPUT=false + shift + ;; + baseline) + # Legacy support for CI workflow + SAVE_BASELINE="main" + SHOW_OUTPUT=false + shift + ;; + compare) + # Legacy support for CI workflow + COMPARE_MODE=true + LOAD_BASELINE="main" + shift + ;; + *) + echo -e "${RED}Unknown option: $1${NC}" + echo "Use --help for usage information" + exit 1 + ;; + esac done print_header() { - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${BLUE}${BOLD}=====================================${NC}" - echo -e "${BLUE}${BOLD} RustOwl Performance Benchmarks${NC}" - echo -e 
"${BLUE}${BOLD}=====================================${NC}" - echo "" - - if [[ -n "$SAVE_BASELINE" ]]; then - echo -e "${GREEN}Mode: Save baseline as '$SAVE_BASELINE'${NC}" - elif [[ "$COMPARE_MODE" == "true" ]]; then - echo -e "${GREEN}Mode: Compare against '$LOAD_BASELINE' baseline${NC}" - echo -e "${GREEN}Regression threshold: $REGRESSION_THRESHOLD${NC}" - else - echo -e "${GREEN}Mode: Standard benchmark run${NC}" - fi - echo "" - fi + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${BLUE}${BOLD}=====================================${NC}" + echo -e "${BLUE}${BOLD} RustOwl Performance Benchmarks${NC}" + echo -e "${BLUE}${BOLD}=====================================${NC}" + echo "" + + if [[ -n "$SAVE_BASELINE" ]]; then + echo -e "${GREEN}Mode: Save baseline as '$SAVE_BASELINE'${NC}" + elif [[ "$COMPARE_MODE" == "true" ]]; then + echo -e "${GREEN}Mode: Compare against '$LOAD_BASELINE' baseline${NC}" + echo -e "${GREEN}Regression threshold: $REGRESSION_THRESHOLD${NC}" + else + echo -e "${GREEN}Mode: Standard benchmark run${NC}" + fi + echo "" + fi } find_test_package() { - if [[ -n "$TEST_PACKAGE_PATH" ]]; then - if [[ -d "$TEST_PACKAGE_PATH" ]]; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Using specified test package: $TEST_PACKAGE_PATH${NC}" - fi - return 0 - else - echo -e "${RED}Error: Specified test package not found: $TEST_PACKAGE_PATH${NC}" - exit 1 - fi - fi - - # Auto-detect existing test packages - for test_dir in "${TEST_PACKAGES[@]}"; do - if [[ -d "$test_dir" ]]; then - # Check if it contains Rust code - if find "$test_dir" -name "*.rs" | head -1 >/dev/null 2>&1; then - TEST_PACKAGE_PATH="$test_dir" - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Found test package: $TEST_PACKAGE_PATH${NC}" - fi - return 0 - fi - # Check if it contains Cargo.toml files (subdirectories with packages) - if find "$test_dir" -name "Cargo.toml" | head -1 >/dev/null 2>&1; then - TEST_PACKAGE_PATH=$(find "$test_dir" -name "Cargo.toml" 
| head -1 | xargs dirname) - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Found test package: $TEST_PACKAGE_PATH${NC}" - fi - return 0 - fi - fi - done - - # Look for existing benchmark files - if [[ -d "./benches" ]]; then - TEST_PACKAGE_PATH="./benches" - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Using benchmark directory: $TEST_PACKAGE_PATH${NC}" - fi - return 0 - fi - - # Use the current project as test package - if [[ -f "./Cargo.toml" ]]; then - TEST_PACKAGE_PATH="." - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Using current project as test package${NC}" - fi - return 0 - fi - - echo -e "${RED}Error: No suitable test package found in the repository${NC}" - echo -e "${YELLOW}Searched in: ${TEST_PACKAGES[*]}${NC}" - echo -e "${YELLOW}Use --test-package to specify a custom location${NC}" - exit 1 + if [[ -n "$TEST_PACKAGE_PATH" ]]; then + if [[ -d "$TEST_PACKAGE_PATH" ]]; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Using specified test package: $TEST_PACKAGE_PATH${NC}" + fi + return 0 + else + echo -e "${RED}Error: Specified test package not found: $TEST_PACKAGE_PATH${NC}" + exit 1 + fi + fi + + # Auto-detect existing test packages + for test_dir in "${TEST_PACKAGES[@]}"; do + if [[ -d "$test_dir" ]]; then + # Check if it contains Rust code + if find "$test_dir" -name "*.rs" | head -1 >/dev/null 2>&1; then + TEST_PACKAGE_PATH="$test_dir" + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Found test package: $TEST_PACKAGE_PATH${NC}" + fi + return 0 + fi + # Check if it contains Cargo.toml files (subdirectories with packages) + if find "$test_dir" -name "Cargo.toml" | head -1 >/dev/null 2>&1; then + TEST_PACKAGE_PATH=$(find "$test_dir" -name "Cargo.toml" | head -1 | xargs dirname) + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Found test package: $TEST_PACKAGE_PATH${NC}" + fi + return 0 + fi + fi + done + + # Look for existing benchmark files + if [[ -d 
"./benches" ]]; then + TEST_PACKAGE_PATH="./benches" + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Using benchmark directory: $TEST_PACKAGE_PATH${NC}" + fi + return 0 + fi + + # Use the current project as test package + if [[ -f "./Cargo.toml" ]]; then + TEST_PACKAGE_PATH="." + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Using current project as test package${NC}" + fi + return 0 + fi + + echo -e "${RED}Error: No suitable test package found in the repository${NC}" + echo -e "${YELLOW}Searched in: ${TEST_PACKAGES[*]}${NC}" + echo -e "${YELLOW}Use --test-package to specify a custom location${NC}" + exit 1 } check_prerequisites() { - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Checking prerequisites...${NC}" - fi - - # Check Rust installation (any version is fine - we trust rust-toolchain.toml) - if ! command -v rustc >/dev/null 2>&1; then - echo -e "${RED}Error: Rust is not installed${NC}" - echo -e "${YELLOW}Please install Rust: https://rustup.rs/${NC}" - exit 1 - fi - - # Show current Rust version - local rust_version=$(rustc --version) - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Rust: $rust_version${NC}" - echo -e "${GREEN}✓ Cargo: $(cargo --version)${NC}" - echo -e "${GREEN}✓ Host: $(rustc -vV | grep host | cut -d' ' -f2)${NC}" - fi - - # Check if cargo-criterion is available - if command -v cargo-criterion >/dev/null 2>&1; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ cargo-criterion is available${NC}" - fi - else - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}! cargo-criterion not found, using cargo bench${NC}" - fi - fi - - # Find and validate test package - find_test_package - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo "" - fi + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Checking prerequisites...${NC}" + fi + + # Check Rust installation (any version is fine - we trust rust-toolchain.toml) + if ! 
command -v rustc >/dev/null 2>&1; then + echo -e "${RED}Error: Rust is not installed${NC}" + echo -e "${YELLOW}Please install Rust: https://rustup.rs/${NC}" + exit 1 + fi + + # Show current Rust version + local rust_version + rust_version=$(rustc --version) + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Rust: $rust_version${NC}" + echo -e "${GREEN}✓ Cargo: $(cargo --version)${NC}" + echo -e "${GREEN}✓ Host: $(rustc -vV | grep host | cut -d' ' -f2)${NC}" + fi + + # Check if cargo-criterion is available + if command -v cargo-criterion >/dev/null 2>&1; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ cargo-criterion is available${NC}" + fi + else + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}! cargo-criterion not found, using cargo bench${NC}" + fi + fi + + # Find and validate test package + find_test_package + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo "" + fi } clean_build() { - if [[ "$CLEAN_BUILD" == "true" ]]; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Cleaning build artifacts...${NC}" - fi - cargo clean - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Build artifacts cleaned${NC}" - echo "" - fi - fi + if [[ "$CLEAN_BUILD" == "true" ]]; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Cleaning build artifacts...${NC}" + fi + cargo clean + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Build artifacts cleaned${NC}" + echo "" + fi + fi } build_rustowl() { - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Building RustOwl in release mode...${NC}" - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - ./scripts/build/toolchain cargo build --release - else - ./scripts/build/toolchain cargo build --release --quiet - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Build completed${NC}" - echo "" - fi + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Building RustOwl in release mode...${NC}" + fi + + if [[ 
"$SHOW_OUTPUT" == "true" ]]; then + ./scripts/build/toolchain cargo build --release + else + ./scripts/build/toolchain cargo build --release --quiet + fi + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Build completed${NC}" + echo "" + fi } run_benchmarks() { - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Running performance benchmarks...${NC}" - fi - - # Check if we have any benchmark files - if [[ -d "./benches" ]] && find "./benches" -name "*.rs" | head -1 >/dev/null 2>&1; then - # Prepare benchmark command - local bench_cmd="cargo bench" - local bench_args="" - - # Use cargo-criterion if available and not doing baseline operations - if command -v cargo-criterion >/dev/null 2>&1 && [[ -z "$SAVE_BASELINE" && "$COMPARE_MODE" != "true" ]]; then - bench_cmd="cargo criterion" - fi - - # Add baseline arguments if saving - if [[ -n "$SAVE_BASELINE" ]]; then - bench_args="$bench_args --bench rustowl_bench_simple -- --save-baseline $SAVE_BASELINE" - fi - - # Add baseline arguments if comparing - if [[ "$COMPARE_MODE" == "true" && -n "$LOAD_BASELINE" ]]; then - bench_args="$bench_args --bench rustowl_bench_simple -- --baseline $LOAD_BASELINE" - fi - - # If no baseline operations, run all benchmarks - if [[ -z "$SAVE_BASELINE" && "$COMPARE_MODE" != "true" ]]; then - bench_args="$bench_args --bench rustowl_bench_simple" - fi - - # Run the benchmarks - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${BLUE}Running: $bench_cmd $bench_args${NC}" - $bench_cmd $bench_args - else - $bench_cmd $bench_args --quiet 2>/dev/null || $bench_cmd $bench_args >/dev/null 2>&1 - fi - else - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}! 
No benchmark files found in ./benches, skipping Criterion benchmarks${NC}" - fi - fi - - # Run specific RustOwl analysis benchmarks using real test data - if [[ -f "./target/release/rustowl" || -f "./target/release/rustowl.exe" ]]; then - local rustowl_binary="./target/release/rustowl" - if [[ -f "./target/release/rustowl.exe" ]]; then - rustowl_binary="./target/release/rustowl.exe" - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Running RustOwl analysis benchmark on: $TEST_PACKAGE_PATH${NC}" - fi - - # Time the analysis of the test package - local start_time=$(date +%s.%N 2>/dev/null || date +%s) - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - timeout 120 "$rustowl_binary" check "$TEST_PACKAGE_PATH" 2>/dev/null || true - else - timeout 120 "$rustowl_binary" check "$TEST_PACKAGE_PATH" >/dev/null 2>&1 || true - fi - - local end_time=$(date +%s.%N 2>/dev/null || date +%s) - - # Calculate duration (handle both nanosecond and second precision) - local duration - if command -v bc >/dev/null 2>&1 && [[ "$start_time" == *.* ]]; then - duration=$(echo "$end_time - $start_time" | bc -l 2>/dev/null || echo "N/A") - else - duration=$((end_time - start_time)) - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Analysis completed in ${duration}s${NC}" - fi - - # Save timing info for comparison - if [[ -n "$SAVE_BASELINE" ]]; then - mkdir -p "baselines/performance/$SAVE_BASELINE" - echo "$duration" > "baselines/performance/$SAVE_BASELINE/analysis_time.txt" - echo "$TEST_PACKAGE_PATH" > "baselines/performance/$SAVE_BASELINE/test_package.txt" - # Copy Criterion benchmark results for local development - if [[ -d "target/criterion" ]]; then - cp -r "target/criterion" "baselines/performance/$SAVE_BASELINE/criterion" - fi - fi - - # Compare timing if in compare mode - if [[ "$COMPARE_MODE" == "true" && -f "baselines/performance/$LOAD_BASELINE/analysis_time.txt" ]]; then - local baseline_time=$(cat 
"baselines/performance/$LOAD_BASELINE/analysis_time.txt") - compare_analysis_times "$baseline_time" "$duration" - fi - else - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}! RustOwl binary not found, skipping analysis benchmark${NC}" - fi - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ Benchmarks completed${NC}" - echo "" - fi + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Running performance benchmarks...${NC}" + fi + + # Check if we have any benchmark files + if [[ -d "./benches" ]] && find "./benches" -name "*.rs" | head -1 >/dev/null 2>&1; then + # Prepare benchmark command + local bench_cmd="cargo bench" + local bench_args=() + + # Use cargo-criterion if available and not doing baseline operations + if command -v cargo-criterion >/dev/null 2>&1 && [[ -z "$SAVE_BASELINE" && "$COMPARE_MODE" != "true" ]]; then + bench_cmd="cargo criterion" + fi + + # Add all benchmark names defined in BENCHMARK_NAME array + if [[ "${#BENCHMARK_NAME[@]}" -gt 0 ]]; then + for bn in "${BENCHMARK_NAME[@]}"; do + bench_args+=(--bench "$bn") + done + fi + + # Baseline save / compare options (Criterion) + if [[ -n "$SAVE_BASELINE" ]]; then + bench_args+=(-- --save-baseline "$SAVE_BASELINE") + elif [[ "$COMPARE_MODE" == "true" && -n "$LOAD_BASELINE" ]]; then + bench_args+=(-- --baseline "$LOAD_BASELINE") + fi + + # Run the benchmarks + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${BLUE}Running: $bench_cmd ${bench_args[*]}${NC}" + # shellcheck disable=SC2086 + $bench_cmd "${bench_args[@]}" + else + # shellcheck disable=SC2086 + $bench_cmd "${bench_args[@]}" --quiet 2>/dev/null || $bench_cmd "${bench_args[@]}" >/dev/null 2>&1 + fi + else + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}! 
No benchmark files found in ./benches, skipping Criterion benchmarks${NC}" + fi + fi + + # Run specific RustOwl analysis benchmarks using real test data + if [[ -f "./target/release/rustowl" || -f "./target/release/rustowl.exe" ]]; then + local rustowl_binary="./target/release/rustowl" + if [[ -f "./target/release/rustowl.exe" ]]; then + rustowl_binary="./target/release/rustowl.exe" + fi + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Running RustOwl analysis benchmark on: $TEST_PACKAGE_PATH${NC}" + fi + + # Time the analysis of the test package + local start_time end_time duration + start_time=$(date +%s.%N 2>/dev/null || date +%s) + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + timeout 120 "$rustowl_binary" check "$TEST_PACKAGE_PATH" 2>/dev/null || true + else + timeout 120 "$rustowl_binary" check "$TEST_PACKAGE_PATH" >/dev/null 2>&1 || true + fi + + end_time=$(date +%s.%N 2>/dev/null || date +%s) + + # Calculate duration (handle both nanosecond and second precision) + if command -v bc >/dev/null 2>&1 && [[ "$start_time" == *.* ]]; then + duration=$(echo "$end_time - $start_time" | bc -l 2>/dev/null || echo "N/A") + else + duration=$((end_time - start_time)) + fi + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Analysis completed in ${duration}s${NC}" + fi + + # Save timing info for comparison + if [[ -n "$SAVE_BASELINE" ]]; then + mkdir -p "baselines/performance/$SAVE_BASELINE" + echo "$duration" >"baselines/performance/$SAVE_BASELINE/analysis_time.txt" + echo "$TEST_PACKAGE_PATH" >"baselines/performance/$SAVE_BASELINE/test_package.txt" + # Copy Criterion benchmark results for local development + if [[ -d "target/criterion" ]]; then + cp -r "target/criterion" "baselines/performance/$SAVE_BASELINE/criterion" + fi + fi + + # Compare timing if in compare mode + if [[ "$COMPARE_MODE" == "true" && -f "baselines/performance/$LOAD_BASELINE/analysis_time.txt" ]]; then + local baseline_time + baseline_time=$(cat 
"baselines/performance/$LOAD_BASELINE/analysis_time.txt") + compare_analysis_times "$baseline_time" "$duration" + fi + else + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}! RustOwl binary not found, skipping analysis benchmark${NC}" + fi + fi + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ Benchmarks completed${NC}" + echo "" + fi } compare_analysis_times() { - local baseline_time="$1" - local current_time="$2" - - if [[ "$baseline_time" == "N/A" || "$current_time" == "N/A" ]]; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}! Could not compare analysis times (timing unavailable)${NC}" - fi - return 0 - fi - - # Calculate percentage change - local change=0 - if command -v bc >/dev/null 2>&1; then - change=$(echo "scale=2; (($current_time - $baseline_time) / $baseline_time) * 100" | bc -l 2>/dev/null || echo 0) - fi - local threshold_num=$(echo "$REGRESSION_THRESHOLD" | tr -d '%') - # Report comparison - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${BLUE}Analysis Time Comparison:${NC}" - echo -e " Baseline: ${baseline_time}s" - echo -e " Current: ${current_time}s" - echo -e " Change: ${change}%" - fi - # Flag regression only on slowdown beyond threshold - if (( $(echo "$change > $threshold_num" | bc -l 2>/dev/null || echo 0) )); then - [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${RED}⚠ Performance regression detected! (+${change}% > ${REGRESSION_THRESHOLD})${NC}" - return 1 - # Improvement beyond threshold - elif (( $(echo "$change < -$threshold_num" | bc -l 2>/dev/null || echo 0) )); then - [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${GREEN}✓ Performance improvement detected! (${change}%)${NC}" - else - [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${GREEN}✓ Performance within acceptable range (±${threshold_num}%)${NC}" - fi + local baseline_time="$1" + local current_time="$2" + + if [[ "$baseline_time" == "N/A" || "$current_time" == "N/A" ]]; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}! 
Could not compare analysis times (timing unavailable)${NC}" + fi + return 0 + fi + + # Calculate percentage change + local change=0 + if command -v bc >/dev/null 2>&1; then + change=$(echo "scale=2; (($current_time - $baseline_time) / $baseline_time) * 100" | bc -l 2>/dev/null || echo 0) + fi + local threshold_num + threshold_num=$(echo "$REGRESSION_THRESHOLD" | tr -d '%') + # Report comparison + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${BLUE}Analysis Time Comparison:${NC}" + echo -e " Baseline: ${baseline_time}s" + echo -e " Current: ${current_time}s" + echo -e " Change: ${change}%" + fi + # Flag regression only on slowdown beyond threshold + if (($(echo "$change > $threshold_num" | bc -l 2>/dev/null || echo 0))); then + [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${RED}⚠ Performance regression detected! (+${change}% > ${REGRESSION_THRESHOLD})${NC}" + return 1 + # Improvement beyond threshold + elif (($(echo "$change < -$threshold_num" | bc -l 2>/dev/null || echo 0))); then + [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${GREEN}✓ Performance improvement detected! 
(${change}%)${NC}" + else + [[ "$SHOW_OUTPUT" == "true" ]] && echo -e "${GREEN}✓ Performance within acceptable range (±${threshold_num}%)${NC}" + fi } # Analyze benchmark output for regressions analyze_regressions() { - if [[ "$COMPARE_MODE" != "true" ]]; then - return 0 - fi - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Analyzing benchmark results for regressions...${NC}" - fi - - # Look for Criterion output files - local criterion_dir="target/criterion" - local regression_found=false - - if [[ -d "$criterion_dir" ]]; then - # Only do detailed HTML check in non-verbose (CI) mode - if [[ "$SHOW_OUTPUT" == "false" ]]; then - # Check for regression indicators in Criterion reports - if find "$criterion_dir" -name "*.html" -print0 2>/dev/null | xargs -0 grep -l "regressed\|slower" 2>/dev/null | head -1 >/dev/null; then - regression_found=true - fi - fi - - # Create a comprehensive summary file for CI - if [[ -f "$criterion_dir/report/index.html" ]]; then - cat > benchmark-summary.txt << EOF + if [[ "$COMPARE_MODE" != "true" ]]; then + return 0 + fi + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Analyzing benchmark results for regressions...${NC}" + fi + + # Look for Criterion output files + local criterion_dir="target/criterion" + local regression_found=false + + if [[ -d "$criterion_dir" ]]; then + # Only do detailed HTML check in non-verbose (CI) mode + if [[ "$SHOW_OUTPUT" == "false" ]]; then + # Check for regression indicators in Criterion reports + if find "$criterion_dir" -name "*.html" -print0 2>/dev/null | xargs -0 grep -l "regressed\|slower" 2>/dev/null | head -1 >/dev/null; then + regression_found=true + fi + fi + + # Create a comprehensive summary file for CI + if [[ -f "$criterion_dir/report/index.html" ]]; then + cat >benchmark-summary.txt </dev/null 2>&1; then - echo "### Detailed Timings (JSON extracted)" >> benchmark-summary.txt - find "$criterion_dir" -name "estimates.json" -exec bash -c ' + + # Extract key timing 
information from JSON files + if command -v jq >/dev/null 2>&1; then + echo "### Detailed Timings (JSON extracted)" >>benchmark-summary.txt + find "$criterion_dir" -name "estimates.json" -exec bash -c ' dir=$(dirname "$1" | sed "s|target/criterion/||") val=$(jq -r ".mean.point_estimate" "$1" 2>/dev/null || echo "N/A") if [ "$val" != "N/A" ] && [ "$val" != "null" ]; then @@ -479,102 +486,106 @@ EOF echo "$dir: ${sec}s" else echo "$dir: N/A" - fi' bash {} \; | sort >> benchmark-summary.txt 2>/dev/null || true - - # Add summary statistics - echo "" >> benchmark-summary.txt - echo "### Summary Statistics" >> benchmark-summary.txt - echo "Sample Size: $(find "$criterion_dir" -name "sample.json" | head -1 | xargs jq -r 'length' 2>/dev/null || echo 'N/A') measurements per benchmark" >> benchmark-summary.txt - measurement_time=$(find "$criterion_dir" -name "estimates.json" -exec jq -r ".measurement_time" {} 2>/dev/null | head -1 || echo "300") - echo "Measurement Time: ${measurement_time}s per benchmark" >> benchmark-summary.txt - echo "Warm-up Time: 5s per benchmark" >> benchmark-summary.txt - else - echo "### Quick Summary (grep extracted)" >> benchmark-summary.txt - find "$criterion_dir" -name "*.json" -exec grep -h "\"mean\"" {} \; 2>/dev/null | head -10 >> benchmark-summary.txt || true - fi - - # Add regression status if comparing - if [[ "$COMPARE_MODE" == "true" ]]; then - echo "" >> benchmark-summary.txt - echo "## Regression Analysis" >> benchmark-summary.txt - if [[ "$regression_found" == "true" ]]; then - echo "⚠️ REGRESSION DETECTED" >> benchmark-summary.txt - else - echo "✅ No significant regressions" >> benchmark-summary.txt - fi - echo "Threshold: $REGRESSION_THRESHOLD" >> benchmark-summary.txt - fi - fi - fi - - if [[ "$regression_found" == "true" ]]; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${RED}⚠ Performance regressions detected in detailed analysis${NC}" - echo -e "${YELLOW}Check the HTML report for details: 
target/criterion/report/index.html${NC}" - fi - return 1 - else - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${GREEN}✓ No significant regressions detected${NC}" - fi - return 0 - fi + fi' bash {} \; | sort >>benchmark-summary.txt 2>/dev/null || true + + # Add summary statistics + echo "" >>benchmark-summary.txt + echo "### Summary Statistics" >>benchmark-summary.txt + echo "Sample Size: $(find "$criterion_dir" -name "sample.json" | head -1 | xargs jq -r 'length' 2>/dev/null || echo 'N/A') measurements per benchmark" >>benchmark-summary.txt + local measurement_time="300" first_estimate + first_estimate=$(find "$criterion_dir" -name "estimates.json" -print -quit 2>/dev/null || true) + if [[ -n "$first_estimate" ]]; then + measurement_time=$(jq -r '.measurement_time // 300' "$first_estimate" 2>/dev/null || echo "300") + fi + echo "Measurement Time: ${measurement_time}s per benchmark" >>benchmark-summary.txt + echo "Warm-up Time: 5s per benchmark" >>benchmark-summary.txt + else + echo "### Quick Summary (grep extracted)" >>benchmark-summary.txt + find "$criterion_dir" -name "*.json" -exec grep -h "\"mean\"" {} \; 2>/dev/null | head -10 >>benchmark-summary.txt || true + fi + + # Add regression status if comparing + if [[ "$COMPARE_MODE" == "true" ]]; then + echo "" >>benchmark-summary.txt + echo "## Regression Analysis" >>benchmark-summary.txt + if [[ "$regression_found" == "true" ]]; then + echo "⚠️ REGRESSION DETECTED" >>benchmark-summary.txt + else + echo "✅ No significant regressions" >>benchmark-summary.txt + fi + echo "Threshold: $REGRESSION_THRESHOLD" >>benchmark-summary.txt + fi + fi + fi + + if [[ "$regression_found" == "true" ]]; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${RED}⚠ Performance regressions detected in detailed analysis${NC}" + echo -e "${YELLOW}Check the HTML report for details: target/criterion/report/index.html${NC}" + fi + return 1 + else + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${GREEN}✓ No significant 
regressions detected${NC}" + fi + return 0 + fi } open_report() { - if [[ "$OPEN_REPORT" == "true" && -f "target/criterion/report/index.html" ]]; then - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Opening benchmark report...${NC}" - fi - - # Try to open the report in the default browser - if command -v xdg-open >/dev/null 2>&1; then - xdg-open "target/criterion/report/index.html" 2>/dev/null & - elif command -v open >/dev/null 2>&1; then - open "target/criterion/report/index.html" 2>/dev/null & - elif command -v start >/dev/null 2>&1; then - start "target/criterion/report/index.html" 2>/dev/null & - else - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${YELLOW}Could not auto-open report. Please open: target/criterion/report/index.html${NC}" - fi - fi - fi + if [[ "$OPEN_REPORT" == "true" && -f "target/criterion/report/index.html" ]]; then + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Opening benchmark report...${NC}" + fi + + # Try to open the report in the default browser + if command -v xdg-open >/dev/null 2>&1; then + xdg-open "target/criterion/report/index.html" 2>/dev/null & + elif command -v open >/dev/null 2>&1; then + open "target/criterion/report/index.html" 2>/dev/null & + elif command -v start >/dev/null 2>&1; then + start "target/criterion/report/index.html" 2>/dev/null & + else + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${YELLOW}Could not auto-open report. 
Please open: target/criterion/report/index.html${NC}" + fi + fi + fi } show_results_location() { - if [[ "$SHOW_OUTPUT" == "true" ]]; then - echo -e "${BLUE}${BOLD}Results Location:${NC}" - - if [[ -f "target/criterion/report/index.html" ]]; then - echo -e "${GREEN}✓ HTML Report: target/criterion/report/index.html${NC}" - fi - - if [[ -n "$SAVE_BASELINE" && -d "baselines/performance/$SAVE_BASELINE" ]]; then - echo -e "${GREEN}✓ Saved baseline: baselines/performance/$SAVE_BASELINE/${NC}" - fi - - if [[ -f "benchmark-summary.txt" ]]; then - echo -e "${GREEN}✓ Summary: benchmark-summary.txt${NC}" - fi - - echo -e "${BLUE}✓ Test package used: $TEST_PACKAGE_PATH${NC}" - - echo "" - echo -e "${YELLOW}Tips:${NC}" - echo -e " • Use --open to automatically open the HTML report" - echo -e " • Use --save to create a baseline for future comparisons" - echo -e " • Use --load to compare against a saved baseline" - echo -e " • Use --test-package to benchmark specific test data" - echo "" - fi + if [[ "$SHOW_OUTPUT" == "true" ]]; then + echo -e "${BLUE}${BOLD}Results Location:${NC}" + + if [[ -f "target/criterion/report/index.html" ]]; then + echo -e "${GREEN}✓ HTML Report: target/criterion/report/index.html${NC}" + fi + + if [[ -n "$SAVE_BASELINE" && -d "baselines/performance/$SAVE_BASELINE" ]]; then + echo -e "${GREEN}✓ Saved baseline: baselines/performance/$SAVE_BASELINE/${NC}" + fi + + if [[ -f "benchmark-summary.txt" ]]; then + echo -e "${GREEN}✓ Summary: benchmark-summary.txt${NC}" + fi + + echo -e "${BLUE}✓ Test package used: $TEST_PACKAGE_PATH${NC}" + + echo "" + echo -e "${YELLOW}Tips:${NC}" + echo -e " • Use --open to automatically open the HTML report" + echo -e " • Use --save to create a baseline for future comparisons" + echo -e " • Use --load to compare against a saved baseline" + echo -e " • Use --test-package to benchmark specific test data" + echo "" + fi } # Create a basic summary file even without detailed Criterion data create_basic_summary() { - # Create a 
basic summary file even without detailed Criterion data - if [[ ! -f "benchmark-summary.txt" ]]; then - cat > benchmark-summary.txt << EOF + # Create a basic summary file even without detailed Criterion data + if [[ ! -f "benchmark-summary.txt" ]]; then + cat >benchmark-summary.txt <> benchmark-summary.txt - fi - - # Add comparison info if available - if [[ "$COMPARE_MODE" == "true" && -f "baselines/performance/$LOAD_BASELINE/analysis_time.txt" ]]; then - local baseline_time=$(cat "baselines/performance/$LOAD_BASELINE/analysis_time.txt") - echo "Baseline Time: ${baseline_time}s" >> benchmark-summary.txt - echo "Threshold: $REGRESSION_THRESHOLD" >> benchmark-summary.txt - fi - - # Add build info - echo "" >> benchmark-summary.txt - echo "## Environment" >> benchmark-summary.txt - echo "Rust Version: $(rustc --version 2>/dev/null || echo 'Unknown')" >> benchmark-summary.txt - echo "Host: $(rustc -vV 2>/dev/null | grep host | cut -d' ' -f2 || echo 'Unknown')" >> benchmark-summary.txt - fi + + # Add analysis timing if available + if [[ -n "$SAVE_BASELINE" && -f "baselines/performance/$SAVE_BASELINE/analysis_time.txt" ]]; then + local analysis_time + analysis_time=$(cat "baselines/performance/$SAVE_BASELINE/analysis_time.txt") + echo "Analysis Time: ${analysis_time}s" >>benchmark-summary.txt + fi + + # Add comparison info if available + if [[ "$COMPARE_MODE" == "true" && -f "baselines/performance/$LOAD_BASELINE/analysis_time.txt" ]]; then + local baseline_time + baseline_time=$(cat "baselines/performance/$LOAD_BASELINE/analysis_time.txt") + echo "Baseline Time: ${baseline_time}s" >>benchmark-summary.txt + echo "Threshold: $REGRESSION_THRESHOLD" >>benchmark-summary.txt + fi + + # Add build info + echo "" >>benchmark-summary.txt + echo "## Environment" >>benchmark-summary.txt + echo "Rust Version: $(rustc --version 2>/dev/null || echo 'Unknown')" >>benchmark-summary.txt + echo "Host: $(rustc -vV 2>/dev/null | grep host | cut -d' ' -f2 || echo 'Unknown')" 
>>benchmark-summary.txt + fi } # Main execution main() { - print_header - check_prerequisites - clean_build - build_rustowl - run_benchmarks - - # Check for regressions and set exit code - local exit_code=0 - if ! analyze_regressions; then - exit_code=1 - fi - - # Ensure we have a summary file for CI - create_basic_summary - - open_report - show_results_location - - if [[ "$SHOW_OUTPUT" == "true" ]]; then - if [[ $exit_code -eq 0 ]]; then - echo -e "${GREEN}${BOLD}✓ Benchmark completed successfully!${NC}" - else - echo -e "${RED}${BOLD}⚠ Benchmark completed with performance regressions detected${NC}" - fi - fi - - exit $exit_code + print_header + check_prerequisites + clean_build + build_rustowl + run_benchmarks + + # Check for regressions and set exit code + local exit_code=0 + if ! analyze_regressions; then + exit_code=1 + fi + + # Ensure we have a summary file for CI + create_basic_summary + + open_report + show_results_location + + if [[ "$SHOW_OUTPUT" == "true" ]]; then + if [[ $exit_code -eq 0 ]]; then + echo -e "${GREEN}${BOLD}✓ Benchmark completed successfully!${NC}" + else + echo -e "${RED}${BOLD}⚠ Benchmark completed with performance regressions detected${NC}" + fi + fi + + exit $exit_code } # Run main function diff --git a/scripts/build/channel b/scripts/build/channel index 636ea711..7f229af9 100644 --- a/scripts/build/channel +++ b/scripts/build/channel @@ -1 +1 @@ -1.89.0 +1.92.0 diff --git a/scripts/build/print-env.sh b/scripts/build/print-env.sh index e1242764..6f97501d 100755 --- a/scripts/build/print-env.sh +++ b/scripts/build/print-env.sh @@ -1,67 +1,86 @@ #!/bin/sh -e if [ $# -ne 1 ]; then - echo "Usage: $0 " - echo "Example: $0 1.89.0" - exit 1 + echo "Usage: $0 " + echo "Example: $0 1.92.0" + exit 1 fi TOOLCHAIN_CHANNEL="$1" # print host-tuple host_tuple() { - if [ -z "$TOOLCHAIN_OS" ]; then - # Get OS - case "$(uname -s)" in - Linux) - TOOLCHAIN_OS="unknown-linux-gnu" - ;; - Darwin) - TOOLCHAIN_OS="apple-darwin" - ;; - 
CYGWIN*|MINGW32*|MSYS*|MINGW*) - TOOLCHAIN_OS="pc-windows-msvc" - ;; - *) - echo "Unsupported OS: $(uname -s)" >&2 - exit 1 - ;; - esac - fi + if [ -z "$TOOLCHAIN_OS" ]; then + # Get OS + case "$(uname -s)" in + Linux) + TOOLCHAIN_OS="unknown-linux-gnu" + ;; + Darwin) + TOOLCHAIN_OS="apple-darwin" + ;; + CYGWIN* | MINGW32* | MSYS* | MINGW*) + TOOLCHAIN_OS="pc-windows-msvc" + ;; + *) + echo "Unsupported OS: $(uname -s)" >&2 + exit 1 + ;; + esac + fi - if [ -z "$TOOLCHAIN_ARCH" ]; then - # Get architecture - case "$(uname -m)" in - arm64|aarch64) - TOOLCHAIN_ARCH="aarch64" - ;; - x86_64|amd64) - TOOLCHAIN_ARCH="x86_64" - ;; - *) - echo "Unsupported architecture: $(uname -m)" >&2 - exit 1 - ;; - esac - fi + if [ -z "$TOOLCHAIN_ARCH" ]; then + # Get architecture + # + # On Windows CI (MSYS2/Git-Bash), `uname -m` often reports the MSYS + # environment (e.g. x86_64) even on Windows ARM64. Prefer Windows-provided + # env vars when available. + # Prefer signals from GitHub Actions runner / WOW env. 
+ arch_hint="${RUNNER_ARCH:-${PROCESSOR_ARCHITEW6432:-${PROCESSOR_ARCHITECTURE:-${MSYSTEM_CARCH:-}}}}" + case "${arch_hint}" in + ARM64 | arm64 | aarch64) + TOOLCHAIN_ARCH="aarch64" + ;; + AMD64 | X64 | amd64 | x86_64) + TOOLCHAIN_ARCH="x86_64" + ;; + "") + case "$(uname -m)" in + arm64 | aarch64) + TOOLCHAIN_ARCH="aarch64" + ;; + x86_64 | amd64) + TOOLCHAIN_ARCH="x86_64" + ;; + *) + echo "Unsupported architecture: $(uname -m)" >&2 + exit 1 + ;; + esac + ;; + *) + echo "Unsupported architecture hint: ${arch_hint}" >&2 + exit 1 + ;; + esac + fi - echo "$TOOLCHAIN_ARCH-$TOOLCHAIN_OS" + echo "$TOOLCHAIN_ARCH-$TOOLCHAIN_OS" } print_toolchain() { - echo "${TOOLCHAIN_CHANNEL}-$(host_tuple)" + echo "${TOOLCHAIN_CHANNEL}-$(host_tuple)" } - print_env() { - echo "TOOLCHAIN_CHANNEL=${TOOLCHAIN_CHANNEL}" - toolchain="$(print_toolchain)" - echo "RUSTOWL_TOOLCHAIN=$toolchain" - echo "HOST_TUPLE=$(host_tuple)" - sysroot="${SYSROOT:-"$HOME/.rustowl/sysroot/$toolchain"}" - echo "SYSROOT=$sysroot" - echo "PATH=$sysroot/bin:$PATH" - echo "RUSTC_BOOTSTRAP=rustowlc" + echo "TOOLCHAIN_CHANNEL=${TOOLCHAIN_CHANNEL}" + toolchain="$(print_toolchain)" + echo "RUSTOWL_TOOLCHAIN=$toolchain" + echo "HOST_TUPLE=$(host_tuple)" + sysroot="${SYSROOT:-"$HOME/.rustowl/sysroot/$toolchain"}" + echo "SYSROOT=$sysroot" + echo "PATH=$sysroot/bin:$PATH" + echo "RUSTC_BOOTSTRAP=rustowlc" } print_env diff --git a/scripts/bump.sh b/scripts/bump.sh index d07c9613..2ef346d2 100755 --- a/scripts/bump.sh +++ b/scripts/bump.sh @@ -1,16 +1,19 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to update version numbers in multiple files and create a git tag # Usage: ./bump.sh v0.3.1 # Ensure a version argument is provided if [ $# -ne 1 ]; then - echo "Usage: $0 " - echo "Example: $0 v0.3.1" - exit 1 + echo "Usage: $0 " + echo "Example: $0 v0.3.1" + exit 1 fi -[[ $(which gsed > /dev/null 2>&1; echo $?) = 0 ]] && sed="gsed" || sed="sed" +[[ $( + which gsed >/dev/null 2>&1 + echo $? 
+) = 0 ]] && sed="gsed" || sed="sed" VERSION=$1 VERSION_WITHOUT_V="${VERSION#v}" @@ -19,57 +22,57 @@ echo "Updating to version: $VERSION" # Check if version contains alpha, beta, rc, dev, or other pre-release identifiers if echo "$VERSION_WITHOUT_V" | grep -q -E 'alpha|beta|rc|dev|pre|snapshot'; then - IS_PRERELEASE=true - echo "Pre-release version detected ($VERSION_WITHOUT_V). PKGBUILD will not be updated." + IS_PRERELEASE=true + echo "Pre-release version detected ($VERSION_WITHOUT_V). PKGBUILD will not be updated." else - IS_PRERELEASE=false - echo "Stable version detected ($VERSION_WITHOUT_V)." + IS_PRERELEASE=false + echo "Stable version detected ($VERSION_WITHOUT_V)." fi # 1. Update Cargo.toml in root directory (only the first version field) if [ -f Cargo.toml ]; then - echo "Updating Cargo.toml..." - # Use sed to replace only the first occurrence of the version line - $sed -i '0,/^version = .*/{s/^version = .*/version = "'$VERSION_WITHOUT_V'"/}' Cargo.toml + echo "Updating Cargo.toml..." + # Use sed to replace only the first occurrence of the version line + $sed -i '0,/^version = .*/{s/^version = .*/version = "'"$VERSION_WITHOUT_V"'"/}' Cargo.toml else - echo "Error: Cargo.toml not found in current directory" - exit 1 + echo "Error: Cargo.toml not found in current directory" + exit 1 fi # 2. Update vscode/package.json if [ -f vscode/package.json ]; then - echo "Updating vscode/package.json..." - # Use sed to replace the "version": "x.x.x" line - $sed -i "s/\"version\": \".*\"/\"version\": \"$VERSION_WITHOUT_V\"/" vscode/package.json + echo "Updating vscode/package.json..." + # Use sed to replace the "version": "x.x.x" line + $sed -i "s/\"version\": \".*\"/\"version\": \"$VERSION_WITHOUT_V\"/" vscode/package.json else - echo "Warning: vscode/package.json not found" + echo "Warning: vscode/package.json not found" fi # 3. Update aur/PKGBUILD only for stable releases if [ "$IS_PRERELEASE" = false ] && [ -f aur/PKGBUILD ]; then - echo "Updating aur/PKGBUILD..." 
- # Use sed to replace the pkgver line - $sed -i "s/^pkgver=.*/pkgver=$VERSION_WITHOUT_V/" aur/PKGBUILD + echo "Updating aur/PKGBUILD..." + # Use sed to replace the pkgver line + $sed -i "s/^pkgver=.*/pkgver=$VERSION_WITHOUT_V/" aur/PKGBUILD elif [ -f aur/PKGBUILD ]; then - echo "Skipping aur/PKGBUILD update for pre-release version" + echo "Skipping aur/PKGBUILD update for pre-release version" else - echo "Warning: aur/PKGBUILD not found" + echo "Warning: aur/PKGBUILD not found" fi # 4. Update aur/PKGBUILD-BIN only for stable releases if [ "$IS_PRERELEASE" = false ] && [ -f aur/PKGBUILD-BIN ]; then - echo "Updating aur/PKGBUILD..." - # Use sed to replace the pkgver line - $sed -i "s/^pkgver=.*/pkgver=$VERSION_WITHOUT_V/" aur/PKGBUILD-BIN + echo "Updating aur/PKGBUILD-BIN..." + # Use sed to replace the pkgver line + $sed -i "s/^pkgver=.*/pkgver=$VERSION_WITHOUT_V/" aur/PKGBUILD-BIN elif [ -f aur/PKGBUILD-BIN ]; then - echo "Skipping aur/PKGBUILD-BIN update for pre-release version" + echo "Skipping aur/PKGBUILD-BIN update for pre-release version" else - echo "Warning: aur/PKGBUILD-BIN not found" + echo "Warning: aur/PKGBUILD-BIN not found" fi # 5. Create a git tag echo "Creating git tag: $VERSION" -git tag $VERSION +git tag "$VERSION" echo "Version bump complete. Changes have been made to the files." echo "Remember to commit your changes before pushing the tag." diff --git a/scripts/dev-checks.sh b/scripts/dev-checks.sh index 0d4ec360..f8895b1b 100755 --- a/scripts/dev-checks.sh +++ b/scripts/dev-checks.sh @@ -10,7 +10,7 @@ REPO_ROOT="$(cd "$SCRIPT_DIR/.." 
&& pwd)" cd "$REPO_ROOT" -RUST_MIN_VERSION="1.87" +RUST_MIN_VERSION=$(cat "$SCRIPT_DIR/build/channel") AUTO_FIX=false # Colors for output @@ -21,268 +21,268 @@ BLUE='\033[0;34m' NC='\033[0m' # No Color show_help() { - echo "RustOwl Development Checks and Fixes" - echo "" - echo "USAGE:" - echo " $0 [OPTIONS]" - echo "" - echo "OPTIONS:" - echo " -h, --help Show this help message" - echo " -f, --fix Automatically fix issues where possible" - echo " --check-only Only run checks, don't fix anything (default)" - echo "" - echo "CHECKS PERFORMED:" - echo " - Rust toolchain version (minimum $RUST_MIN_VERSION)" - echo " - Code formatting (rustfmt)" - echo " - Linting (clippy)" - echo " - Build test" - echo " - VS Code extension checks (if pnpm is available)" - echo "" - echo "FIXES APPLIED (with --fix):" - echo " - Format code with rustfmt" - echo " - Apply clippy suggestions where possible" - echo " - Format VS Code extension code" - echo "" - echo "EXAMPLES:" - echo " $0 # Run checks only" - echo " $0 --fix # Run checks and fix issues" - echo " $0 --check-only # Explicitly run checks only" + echo "RustOwl Development Checks and Fixes" + echo "" + echo "USAGE:" + echo " $0 [OPTIONS]" + echo "" + echo "OPTIONS:" + echo " -h, --help Show this help message" + echo " -f, --fix Automatically fix issues where possible" + echo " --check-only Only run checks, don't fix anything (default)" + echo "" + echo "CHECKS PERFORMED:" + echo " - Rust toolchain version (minimum $RUST_MIN_VERSION)" + echo " - Code formatting (rustfmt)" + echo " - Linting (clippy)" + echo " - Build test" + echo " - VS Code extension checks (if pnpm is available)" + echo "" + echo "FIXES APPLIED (with --fix):" + echo " - Format code with rustfmt" + echo " - Apply clippy suggestions where possible" + echo " - Format VS Code extension code" + echo "" + echo "EXAMPLES:" + echo " $0 # Run checks only" + echo " $0 --fix # Run checks and fix issues" + echo " $0 --check-only # Explicitly run checks only" } 
log_info() { - echo -e "${BLUE}[INFO]${NC} $1" + echo -e "${BLUE}[INFO]${NC} $1" } log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" + echo -e "${GREEN}[SUCCESS]${NC} $1" } log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" + echo -e "${YELLOW}[WARNING]${NC} $1" } log_error() { - echo -e "${RED}[ERROR]${NC} $1" + echo -e "${RED}[ERROR]${NC} $1" } check_rust_version() { - log_info "Checking Rust version..." - - if ! command -v rustc &> /dev/null; then - log_error "rustc not found. Please install Rust." - return 1 - fi - - local rust_version - rust_version=$(rustc --version | grep -oE '[0-9]+\.[0-9]+' | head -1) - - if [ -z "$rust_version" ]; then - log_error "Could not determine Rust version" - return 1 - fi - - # Compare versions (basic comparison for major.minor) - local min_major min_minor cur_major cur_minor - min_major=$(echo "$RUST_MIN_VERSION" | cut -d. -f1) - min_minor=$(echo "$RUST_MIN_VERSION" | cut -d. -f2) - cur_major=$(echo "$rust_version" | cut -d. -f1) - cur_minor=$(echo "$rust_version" | cut -d. -f2) - - if [ "$cur_major" -lt "$min_major" ] || - ([ "$cur_major" -eq "$min_major" ] && [ "$cur_minor" -lt "$min_minor" ]); then - log_error "Rust version $rust_version is below minimum required version $RUST_MIN_VERSION" - return 1 - fi - - log_success "Rust version $rust_version >= $RUST_MIN_VERSION" + log_info "Checking Rust version..." + + if ! command -v rustc &>/dev/null; then + log_error "rustc not found. Please install Rust." + return 1 + fi + + local rust_version + rust_version=$(rustc --version | grep -oE '[0-9]+\.[0-9]+' | head -1) + + if [ -z "$rust_version" ]; then + log_error "Could not determine Rust version" + return 1 + fi + + # Compare versions (basic comparison for major.minor) + local min_major min_minor cur_major cur_minor + min_major=$(echo "$RUST_MIN_VERSION" | cut -d. -f1) + min_minor=$(echo "$RUST_MIN_VERSION" | cut -d. -f2) + cur_major=$(echo "$rust_version" | cut -d. -f1) + cur_minor=$(echo "$rust_version" | cut -d. 
-f2) + + if [ "$cur_major" -lt "$min_major" ] || + ([ "$cur_major" -eq "$min_major" ] && [ "$cur_minor" -lt "$min_minor" ]); then + log_error "Rust version $rust_version is below minimum required version $RUST_MIN_VERSION" + return 1 + fi + + log_success "Rust version $rust_version >= $RUST_MIN_VERSION" } check_formatting() { - log_info "Checking code formatting..." - - if $AUTO_FIX; then - log_info "Applying code formatting..." - if cargo fmt; then - log_success "Code formatted successfully" - else - log_error "Failed to format code" - return 1 - fi - else - if cargo fmt --check; then - log_success "Code is properly formatted" - else - log_error "Code formatting issues found. Run with --fix to auto-format." - return 1 - fi - fi + log_info "Checking code formatting..." + + if $AUTO_FIX; then + log_info "Applying code formatting..." + if cargo fmt; then + log_success "Code formatted successfully" + else + log_error "Failed to format code" + return 1 + fi + else + if cargo fmt --check; then + log_success "Code is properly formatted" + else + log_error "Code formatting issues found. Run with --fix to auto-format." + return 1 + fi + fi } check_clippy() { - log_info "Running clippy lints..." - - if $AUTO_FIX; then - log_info "Applying clippy fixes where possible..." - # First try to fix what we can - if cargo clippy --fix --allow-dirty --allow-staged 2>/dev/null || true; then - log_info "Applied some clippy fixes" - fi - # Then check for remaining issues - if cargo clippy --all-targets --all-features -- -D warnings; then - log_success "All clippy checks passed" - else - log_warning "Some clippy issues remain that couldn't be auto-fixed" - return 1 - fi - else - if cargo clippy --all-targets --all-features -- -D warnings; then - log_success "All clippy checks passed" - else - log_error "Clippy found issues. Run with --fix to apply automatic fixes." - return 1 - fi - fi + log_info "Running clippy lints..." 
+ + if $AUTO_FIX; then + log_info "Applying clippy fixes where possible..." + # First try to fix what we can + if cargo clippy --fix --allow-dirty --allow-staged 2>/dev/null || true; then + log_info "Applied some clippy fixes" + fi + # Then check for remaining issues + if cargo clippy --all-targets --all-features -- -D warnings; then + log_success "All clippy checks passed" + else + log_warning "Some clippy issues remain that couldn't be auto-fixed" + return 1 + fi + else + if cargo clippy --all-targets --all-features -- -D warnings; then + log_success "All clippy checks passed" + else + log_error "Clippy found issues. Run with --fix to apply automatic fixes." + return 1 + fi + fi } check_build() { - log_info "Testing build..." - - if ./scrips/build/toolchain cargo build --release; then - log_success "Build successful" - else - log_error "Build failed" - return 1 - fi + log_info "Testing build..." + + if ./scripts/build/toolchain cargo build --release; then + log_success "Build successful" + else + log_error "Build failed" + return 1 + fi } check_tests() { - log_info "Checking for unit tests..." - - # Check if there are actual unit tests (not doc tests) - local unit_test_output - unit_test_output=$(cargo test --lib --bins 2>&1) - - # Count only unit tests, not doc tests - local unit_test_count - unit_test_count=$(echo "$unit_test_output" | grep -E "running [0-9]+ tests" | awk '{sum += $2} END {print sum+0}') - - if [ "$unit_test_count" -eq 0 ]; then - log_info "No unit tests found (this is expected for RustOwl)" - return 0 - else - log_info "Running $unit_test_count unit tests..." - if cargo test --lib --bins; then - log_success "All unit tests passed" - else - log_error "Some unit tests failed" - return 1 - fi - fi + log_info "Checking for unit tests..." 
+ + # Check if there are actual unit tests (not doc tests) + local unit_test_output + unit_test_output=$(cargo test --lib --bins 2>&1) + + # Count only unit tests, not doc tests + local unit_test_count + unit_test_count=$(echo "$unit_test_output" | grep -E "running [0-9]+ tests" | awk '{sum += $2} END {print sum+0}') + + if [ "$unit_test_count" -eq 0 ]; then + log_info "No unit tests found (this is expected for RustOwl)" + return 0 + else + log_info "Running $unit_test_count unit tests..." + if cargo test --lib --bins; then + log_success "All unit tests passed" + else + log_error "Some unit tests failed" + return 1 + fi + fi } check_vscode_extension() { - if [ ! -d "vscode" ]; then - log_info "VS Code extension directory not found, skipping" - return 0 - fi - - log_info "Checking VS Code extension..." - - if ! command -v pnpm &> /dev/null; then - log_warning "pnpm not found, skipping VS Code extension checks" - return 0 - fi - - cd vscode - - # Install dependencies if needed - if [ ! -d "node_modules" ]; then - log_info "Installing VS Code extension dependencies..." - pnpm install --frozen-lockfile - fi - - if $AUTO_FIX; then - log_info "Formatting VS Code extension code..." - if pnpm prettier --write src; then - log_success "VS Code extension code formatted" - else - log_warning "Failed to format VS Code extension code" - fi - else - if pnpm prettier --check src; then - log_success "VS Code extension code is properly formatted" - else - log_error "VS Code extension formatting issues found. Run with --fix to auto-format." - cd "$REPO_ROOT" - return 1 - fi - fi - - # Type checking and linting - if pnpm lint && pnpm check-types; then - log_success "VS Code extension checks passed" - else - log_error "VS Code extension checks failed" - cd "$REPO_ROOT" - return 1 - fi - - cd "$REPO_ROOT" + if [ ! -d "vscode" ]; then + log_info "VS Code extension directory not found, skipping" + return 0 + fi + + log_info "Checking VS Code extension..." + + if ! 
command -v pnpm &>/dev/null; then + log_warning "pnpm not found, skipping VS Code extension checks" + return 0 + fi + + cd vscode + + # Install dependencies if needed + if [ ! -d "node_modules" ]; then + log_info "Installing VS Code extension dependencies..." + pnpm install --frozen-lockfile + fi + + if $AUTO_FIX; then + log_info "Formatting VS Code extension code..." + if pnpm prettier --write src; then + log_success "VS Code extension code formatted" + else + log_warning "Failed to format VS Code extension code" + fi + else + if pnpm prettier --check src; then + log_success "VS Code extension code is properly formatted" + else + log_error "VS Code extension formatting issues found. Run with --fix to auto-format." + cd "$REPO_ROOT" + return 1 + fi + fi + + # Type checking and linting + if pnpm lint && pnpm check-types; then + log_success "VS Code extension checks passed" + else + log_error "VS Code extension checks failed" + cd "$REPO_ROOT" + return 1 + fi + + cd "$REPO_ROOT" } main() { - # Parse arguments - while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - show_help - exit 0 - ;; - -f|--fix) - AUTO_FIX=true - shift - ;; - --check-only) - AUTO_FIX=false - shift - ;; - *) - log_error "Unknown option: $1" - show_help - exit 1 - ;; - esac - done - - log_info "Starting development checks..." - if $AUTO_FIX; then - log_info "Auto-fix mode enabled" - else - log_info "Check-only mode (use --fix to enable auto-fixes)" - fi - echo "" - - local failed_checks=0 - - # Run all checks - check_rust_version || ((failed_checks++)) - check_formatting || ((failed_checks++)) - check_clippy || ((failed_checks++)) - check_build || ((failed_checks++)) - check_tests || ((failed_checks++)) - check_vscode_extension || ((failed_checks++)) - - echo "" - if [ $failed_checks -eq 0 ]; then - log_success "All development checks passed! ✅" - exit 0 - else - log_error "$failed_checks check(s) failed" - if ! 
$AUTO_FIX; then - log_info "Try running with --fix to automatically resolve some issues" - fi - exit 1 - fi + # Parse arguments + while [[ $# -gt 0 ]]; do + case $1 in + -h | --help) + show_help + exit 0 + ;; + -f | --fix) + AUTO_FIX=true + shift + ;; + --check-only) + AUTO_FIX=false + shift + ;; + *) + log_error "Unknown option: $1" + show_help + exit 1 + ;; + esac + done + + log_info "Starting development checks..." + if $AUTO_FIX; then + log_info "Auto-fix mode enabled" + else + log_info "Check-only mode (use --fix to enable auto-fixes)" + fi + echo "" + + local failed_checks=0 + + # Run all checks + check_rust_version || ((failed_checks++)) + check_formatting || ((failed_checks++)) + check_clippy || ((failed_checks++)) + check_build || ((failed_checks++)) + check_tests || ((failed_checks++)) + check_vscode_extension || ((failed_checks++)) + + echo "" + if [ $failed_checks -eq 0 ]; then + log_success "All development checks passed! ✅" + exit 0 + else + log_error "$failed_checks check(s) failed" + if ! 
$AUTO_FIX; then + log_info "Try running with --fix to automatically resolve some issues" + fi + exit 1 + fi } main "$@" diff --git a/scripts/package.json b/scripts/package.json deleted file mode 100644 index e8dc0246..00000000 --- a/scripts/package.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "devDependencies": { - "@commitlint/cli": "^20.3.1", - "@commitlint/config-conventional": "^20.3.1", - "@commitlint/format": "^20.3.1" - }, - "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748" -} diff --git a/scripts/pnpm-lock.yaml b/scripts/pnpm-lock.yaml deleted file mode 100644 index 0f3bde47..00000000 --- a/scripts/pnpm-lock.yaml +++ /dev/null @@ -1,789 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - devDependencies: - '@commitlint/cli': - specifier: ^20.3.1 - version: 20.3.1(@types/node@25.0.3)(typescript@5.9.2) - '@commitlint/config-conventional': - specifier: ^20.3.1 - version: 20.3.1 - '@commitlint/format': - specifier: ^20.3.1 - version: 20.3.1 - -packages: - - '@babel/code-frame@7.27.1': - resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} - engines: {node: '>=6.9.0'} - - '@babel/helper-validator-identifier@7.28.5': - resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} - engines: {node: '>=6.9.0'} - - '@commitlint/cli@20.3.1': - resolution: {integrity: sha512-NtInjSlyev/+SLPvx/ulz8hRE25Wf5S9dLNDcIwazq0JyB4/w1ROF/5nV0ObPTX8YpRaKYeKtXDYWqumBNHWsw==} - engines: {node: '>=v18'} - hasBin: true - - '@commitlint/config-conventional@20.3.1': - resolution: {integrity: sha512-NCzwvxepstBZbmVXsvg49s+shCxlJDJPWxXqONVcAtJH9wWrOlkMQw/zyl+dJmt8lyVopt5mwQ3mR5M2N2rUWg==} - engines: {node: '>=v18'} - - '@commitlint/config-validator@20.3.1': - resolution: 
{integrity: sha512-ErVLC/IsHhcvxCyh+FXo7jy12/nkQySjWXYgCoQbZLkFp4hysov8KS6CdxBB0cWjbZWjvNOKBMNoUVqkmGmahw==} - engines: {node: '>=v18'} - - '@commitlint/ensure@20.3.1': - resolution: {integrity: sha512-h664FngOEd7bHAm0j8MEKq+qm2mH+V+hwJiIE2bWcw3pzJMlO0TPKtk0ATyRAtV6jQw+xviRYiIjjSjfajiB5w==} - engines: {node: '>=v18'} - - '@commitlint/execute-rule@20.0.0': - resolution: {integrity: sha512-xyCoOShoPuPL44gVa+5EdZsBVao/pNzpQhkzq3RdtlFdKZtjWcLlUFQHSWBuhk5utKYykeJPSz2i8ABHQA+ZZw==} - engines: {node: '>=v18'} - - '@commitlint/format@20.3.1': - resolution: {integrity: sha512-jfsjGPFTd2Yti2YHwUH4SPRPbWKAJAwrfa3eNa9bXEdrXBb9mCwbIrgYX38LdEJK9zLJ3AsLBP4/FLEtxyu2AA==} - engines: {node: '>=v18'} - - '@commitlint/is-ignored@20.3.1': - resolution: {integrity: sha512-tWwAoh93QvAhxgp99CzCuHD86MgxE4NBtloKX+XxQxhfhSwHo7eloiar/yzx53YW9eqSLP95zgW2KDDk4/WX+A==} - engines: {node: '>=v18'} - - '@commitlint/lint@20.3.1': - resolution: {integrity: sha512-LaOtrQ24+6SfUaWg8A+a+Wc77bvLbO5RIr6iy9F7CI3/0iq1uPEWgGRCwqWTuLGHkZDAcwaq0gZ01zpwZ1jCGw==} - engines: {node: '>=v18'} - - '@commitlint/load@20.3.1': - resolution: {integrity: sha512-YDD9XA2XhgYgbjju8itZ/weIvOOobApDqwlPYCX5NLO/cPtw2UMO5Cmn44Ks8RQULUVI5fUT6roKvyxcoLbNmw==} - engines: {node: '>=v18'} - - '@commitlint/message@20.0.0': - resolution: {integrity: sha512-gLX4YmKnZqSwkmSB9OckQUrI5VyXEYiv3J5JKZRxIp8jOQsWjZgHSG/OgEfMQBK9ibdclEdAyIPYggwXoFGXjQ==} - engines: {node: '>=v18'} - - '@commitlint/parse@20.3.1': - resolution: {integrity: sha512-TuUTdbLpyUNLgDzLDYlI2BeTE6V/COZbf3f8WwsV0K6eq/2nSpNTMw7wHtXb+YxeY9wwxBp/Ldad4P+YIxHJoA==} - engines: {node: '>=v18'} - - '@commitlint/read@20.3.1': - resolution: {integrity: sha512-nCmJAdIg3OdNVUpQW0Idk/eF/vfOo2W2xzmvRmNeptLrzFK7qhwwl/kIwy1Q1LZrKHUFNj7PGNpIT5INbgZWzA==} - engines: {node: '>=v18'} - - '@commitlint/resolve-extends@20.3.1': - resolution: {integrity: sha512-iGTGeyaoDyHDEZNjD8rKeosjSNs8zYanmuowY4ful7kFI0dnY4b5QilVYaFQJ6IM27S57LAeH5sKSsOHy4bw5w==} - engines: {node: '>=v18'} - - 
'@commitlint/rules@20.3.1': - resolution: {integrity: sha512-/uic4P+4jVNpqQxz02+Y6vvIC0A2J899DBztA1j6q3f3MOKwydlNrojSh0dQmGDxxT1bXByiRtDhgFnOFnM6Pg==} - engines: {node: '>=v18'} - - '@commitlint/to-lines@20.0.0': - resolution: {integrity: sha512-2l9gmwiCRqZNWgV+pX1X7z4yP0b3ex/86UmUFgoRt672Ez6cAM2lOQeHFRUTuE6sPpi8XBCGnd8Kh3bMoyHwJw==} - engines: {node: '>=v18'} - - '@commitlint/top-level@20.0.0': - resolution: {integrity: sha512-drXaPSP2EcopukrUXvUXmsQMu3Ey/FuJDc/5oiW4heoCfoE5BdLQyuc7veGeE3aoQaTVqZnh4D5WTWe2vefYKg==} - engines: {node: '>=v18'} - - '@commitlint/types@20.3.1': - resolution: {integrity: sha512-VmIFV/JkBRhDRRv7N5B7zEUkNZIx9Mp+8Pe65erz0rKycXLsi8Epcw0XJ+btSeRXgTzE7DyOyA9bkJ9mn/yqVQ==} - engines: {node: '>=v18'} - - '@types/conventional-commits-parser@5.0.2': - resolution: {integrity: sha512-BgT2szDXnVypgpNxOK8aL5SGjUdaQbC++WZNjF1Qge3Og2+zhHj+RWhmehLhYyvQwqAmvezruVfOf8+3m74W+g==} - - '@types/node@25.0.3': - resolution: {integrity: sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==} - - JSONStream@1.3.5: - resolution: {integrity: sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==} - hasBin: true - - ajv@8.17.1: - resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} - - ansi-regex@5.0.1: - resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} - engines: {node: '>=8'} - - ansi-styles@4.3.0: - resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} - engines: {node: '>=8'} - - argparse@2.0.1: - resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - - array-ify@1.0.0: - resolution: {integrity: sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==} - - callsites@3.1.0: 
- resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} - engines: {node: '>=6'} - - chalk@5.6.2: - resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - - cliui@8.0.1: - resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} - engines: {node: '>=12'} - - color-convert@2.0.1: - resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} - engines: {node: '>=7.0.0'} - - color-name@1.1.4: - resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} - - compare-func@2.0.0: - resolution: {integrity: sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==} - - conventional-changelog-angular@7.0.0: - resolution: {integrity: sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==} - engines: {node: '>=16'} - - conventional-changelog-conventionalcommits@7.0.2: - resolution: {integrity: sha512-NKXYmMR/Hr1DevQegFB4MwfM5Vv0m4UIxKZTTYuD98lpTknaZlSRrDOG4X7wIXpGkfsYxZTghUN+Qq+T0YQI7w==} - engines: {node: '>=16'} - - conventional-commits-parser@5.0.0: - resolution: {integrity: sha512-ZPMl0ZJbw74iS9LuX9YIAiW8pfM5p3yh2o/NbXHbkFuZzY5jvdi5jFycEOkmBW5H5I7nA+D6f3UcsCLP2vvSEA==} - engines: {node: '>=16'} - hasBin: true - - cosmiconfig-typescript-loader@6.2.0: - resolution: {integrity: sha512-GEN39v7TgdxgIoNcdkRE3uiAzQt3UXLyHbRHD6YoL048XAeOomyxaP+Hh/+2C6C2wYjxJ2onhJcsQp+L4YEkVQ==} - engines: {node: '>=v18'} - peerDependencies: - '@types/node': '*' - cosmiconfig: '>=9' - typescript: '>=5' - - cosmiconfig@9.0.0: - resolution: {integrity: sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==} - engines: 
{node: '>=14'} - peerDependencies: - typescript: '>=4.9.5' - peerDependenciesMeta: - typescript: - optional: true - - dargs@8.1.0: - resolution: {integrity: sha512-wAV9QHOsNbwnWdNW2FYvE1P56wtgSbM+3SZcdGiWQILwVjACCXDCI3Ai8QlCjMDB8YK5zySiXZYBiwGmNY3lnw==} - engines: {node: '>=12'} - - dot-prop@5.3.0: - resolution: {integrity: sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==} - engines: {node: '>=8'} - - emoji-regex@8.0.0: - resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - - env-paths@2.2.1: - resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} - engines: {node: '>=6'} - - error-ex@1.3.4: - resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} - - escalade@3.2.0: - resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} - engines: {node: '>=6'} - - fast-deep-equal@3.1.3: - resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} - - fast-uri@3.1.0: - resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} - - find-up@7.0.0: - resolution: {integrity: sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==} - engines: {node: '>=18'} - - get-caller-file@2.0.5: - resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} - engines: {node: 6.* || 8.* || >= 10.*} - - git-raw-commits@4.0.0: - resolution: {integrity: sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==} - engines: {node: '>=16'} - hasBin: true - - global-directory@4.0.1: - resolution: {integrity: 
sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==} - engines: {node: '>=18'} - - import-fresh@3.3.1: - resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} - engines: {node: '>=6'} - - import-meta-resolve@4.2.0: - resolution: {integrity: sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==} - - ini@4.1.1: - resolution: {integrity: sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==} - engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} - - is-arrayish@0.2.1: - resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - - is-fullwidth-code-point@3.0.0: - resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} - engines: {node: '>=8'} - - is-obj@2.0.0: - resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} - engines: {node: '>=8'} - - is-text-path@2.0.0: - resolution: {integrity: sha512-+oDTluR6WEjdXEJMnC2z6A4FRwFoYuvShVVEGsS7ewc0UTi2QtAKMDJuL4BDEVt+5T7MjFo12RP8ghOM75oKJw==} - engines: {node: '>=8'} - - jiti@2.6.1: - resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} - hasBin: true - - js-tokens@4.0.0: - resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - - js-yaml@4.1.1: - resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} - hasBin: true - - json-parse-even-better-errors@2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - - json-schema-traverse@1.0.0: - resolution: {integrity: 
sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} - - jsonparse@1.3.1: - resolution: {integrity: sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==} - engines: {'0': node >= 0.2.0} - - lines-and-columns@1.2.4: - resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} - - locate-path@7.2.0: - resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - lodash.camelcase@4.3.0: - resolution: {integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} - - lodash.isplainobject@4.0.6: - resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} - - lodash.kebabcase@4.1.1: - resolution: {integrity: sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==} - - lodash.merge@4.6.2: - resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} - - lodash.mergewith@4.6.2: - resolution: {integrity: sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==} - - lodash.snakecase@4.1.1: - resolution: {integrity: sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==} - - lodash.startcase@4.4.0: - resolution: {integrity: sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==} - - lodash.uniq@4.5.0: - resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} - - lodash.upperfirst@4.3.1: - resolution: {integrity: sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==} - - meow@12.1.1: - 
resolution: {integrity: sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==} - engines: {node: '>=16.10'} - - minimist@1.2.8: - resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - - p-limit@4.0.0: - resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - p-locate@6.0.0: - resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - parent-module@1.0.1: - resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} - engines: {node: '>=6'} - - parse-json@5.2.0: - resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - - path-exists@5.0.0: - resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - - picocolors@1.1.1: - resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} - - require-directory@2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} - - require-from-string@2.0.2: - resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} - engines: {node: '>=0.10.0'} - - resolve-from@4.0.0: - resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} - engines: {node: '>=4'} - - resolve-from@5.0.0: - resolution: {integrity: 
sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} - engines: {node: '>=8'} - - semver@7.7.3: - resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} - engines: {node: '>=10'} - hasBin: true - - split2@4.2.0: - resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} - engines: {node: '>= 10.x'} - - string-width@4.2.3: - resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} - engines: {node: '>=8'} - - strip-ansi@6.0.1: - resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} - engines: {node: '>=8'} - - text-extensions@2.4.0: - resolution: {integrity: sha512-te/NtwBwfiNRLf9Ijqx3T0nlqZiQ2XrrtBvu+cLL8ZRrGkO0NHTug8MYFKyoSrv/sHTaSKfilUkizV6XhxMJ3g==} - engines: {node: '>=8'} - - through@2.3.8: - resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} - - tinyexec@1.0.2: - resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} - engines: {node: '>=18'} - - typescript@5.9.2: - resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} - engines: {node: '>=14.17'} - hasBin: true - - undici-types@7.16.0: - resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==} - - unicorn-magic@0.1.0: - resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} - engines: {node: '>=18'} - - wrap-ansi@7.0.0: - resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} - engines: {node: '>=10'} - - y18n@5.0.8: 
- resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} - engines: {node: '>=10'} - - yargs-parser@21.1.1: - resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} - engines: {node: '>=12'} - - yargs@17.7.2: - resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} - engines: {node: '>=12'} - - yocto-queue@1.2.2: - resolution: {integrity: sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==} - engines: {node: '>=12.20'} - -snapshots: - - '@babel/code-frame@7.27.1': - dependencies: - '@babel/helper-validator-identifier': 7.28.5 - js-tokens: 4.0.0 - picocolors: 1.1.1 - - '@babel/helper-validator-identifier@7.28.5': {} - - '@commitlint/cli@20.3.1(@types/node@25.0.3)(typescript@5.9.2)': - dependencies: - '@commitlint/format': 20.3.1 - '@commitlint/lint': 20.3.1 - '@commitlint/load': 20.3.1(@types/node@25.0.3)(typescript@5.9.2) - '@commitlint/read': 20.3.1 - '@commitlint/types': 20.3.1 - tinyexec: 1.0.2 - yargs: 17.7.2 - transitivePeerDependencies: - - '@types/node' - - typescript - - '@commitlint/config-conventional@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - conventional-changelog-conventionalcommits: 7.0.2 - - '@commitlint/config-validator@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - ajv: 8.17.1 - - '@commitlint/ensure@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - lodash.camelcase: 4.3.0 - lodash.kebabcase: 4.1.1 - lodash.snakecase: 4.1.1 - lodash.startcase: 4.4.0 - lodash.upperfirst: 4.3.1 - - '@commitlint/execute-rule@20.0.0': {} - - '@commitlint/format@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - chalk: 5.6.2 - - '@commitlint/is-ignored@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - semver: 7.7.3 - - '@commitlint/lint@20.3.1': - dependencies: - '@commitlint/is-ignored': 
20.3.1 - '@commitlint/parse': 20.3.1 - '@commitlint/rules': 20.3.1 - '@commitlint/types': 20.3.1 - - '@commitlint/load@20.3.1(@types/node@25.0.3)(typescript@5.9.2)': - dependencies: - '@commitlint/config-validator': 20.3.1 - '@commitlint/execute-rule': 20.0.0 - '@commitlint/resolve-extends': 20.3.1 - '@commitlint/types': 20.3.1 - chalk: 5.6.2 - cosmiconfig: 9.0.0(typescript@5.9.2) - cosmiconfig-typescript-loader: 6.2.0(@types/node@25.0.3)(cosmiconfig@9.0.0(typescript@5.9.2))(typescript@5.9.2) - lodash.isplainobject: 4.0.6 - lodash.merge: 4.6.2 - lodash.uniq: 4.5.0 - transitivePeerDependencies: - - '@types/node' - - typescript - - '@commitlint/message@20.0.0': {} - - '@commitlint/parse@20.3.1': - dependencies: - '@commitlint/types': 20.3.1 - conventional-changelog-angular: 7.0.0 - conventional-commits-parser: 5.0.0 - - '@commitlint/read@20.3.1': - dependencies: - '@commitlint/top-level': 20.0.0 - '@commitlint/types': 20.3.1 - git-raw-commits: 4.0.0 - minimist: 1.2.8 - tinyexec: 1.0.2 - - '@commitlint/resolve-extends@20.3.1': - dependencies: - '@commitlint/config-validator': 20.3.1 - '@commitlint/types': 20.3.1 - global-directory: 4.0.1 - import-meta-resolve: 4.2.0 - lodash.mergewith: 4.6.2 - resolve-from: 5.0.0 - - '@commitlint/rules@20.3.1': - dependencies: - '@commitlint/ensure': 20.3.1 - '@commitlint/message': 20.0.0 - '@commitlint/to-lines': 20.0.0 - '@commitlint/types': 20.3.1 - - '@commitlint/to-lines@20.0.0': {} - - '@commitlint/top-level@20.0.0': - dependencies: - find-up: 7.0.0 - - '@commitlint/types@20.3.1': - dependencies: - '@types/conventional-commits-parser': 5.0.2 - chalk: 5.6.2 - - '@types/conventional-commits-parser@5.0.2': - dependencies: - '@types/node': 25.0.3 - - '@types/node@25.0.3': - dependencies: - undici-types: 7.16.0 - - JSONStream@1.3.5: - dependencies: - jsonparse: 1.3.1 - through: 2.3.8 - - ajv@8.17.1: - dependencies: - fast-deep-equal: 3.1.3 - fast-uri: 3.1.0 - json-schema-traverse: 1.0.0 - require-from-string: 2.0.2 - - 
ansi-regex@5.0.1: {} - - ansi-styles@4.3.0: - dependencies: - color-convert: 2.0.1 - - argparse@2.0.1: {} - - array-ify@1.0.0: {} - - callsites@3.1.0: {} - - chalk@5.6.2: {} - - cliui@8.0.1: - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 7.0.0 - - color-convert@2.0.1: - dependencies: - color-name: 1.1.4 - - color-name@1.1.4: {} - - compare-func@2.0.0: - dependencies: - array-ify: 1.0.0 - dot-prop: 5.3.0 - - conventional-changelog-angular@7.0.0: - dependencies: - compare-func: 2.0.0 - - conventional-changelog-conventionalcommits@7.0.2: - dependencies: - compare-func: 2.0.0 - - conventional-commits-parser@5.0.0: - dependencies: - JSONStream: 1.3.5 - is-text-path: 2.0.0 - meow: 12.1.1 - split2: 4.2.0 - - cosmiconfig-typescript-loader@6.2.0(@types/node@25.0.3)(cosmiconfig@9.0.0(typescript@5.9.2))(typescript@5.9.2): - dependencies: - '@types/node': 25.0.3 - cosmiconfig: 9.0.0(typescript@5.9.2) - jiti: 2.6.1 - typescript: 5.9.2 - - cosmiconfig@9.0.0(typescript@5.9.2): - dependencies: - env-paths: 2.2.1 - import-fresh: 3.3.1 - js-yaml: 4.1.1 - parse-json: 5.2.0 - optionalDependencies: - typescript: 5.9.2 - - dargs@8.1.0: {} - - dot-prop@5.3.0: - dependencies: - is-obj: 2.0.0 - - emoji-regex@8.0.0: {} - - env-paths@2.2.1: {} - - error-ex@1.3.4: - dependencies: - is-arrayish: 0.2.1 - - escalade@3.2.0: {} - - fast-deep-equal@3.1.3: {} - - fast-uri@3.1.0: {} - - find-up@7.0.0: - dependencies: - locate-path: 7.2.0 - path-exists: 5.0.0 - unicorn-magic: 0.1.0 - - get-caller-file@2.0.5: {} - - git-raw-commits@4.0.0: - dependencies: - dargs: 8.1.0 - meow: 12.1.1 - split2: 4.2.0 - - global-directory@4.0.1: - dependencies: - ini: 4.1.1 - - import-fresh@3.3.1: - dependencies: - parent-module: 1.0.1 - resolve-from: 4.0.0 - - import-meta-resolve@4.2.0: {} - - ini@4.1.1: {} - - is-arrayish@0.2.1: {} - - is-fullwidth-code-point@3.0.0: {} - - is-obj@2.0.0: {} - - is-text-path@2.0.0: - dependencies: - text-extensions: 2.4.0 - - jiti@2.6.1: {} - - js-tokens@4.0.0: {} 
- - js-yaml@4.1.1: - dependencies: - argparse: 2.0.1 - - json-parse-even-better-errors@2.3.1: {} - - json-schema-traverse@1.0.0: {} - - jsonparse@1.3.1: {} - - lines-and-columns@1.2.4: {} - - locate-path@7.2.0: - dependencies: - p-locate: 6.0.0 - - lodash.camelcase@4.3.0: {} - - lodash.isplainobject@4.0.6: {} - - lodash.kebabcase@4.1.1: {} - - lodash.merge@4.6.2: {} - - lodash.mergewith@4.6.2: {} - - lodash.snakecase@4.1.1: {} - - lodash.startcase@4.4.0: {} - - lodash.uniq@4.5.0: {} - - lodash.upperfirst@4.3.1: {} - - meow@12.1.1: {} - - minimist@1.2.8: {} - - p-limit@4.0.0: - dependencies: - yocto-queue: 1.2.2 - - p-locate@6.0.0: - dependencies: - p-limit: 4.0.0 - - parent-module@1.0.1: - dependencies: - callsites: 3.1.0 - - parse-json@5.2.0: - dependencies: - '@babel/code-frame': 7.27.1 - error-ex: 1.3.4 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - - path-exists@5.0.0: {} - - picocolors@1.1.1: {} - - require-directory@2.1.1: {} - - require-from-string@2.0.2: {} - - resolve-from@4.0.0: {} - - resolve-from@5.0.0: {} - - semver@7.7.3: {} - - split2@4.2.0: {} - - string-width@4.2.3: - dependencies: - emoji-regex: 8.0.0 - is-fullwidth-code-point: 3.0.0 - strip-ansi: 6.0.1 - - strip-ansi@6.0.1: - dependencies: - ansi-regex: 5.0.1 - - text-extensions@2.4.0: {} - - through@2.3.8: {} - - tinyexec@1.0.2: {} - - typescript@5.9.2: {} - - undici-types@7.16.0: {} - - unicorn-magic@0.1.0: {} - - wrap-ansi@7.0.0: - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - - y18n@5.0.8: {} - - yargs-parser@21.1.1: {} - - yargs@17.7.2: - dependencies: - cliui: 8.0.1 - escalade: 3.2.0 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - string-width: 4.2.3 - y18n: 5.0.8 - yargs-parser: 21.1.1 - - yocto-queue@1.2.2: {} diff --git a/scripts/run_nvim_tests.sh b/scripts/run_nvim_tests.sh index c38e7376..0ce4ad0f 100755 --- a/scripts/run_nvim_tests.sh +++ b/scripts/run_nvim_tests.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/usr/bin/env bash printf 
"\033[1;36m\n================= Rustowl Test Suite =================\n\033[0m\n\n" @@ -23,3 +23,28 @@ else printf "\n\033[1;31m❌ SOME TESTS FAILED\033[0m\n\n" exit 1 fi +#!/bin/sh + +printf "\033[1;36m\n================= Rustowl Test Suite =================\n\033[0m\n\n" + +# Capture the output of the test run +output=$(nvim --headless --noplugin -u ./nvim-tests/minimal_init.lua \ + -c "lua MiniTest.run()" \ + -c "qa" 2>&1) + +nvim_exit_code=$? + +# Print the output +echo "$output" + +echo "" +printf "\033[1;36m\n================= Rustowl Test Summary =================\n\033[0m\n" + +# Check for failures in the output +if echo "$output" | grep -q "Fails (0) and Notes (0)" && [ "$nvim_exit_code" -eq 0 ]; then + printf "\n\033[1;32m✅ ALL TESTS PASSED\033[0m\n\n" + exit 0 +else + printf "\n\033[1;31m❌ SOME TESTS FAILED\033[0m\n\n" + exit 1 +fi diff --git a/scripts/security.sh b/scripts/security.sh index 6f8c340c..604ac3c2 100755 --- a/scripts/security.sh +++ b/scripts/security.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # RustOwl Security & Memory Safety Testing Script # Tests for undefined behavior, memory leaks, and security vulnerabilities # Automatically detects platform capabilities and runs appropriate tests @@ -16,7 +16,7 @@ BOLD='\033[1m' NC='\033[0m' # No Color # Configuration -MIN_RUST_VERSION="1.89.0" +MIN_RUST_VERSION=$(cat "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/build/channel") TEST_TARGET_PATH="./perf-tests/dummy-package" # Output logging configuration @@ -41,799 +41,820 @@ HAS_VALGRIND=0 HAS_CARGO_AUDIT=0 HAS_INSTRUMENTS=0 HAS_CARGO_MACHETE=0 +HAS_NEXTEST=0 # OS detection with more robust platform detection detect_platform() { - if [[ "$OSTYPE" == "linux-gnu"* ]]; then - OS_TYPE="Linux" - elif [[ "$OSTYPE" == "darwin"* ]]; then - OS_TYPE="macOS" - else - # Fallback to uname - local uname_result=$(uname 2>/dev/null || echo "unknown") - case "$uname_result" in - Linux*) OS_TYPE="Linux" ;; - Darwin*) OS_TYPE="macOS" ;; - *) OS_TYPE="Unknown" 
;; - esac - fi - - echo -e "${BLUE}Detected platform: $OS_TYPE${NC}" + if [[ "$OSTYPE" == "linux-gnu"* ]]; then + OS_TYPE="Linux" + elif [[ "$OSTYPE" == "darwin"* ]]; then + OS_TYPE="macOS" + else + # Fallback to uname + local uname_result=$(uname 2>/dev/null || echo "unknown") + case "$uname_result" in + Linux*) OS_TYPE="Linux" ;; + Darwin*) OS_TYPE="macOS" ;; + *) OS_TYPE="Unknown" ;; + esac + fi + + echo -e "${BLUE}Detected platform: $OS_TYPE${NC}" } # Detect CI environment and configure accordingly detect_ci_environment() { - # Check for common CI environment variables - if [[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]]; then - IS_CI=1 - CI_AUTO_INSTALL=1 - VERBOSE_OUTPUT=1 # Enable verbose output in CI - echo -e "${BLUE}CI environment detected${NC}" - - # Show which CI system we detected - if [[ -n "${GITHUB_ACTIONS:-}" ]]; then - echo -e "${BLUE} Running on GitHub Actions${NC}" - else - echo -e "${BLUE} Running on unknown CI system${NC}" - fi - - echo -e "${BLUE} Auto-installation enabled for missing tools${NC}" - echo -e "${BLUE} Verbose output enabled for detailed logging${NC}" - else - echo -e "${BLUE}Interactive environment detected${NC}" - fi + # Check for common CI environment variables + if [[ -n "${CI:-}" ]] || [[ -n "${GITHUB_ACTIONS:-}" ]]; then + IS_CI=1 + CI_AUTO_INSTALL=1 + VERBOSE_OUTPUT=1 # Enable verbose output in CI + echo -e "${BLUE}CI environment detected${NC}" + + # Show which CI system we detected + if [[ -n "${GITHUB_ACTIONS:-}" ]]; then + echo -e "${BLUE} Running on GitHub Actions${NC}" + else + echo -e "${BLUE} Running on unknown CI system${NC}" + fi + + echo -e "${BLUE} Auto-installation enabled for missing tools${NC}" + echo -e "${BLUE} Verbose output enabled for detailed logging${NC}" + else + echo -e "${BLUE}Interactive environment detected${NC}" + fi } # Install missing tools automatically in CI install_required_tools() { - echo -e "${BLUE}Installing missing security tools...${NC}" - - # Install cargo-audit - if [[ 
$HAS_CARGO_AUDIT -eq 0 ]] && [[ $RUN_AUDIT -eq 1 ]]; then - echo "Installing cargo-audit..." - if ! cargo install cargo-audit; then - echo -e "${RED}Failed to install cargo-audit${NC}" - fi - fi - - # Install cargo-machete - if [[ $HAS_CARGO_MACHETE -eq 0 ]] && [[ $RUN_CARGO_MACHETE -eq 1 ]]; then - echo "Installing cargo-machete..." - if ! cargo install cargo-machete; then - echo -e "${RED}Failed to install cargo-machete${NC}" - fi - fi - - # Install Miri component if missing and needed - if [[ $HAS_MIRI -eq 0 ]] && [[ $RUN_MIRI -eq 1 ]]; then - echo "Installing Miri component..." - if rustup component add miri --toolchain nightly; then - echo -e "${GREEN}Miri component installed successfully${NC}" - HAS_MIRI=1 - else - echo -e "${RED}Failed to install Miri component${NC}" - fi - fi - - # Install Valgrind on Linux (if package manager available) - if [[ "$OS_TYPE" == "Linux" ]] && [[ $HAS_VALGRIND -eq 0 ]] && [[ $RUN_VALGRIND -eq 1 ]]; then - echo "Attempting to install Valgrind..." - if command -v apt-get >/dev/null 2>&1; then - if sudo apt-get update && sudo apt-get install -y valgrind; then - echo -e "${GREEN}Valgrind installed successfully${NC}" - HAS_VALGRIND=1 - else - echo -e "${RED}Failed to install Valgrind via apt-get${NC}" - fi - elif command -v yum >/dev/null 2>&1; then - if sudo yum install -y valgrind; then - echo -e "${GREEN}Valgrind installed successfully${NC}" - HAS_VALGRIND=1 - else - echo -e "${RED}Failed to install Valgrind via yum${NC}" - fi - elif command -v pacman >/dev/null 2>&1; then - if sudo pacman -S --noconfirm valgrind; then - echo -e "${GREEN}Valgrind installed successfully${NC}" - HAS_VALGRIND=1 - else - echo -e "${RED}Failed to install Valgrind via pacman${NC}" - fi - else - echo -e "${YELLOW}No supported package manager found for Valgrind installation${NC}" - fi - fi - - # Install/setup Xcode on macOS (CI environments) - if [[ "$OS_TYPE" == "macOS" ]] && [[ $IS_CI -eq 1 ]] && [[ $HAS_INSTRUMENTS -eq 0 ]] && [[ $RUN_INSTRUMENTS -eq 
1 ]]; then - echo "Setting up Xcode for CI environment..." - - # First, try to install/setup command line tools - if sudo xcode-select --install 2>/dev/null || true; then - echo "Xcode command line tools installation initiated..." - fi - - # Set the developer directory - if [[ -d "/Applications/Xcode.app" ]]; then - echo "Found Xcode.app, setting developer directory..." - sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer - elif [[ -d "/Library/Developer/CommandLineTools" ]]; then - echo "Using Command Line Tools..." - sudo xcode-select --switch /Library/Developer/CommandLineTools - fi - - # Accept license if needed - if sudo xcodebuild -license accept 2>/dev/null; then - echo "Xcode license accepted" - fi - - # Verify setup - if xcode-select -p >/dev/null 2>&1; then - echo "Xcode developer directory: $(xcode-select -p)" - - # Check if instruments is now available - if command -v instruments >/dev/null 2>&1; then - if timeout 10s instruments -help >/dev/null 2>&1; then - HAS_INSTRUMENTS=1 - echo -e "${GREEN}Instruments is now available${NC}" - else - echo -e "${YELLOW}Instruments found but may not be fully functional${NC}" - fi - else - echo -e "${YELLOW}Instruments still not available after Xcode setup${NC}" - fi - else - echo -e "${RED}Failed to set up Xcode properly${NC}" - fi - fi - - echo "" + echo -e "${BLUE}Installing missing security tools...${NC}" + + # Install cargo-audit + if [[ $HAS_CARGO_AUDIT -eq 0 ]] && [[ $RUN_AUDIT -eq 1 ]]; then + echo "Installing cargo-audit..." + if ! cargo install cargo-audit; then + echo -e "${RED}Failed to install cargo-audit${NC}" + fi + fi + + # Install cargo-machete + if [[ $HAS_CARGO_MACHETE -eq 0 ]] && [[ $RUN_CARGO_MACHETE -eq 1 ]]; then + echo "Installing cargo-machete..." + if ! 
cargo install cargo-machete; then + echo -e "${RED}Failed to install cargo-machete${NC}" + fi + fi + + # Install Miri component if missing and needed + if [[ $HAS_MIRI -eq 0 ]] && [[ $RUN_MIRI -eq 1 ]]; then + echo "Installing Miri component..." + if rustup component add miri --toolchain nightly; then + echo -e "${GREEN}Miri component installed successfully${NC}" + HAS_MIRI=1 + else + echo -e "${RED}Failed to install Miri component${NC}" + fi + fi + + # Install Valgrind on Linux (if package manager available) + if [[ "$OS_TYPE" == "Linux" ]] && [[ $HAS_VALGRIND -eq 0 ]] && [[ $RUN_VALGRIND -eq 1 ]]; then + echo "Attempting to install Valgrind..." + if command -v apt-get >/dev/null 2>&1; then + if sudo apt-get update && sudo apt-get install -y valgrind; then + echo -e "${GREEN}Valgrind installed successfully${NC}" + HAS_VALGRIND=1 + else + echo -e "${RED}Failed to install Valgrind via apt-get${NC}" + fi + elif command -v yum >/dev/null 2>&1; then + if sudo yum install -y valgrind; then + echo -e "${GREEN}Valgrind installed successfully${NC}" + HAS_VALGRIND=1 + else + echo -e "${RED}Failed to install Valgrind via yum${NC}" + fi + elif command -v pacman >/dev/null 2>&1; then + if sudo pacman -S --noconfirm valgrind; then + echo -e "${GREEN}Valgrind installed successfully${NC}" + HAS_VALGRIND=1 + else + echo -e "${RED}Failed to install Valgrind via pacman${NC}" + fi + else + echo -e "${YELLOW}No supported package manager found for Valgrind installation${NC}" + fi + fi + + # Install/setup Xcode on macOS (CI environments) + if [[ "$OS_TYPE" == "macOS" ]] && [[ $IS_CI -eq 1 ]] && [[ $HAS_INSTRUMENTS -eq 0 ]] && [[ $RUN_INSTRUMENTS -eq 1 ]]; then + echo "Setting up Xcode for CI environment..." + + # First, try to install/setup command line tools + if sudo xcode-select --install 2>/dev/null || true; then + echo "Xcode command line tools installation initiated..." 
+ fi + + # Set the developer directory + if [[ -d "/Applications/Xcode.app" ]]; then + echo "Found Xcode.app, setting developer directory..." + sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer + elif [[ -d "/Library/Developer/CommandLineTools" ]]; then + echo "Using Command Line Tools..." + sudo xcode-select --switch /Library/Developer/CommandLineTools + fi + + # Accept license if needed + if sudo xcodebuild -license accept 2>/dev/null; then + echo "Xcode license accepted" + fi + + # Verify setup + if xcode-select -p >/dev/null 2>&1; then + echo "Xcode developer directory: $(xcode-select -p)" + + # Check if instruments is now available + if command -v instruments >/dev/null 2>&1; then + if timeout 10s instruments -help >/dev/null 2>&1; then + HAS_INSTRUMENTS=1 + echo -e "${GREEN}Instruments is now available${NC}" + else + echo -e "${YELLOW}Instruments found but may not be fully functional${NC}" + fi + else + echo -e "${YELLOW}Instruments still not available after Xcode setup${NC}" + fi + else + echo -e "${RED}Failed to set up Xcode properly${NC}" + fi + fi + + echo "" } # Install Xcode for macOS CI environments install_xcode_ci() { - if [[ "$OS_TYPE" != "macOS" ]] || [[ $IS_CI -ne 1 ]]; then - return 0 - fi - - echo "Setting up Xcode for CI environment..." - - # First, try to install/setup command line tools - if sudo xcode-select --install 2>/dev/null || true; then - echo "Xcode command line tools installation initiated..." - fi - - # Set the developer directory - if [[ -d "/Applications/Xcode.app" ]]; then - echo "Found Xcode.app, setting developer directory..." - sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer - elif [[ -d "/Library/Developer/CommandLineTools" ]]; then - echo "Using Command Line Tools..." 
- sudo xcode-select --switch /Library/Developer/CommandLineTools - fi - - # Accept license if needed - if sudo xcodebuild -license accept 2>/dev/null; then - echo "Xcode license accepted" - fi - - # Verify setup - if xcode-select -p >/dev/null 2>&1; then - echo "Xcode developer directory: $(xcode-select -p)" - - # Check if instruments is now available - if command -v instruments >/dev/null 2>&1; then - if timeout 10s instruments -help >/dev/null 2>&1; then - HAS_INSTRUMENTS=1 - echo -e "${GREEN}Instruments is now available${NC}" - else - echo -e "${YELLOW}Instruments found but may not be fully functional${NC}" - fi - else - echo -e "${YELLOW}Instruments still not available after Xcode setup${NC}" - fi - else - echo -e "${RED}Failed to set up Xcode properly${NC}" - fi - - echo "" + if [[ "$OS_TYPE" != "macOS" ]] || [[ $IS_CI -ne 1 ]]; then + return 0 + fi + + echo "Setting up Xcode for CI environment..." + + # First, try to install/setup command line tools + if sudo xcode-select --install 2>/dev/null || true; then + echo "Xcode command line tools installation initiated..." + fi + + # Set the developer directory + if [[ -d "/Applications/Xcode.app" ]]; then + echo "Found Xcode.app, setting developer directory..." + sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer + elif [[ -d "/Library/Developer/CommandLineTools" ]]; then + echo "Using Command Line Tools..." 
+ sudo xcode-select --switch /Library/Developer/CommandLineTools + fi + + # Accept license if needed + if sudo xcodebuild -license accept 2>/dev/null; then + echo "Xcode license accepted" + fi + + # Verify setup + if xcode-select -p >/dev/null 2>&1; then + echo "Xcode developer directory: $(xcode-select -p)" + + # Check if instruments is now available + if command -v instruments >/dev/null 2>&1; then + if timeout 10s instruments -help >/dev/null 2>&1; then + HAS_INSTRUMENTS=1 + echo -e "${GREEN}Instruments is now available${NC}" + else + echo -e "${YELLOW}Instruments found but may not be fully functional${NC}" + fi + else + echo -e "${YELLOW}Instruments still not available after Xcode setup${NC}" + fi + else + echo -e "${RED}Failed to set up Xcode properly${NC}" + fi + + echo "" } # Auto-configure tests based on platform capabilities and toolchain compatibility auto_configure_tests() { - echo -e "${YELLOW}Auto-configuring tests for $OS_TYPE...${NC}" - - case "$OS_TYPE" in - "Linux") - # Linux: Full test suite available - echo " Linux detected: Enabling Miri, Valgrind, and Audit" - ;; - "macOS") - # macOS: Focus on Rust-native tools and macOS-compatible alternatives - echo " macOS detected: Enabling Miri, Audit, and macOS-compatible tools" - echo " Disabling Valgrind (unreliable on macOS)" - echo " Enabling cargo-machete for unused dependency detection" - echo " Disabling Instruments (complex Xcode setup required)" - RUN_VALGRIND=0 - RUN_THREAD_SANITIZER=0 - RUN_CARGO_MACHETE=1 # Detect unused dependencies - RUN_INSTRUMENTS=0 # Disable by default (complex setup required) - ;; - *) - echo " Unknown platform: Enabling basic tests only" - RUN_VALGRIND=0 - RUN_INSTRUMENTS=0 - # Also disable nightly-dependent features on unknown platforms - RUN_MIRI=0 - ;; - esac - - echo "" + echo -e "${YELLOW}Auto-configuring tests for $OS_TYPE...${NC}" + + case "$OS_TYPE" in + "Linux") + # Linux: Full test suite available + echo " Linux detected: Enabling Miri, Valgrind, and Audit" + 
;; + "macOS") + # macOS: Focus on Rust-native tools and macOS-compatible alternatives + echo " macOS detected: Enabling Miri, Audit, and macOS-compatible tools" + echo " Disabling Valgrind (unreliable on macOS)" + echo " Enabling cargo-machete for unused dependency detection" + echo " Disabling Instruments (complex Xcode setup required)" + RUN_VALGRIND=0 + RUN_THREAD_SANITIZER=0 + RUN_CARGO_MACHETE=1 # Detect unused dependencies + RUN_INSTRUMENTS=0 # Disable by default (complex setup required) + ;; + *) + echo " Unknown platform: Enabling basic tests only" + RUN_VALGRIND=0 + RUN_INSTRUMENTS=0 + # Also disable nightly-dependent features on unknown platforms + RUN_MIRI=0 + ;; + esac + + echo "" } usage() { - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Security and Memory Safety Testing Script" - echo "Automatically detects platform and runs appropriate security tests" - echo "" - echo "Options:" - echo " -h, --help Show this help message" - echo " --check Check tool availability and system readiness" - echo " --install Install missing security tools automatically" - echo " --ci Force CI mode (auto-install tools)" - echo " --no-auto-install Disable automatic installation in CI" - echo " --no-miri Skip Miri tests" - echo " --no-valgrind Skip Valgrind tests" - echo " --no-audit Skip cargo audit security check" - echo " --no-instruments Skip Instruments tests" - echo "" - echo "Platform Support:" - echo " Linux: Miri, Valgrind, cargo-audit" - echo " macOS: Miri, cargo-audit, cargo-machete" - echo "" - echo "CI Environment:" - echo " The script automatically detects CI environments and installs missing tools." - echo " Supported: GitHub Actions, GitLab CI, Travis CI, CircleCI, Jenkins," - echo " Buildkite, Azure DevOps, and others with CI environment variables." 
- echo "" - echo "Tests performed:" - echo " - Miri: Detects undefined behavior in Rust code" - echo " - Valgrind: Memory error detection (Linux)" - echo " - cargo-audit: Security vulnerability scanning" - echo "" - echo "Examples:" - echo " $0 # Auto-detect platform and run appropriate tests" - echo " $0 --check # Check which tools are available" - echo " $0 --install # Install missing tools automatically" - echo " $0 --ci # Force CI mode with auto-installation" - echo " $0 --no-miri # Run tests but skip Miri" - echo "" + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Security and Memory Safety Testing Script" + echo "Automatically detects platform and runs appropriate security tests" + echo "" + echo "Options:" + echo " -h, --help Show this help message" + echo " --check Check tool availability and system readiness" + echo " --install Install missing security tools automatically" + echo " --ci Force CI mode (auto-install tools)" + echo " --no-auto-install Disable automatic installation in CI" + echo " --no-miri Skip Miri tests" + echo " --no-valgrind Skip Valgrind tests" + echo " --no-audit Skip cargo audit security check" + echo " --no-instruments Skip Instruments tests" + echo "" + echo "Platform Support:" + echo " Linux: Miri, Valgrind, cargo-audit" + echo " macOS: Miri, cargo-audit, cargo-machete" + echo "" + echo "CI Environment:" + echo " The script automatically detects CI environments and installs missing tools." + echo " Supported: GitHub Actions, GitLab CI, Travis CI, CircleCI, Jenkins," + echo " Buildkite, Azure DevOps, and others with CI environment variables." 
+ echo "" + echo "Tests performed:" + echo " - Miri: Detects undefined behavior in Rust code" + echo " - Valgrind: Memory error detection (Linux)" + echo " - cargo-audit: Security vulnerability scanning" + echo "" + echo "Examples:" + echo " $0 # Auto-detect platform and run appropriate tests" + echo " $0 --check # Check which tools are available" + echo " $0 --install # Install missing tools automatically" + echo " $0 --ci # Force CI mode with auto-installation" + echo " $0 --no-miri # Run tests but skip Miri" + echo "" } # Parse command line arguments while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - usage - exit 0 - ;; - --check) - MODE="check" - shift - ;; - --install) - MODE="install" - shift - ;; - --ci) - IS_CI=1 - CI_AUTO_INSTALL=1 - shift - ;; - --no-auto-install) - CI_AUTO_INSTALL=0 - shift - ;; - --no-miri) - RUN_MIRI=0 - shift - ;; - --no-valgrind) - RUN_VALGRIND=0 - shift - ;; - --no-audit) - RUN_AUDIT=0 - shift - ;; - --no-instruments) - RUN_INSTRUMENTS=0 - shift - ;; - *) - echo -e "${RED}Unknown option: $1${NC}" - usage - exit 1 - ;; - esac + case $1 in + -h | --help) + usage + exit 0 + ;; + --check) + MODE="check" + shift + ;; + --install) + MODE="install" + shift + ;; + --ci) + IS_CI=1 + CI_AUTO_INSTALL=1 + shift + ;; + --no-auto-install) + CI_AUTO_INSTALL=0 + shift + ;; + --no-miri) + RUN_MIRI=0 + shift + ;; + --no-valgrind) + RUN_VALGRIND=0 + shift + ;; + --no-audit) + RUN_AUDIT=0 + shift + ;; + --no-instruments) + RUN_INSTRUMENTS=0 + shift + ;; + *) + echo -e "${RED}Unknown option: $1${NC}" + usage + exit 1 + ;; + esac done # Helper function to print section headers print_section_header() { - local title="$1" - local description="$2" - echo -e "${BLUE}${BOLD}$title${NC}" - echo -e "${BLUE}================================${NC}" - echo "$description" - echo "" + local title="$1" + local description="$2" + echo -e "${BLUE}${BOLD}$title${NC}" + echo -e "${BLUE}================================${NC}" + echo "$description" + echo "" } # Check Rust 
version compatibility check_rust_version() { - if ! command -v rustc >/dev/null 2>&1; then - echo -e "${RED}[ERROR] Rust compiler not found. Please install Rust: https://rustup.rs/${NC}" - exit 1 - fi - - local current_version=$(rustc --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) - local min_version="$MIN_RUST_VERSION" - - if [ -z "$current_version" ]; then - echo -e "${YELLOW}[WARN] Could not determine Rust version, proceeding anyway...${NC}" - return 0 - fi - - # Simple version comparison (assumes semantic versioning) - if printf '%s\n%s\n' "$min_version" "$current_version" | sort -V -C; then - echo -e "${GREEN}[OK] Rust $current_version >= $min_version (minimum required)${NC}" - return 0 - else - echo -e "${RED}[ERROR] Rust $current_version < $min_version (minimum required)${NC}" - echo -e "${YELLOW}Please update Rust: rustup update${NC}" - exit 1 - fi + if ! command -v rustc >/dev/null 2>&1; then + echo -e "${RED}[ERROR] Rust compiler not found. Please install Rust: https://rustup.rs/${NC}" + exit 1 + fi + + local current_version=$(rustc --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1) + local min_version="$MIN_RUST_VERSION" + + if [ -z "$current_version" ]; then + echo -e "${YELLOW}[WARN] Could not determine Rust version, proceeding anyway...${NC}" + return 0 + fi + + # Simple version comparison (assumes semantic versioning) + if printf '%s\n%s\n' "$min_version" "$current_version" | sort -V -C; then + echo -e "${GREEN}[OK] Rust $current_version >= $min_version (minimum required)${NC}" + return 0 + else + echo -e "${RED}[ERROR] Rust $current_version < $min_version (minimum required)${NC}" + echo -e "${YELLOW}Please update Rust: rustup update${NC}" + exit 1 + fi } # Detect available tools based on platform detect_tools() { - echo -e "${BLUE}Detecting available security tools...${NC}" - - # Check for cargo-audit - if command -v cargo-audit >/dev/null 2>&1; then - HAS_CARGO_AUDIT=1 - echo -e "${GREEN}[OK] cargo-audit available${NC}" - else - 
echo -e "${YELLOW}! cargo-audit not found${NC}" - HAS_CARGO_AUDIT=0 - fi - - # Check for cargo-machete - if command -v cargo-machete >/dev/null 2>&1; then - HAS_CARGO_MACHETE=1 - echo -e "${GREEN}[OK] cargo-machete available${NC}" - else - echo -e "${YELLOW}! cargo-machete not found${NC}" - HAS_CARGO_MACHETE=0 - fi - - # Platform-specific tool detection - case "$OS_TYPE" in - "macOS") - # Check for Instruments (part of Xcode) - # In CI environments, we'll try to install Xcode, so check normally - if command -v instruments >/dev/null 2>&1; then - # Additional check: try to run instruments to see if it actually works - if timeout 10s instruments -help >/dev/null 2>&1; then - HAS_INSTRUMENTS=1 - echo -e "${GREEN}[OK] Instruments available${NC}" - else - HAS_INSTRUMENTS=0 - echo -e "${YELLOW}! Instruments found but not working (needs Xcode setup)${NC}" - fi - else - HAS_INSTRUMENTS=0 - echo -e "${YELLOW}! Instruments not found (will try to install Xcode in CI)${NC}" - fi - ;; - "Linux") - # Check for Valgrind - if command -v valgrind >/dev/null 2>&1; then - HAS_VALGRIND=1 - echo -e "${GREEN}[OK] Valgrind available${NC}" - else - echo -e "${YELLOW}! Valgrind not found${NC}" - HAS_VALGRIND=0 - fi - ;; - esac - - # Check nightly toolchain availability for advanced features - local current_toolchain=$(rustup show active-toolchain | cut -d' ' -f1) - echo -e "${BLUE}Active toolchain: $current_toolchain${NC}" - - if [[ "$current_toolchain" == *"nightly"* ]]; then - echo -e "${GREEN}[OK] Nightly toolchain is active (from rust-toolchain.toml)${NC}" - else - echo -e "${YELLOW}! Stable toolchain detected${NC}" - echo -e "${YELLOW}Some advanced features require nightly (check rust-toolchain.toml)${NC}" - fi - - # Check if Miri component is available on current toolchain - if rustup component list --installed | grep -q miri 2>/dev/null; then - HAS_MIRI=1 - echo -e "${GREEN}[OK] Miri is available${NC}" - else - echo -e "${YELLOW}! 
Miri component not installed${NC}" - echo -e "${YELLOW}Install with: rustup component add miri${NC}" - HAS_MIRI=0 - fi - - echo "" + echo -e "${BLUE}Detecting available security tools...${NC}" + + # Check for cargo-audit + if command -v cargo-audit >/dev/null 2>&1; then + HAS_CARGO_AUDIT=1 + echo -e "${GREEN}[OK] cargo-audit available${NC}" + else + echo -e "${YELLOW}! cargo-audit not found${NC}" + HAS_CARGO_AUDIT=0 + fi + + # Check for cargo-machete + if command -v cargo-machete >/dev/null 2>&1; then + HAS_CARGO_MACHETE=1 + echo -e "${GREEN}[OK] cargo-machete available${NC}" + else + echo -e "${YELLOW}! cargo-machete not found${NC}" + HAS_CARGO_MACHETE=0 + fi + + # Check for cargo-nextest + if cargo nextest --version >/dev/null 2>&1; then + HAS_NEXTEST=1 + echo -e "${GREEN}[OK] cargo-nextest available${NC}" + else + echo -e "${YELLOW}! cargo-nextest not found${NC}" + HAS_NEXTEST=0 + fi + + # Platform-specific tool detection + case "$OS_TYPE" in + "macOS") + # Check for Instruments (part of Xcode) + # In CI environments, we'll try to install Xcode, so check normally + if command -v instruments >/dev/null 2>&1; then + # Additional check: try to run instruments to see if it actually works + if timeout 10s instruments -help >/dev/null 2>&1; then + HAS_INSTRUMENTS=1 + echo -e "${GREEN}[OK] Instruments available${NC}" + else + HAS_INSTRUMENTS=0 + echo -e "${YELLOW}! Instruments found but not working (needs Xcode setup)${NC}" + fi + else + HAS_INSTRUMENTS=0 + echo -e "${YELLOW}! Instruments not found (will try to install Xcode in CI)${NC}" + fi + ;; + "Linux") + # Check for Valgrind + if command -v valgrind >/dev/null 2>&1; then + HAS_VALGRIND=1 + echo -e "${GREEN}[OK] Valgrind available${NC}" + else + echo -e "${YELLOW}! 
Valgrind not found${NC}" + HAS_VALGRIND=0 + fi + ;; + esac + + # Check nightly toolchain availability for advanced features + local current_toolchain=$(rustup show active-toolchain | cut -d' ' -f1) + echo -e "${BLUE}Active toolchain: $current_toolchain${NC}" + + if [[ "$current_toolchain" == *"nightly"* ]]; then + echo -e "${GREEN}[OK] Nightly toolchain is active (from rust-toolchain.toml)${NC}" + else + echo -e "${YELLOW}! Stable toolchain detected${NC}" + echo -e "${YELLOW}Some advanced features require nightly (check rust-toolchain.toml)${NC}" + fi + + # Check if Miri component is available on current toolchain + if rustup component list --installed | grep -q miri 2>/dev/null; then + HAS_MIRI=1 + echo -e "${GREEN}[OK] Miri is available${NC}" + else + echo -e "${YELLOW}! Miri component not installed${NC}" + echo -e "${YELLOW}Install with: rustup component add miri${NC}" + HAS_MIRI=0 + fi + + echo "" } # Build the project with the toolchain specified in rust-toolchain.toml build_project() { - echo -e "${YELLOW}Building RustOwl in security mode...${NC}" - echo -e "${BLUE}Using toolchain from rust-toolchain.toml${NC}" - - # Build with the current toolchain (specified by rust-toolchain.toml) - RUSTC_BOOTSTRAP=1 cargo build --profile=security - - local binary_name="rustowl" - - if [ ! -f "./target/security/$binary_name" ]; then - echo -e "${RED}[ERROR] Failed to build rustowl binary${NC}" - exit 1 - fi - - echo -e "${GREEN}[OK] Build completed successfully${NC}" - echo "" + echo -e "${YELLOW}Building RustOwl in security mode...${NC}" + echo -e "${BLUE}Using toolchain from rust-toolchain.toml${NC}" + + # Build with the current toolchain (specified by rust-toolchain.toml) + RUSTC_BOOTSTRAP=1 cargo build --profile=security + + local binary_name="rustowl" + + if [ ! 
-f "./target/security/$binary_name" ]; then + echo -e "${RED}[ERROR] Failed to build rustowl binary${NC}" + exit 1 + fi + + echo -e "${GREEN}[OK] Build completed successfully${NC}" + echo "" } # Show tool status summary show_tool_status() { - echo -e "${BLUE}${BOLD}Tool Availability Summary${NC}" - echo -e "${BLUE}================================${NC}" - echo "" - - echo -e "${BLUE}Platform: $OS_TYPE${NC}" - echo "" - - echo "Security Tools:" - echo -e " Miri (UB detection): $([ $HAS_MIRI -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" - - if [[ "$OS_TYPE" == "Linux" ]]; then - echo -e " Valgrind (memory errors): $([ $HAS_VALGRIND -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" - fi - - echo -e " cargo-audit (vulnerabilities): $([ $HAS_CARGO_AUDIT -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" - - if [[ "$OS_TYPE" == "macOS" ]]; then - echo -e " Instruments (performance): $([ $HAS_INSTRUMENTS -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" - fi - - echo "" - - # Check nightly toolchain for other advanced features - local current_toolchain=$(rustup show active-toolchain | cut -d' ' -f1) - echo "Advanced Features:" - if [[ "$current_toolchain" == *"nightly"* ]]; then - echo -e " Nightly toolchain: ${GREEN}[OK] Available${NC}" - echo -e " Advanced features: ${GREEN}[OK] Supported${NC}" - else - echo -e " Nightly toolchain: ${YELLOW}! Stable toolchain active${NC}" - echo -e " Advanced features: ${YELLOW}! 
Require nightly${NC}" - fi - - echo "" - echo "Test Configuration:" - echo -e " Run Miri: $([ $RUN_MIRI -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" - echo -e " Run Valgrind: $([ $RUN_VALGRIND -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" - echo -e " Run Audit: $([ $RUN_AUDIT -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" - echo -e " Run Instruments: $([ $RUN_INSTRUMENTS -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" - - echo "" + echo -e "${BLUE}${BOLD}Tool Availability Summary${NC}" + echo -e "${BLUE}================================${NC}" + echo "" + + echo -e "${BLUE}Platform: $OS_TYPE${NC}" + echo "" + + echo "Security Tools:" + echo -e " Miri (UB detection): $([ $HAS_MIRI -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" + + if [[ "$OS_TYPE" == "Linux" ]]; then + echo -e " Valgrind (memory errors): $([ $HAS_VALGRIND -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" + fi + + echo -e " cargo-audit (vulnerabilities): $([ $HAS_CARGO_AUDIT -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" + + if [[ "$OS_TYPE" == "macOS" ]]; then + echo -e " Instruments (performance): $([ $HAS_INSTRUMENTS -eq 1 ] && echo -e "${GREEN}[OK] Available${NC}" || echo -e "${RED}[ERROR] Missing${NC}")" + fi + + echo "" + + # Check nightly toolchain for other advanced features + local current_toolchain=$(rustup show active-toolchain | cut -d' ' -f1) + echo "Advanced Features:" + if [[ "$current_toolchain" == *"nightly"* ]]; then + echo -e " Nightly toolchain: ${GREEN}[OK] Available${NC}" + echo -e " Advanced features: ${GREEN}[OK] Supported${NC}" + else + echo -e " Nightly toolchain: ${YELLOW}! Stable toolchain active${NC}" + echo -e " Advanced features: ${YELLOW}! 
Require nightly${NC}" + fi + + echo "" + echo "Test Configuration:" + echo -e " Run Miri: $([ $RUN_MIRI -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" + echo -e " Run Valgrind: $([ $RUN_VALGRIND -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" + echo -e " Run Audit: $([ $RUN_AUDIT -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" + echo -e " Run Instruments: $([ $RUN_INSTRUMENTS -eq 1 ] && echo -e "${GREEN}Enabled${NC}" || echo -e "${YELLOW}Disabled${NC}")" + + echo "" } # Create security summary with tool outputs create_security_summary() { - local summary_file="$LOG_DIR/security_summary_${TIMESTAMP}.md" - - mkdir -p "$LOG_DIR" - - echo "# Security Testing Summary" > "$summary_file" - echo "" >> "$summary_file" - echo "**Generated:** $(date)" >> "$summary_file" - echo "**Platform:** $OS_TYPE" >> "$summary_file" - echo "**CI Environment:** $([ $IS_CI -eq 1 ] && echo "Yes" || echo "No")" >> "$summary_file" - echo "**Rust Version:** $(rustc --version 2>/dev/null || echo 'N/A')" >> "$summary_file" - echo "" >> "$summary_file" - - # Tool availability summary - echo "## Tool Availability" >> "$summary_file" - echo "" >> "$summary_file" - echo "| Tool | Status | Notes |" >> "$summary_file" - echo "|------|--------|-------|" >> "$summary_file" - echo "| Miri | $([ $HAS_MIRI -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing") | Undefined behavior detection |" >> "$summary_file" - echo "| Valgrind | $([ $HAS_VALGRIND -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing/N/A") | Memory error detection (Linux) |" >> "$summary_file" - echo "| cargo-audit | $([ $HAS_CARGO_AUDIT -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing") | Security vulnerability scanning |" >> "$summary_file" - echo "| Instruments | $([ $HAS_INSTRUMENTS -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing/N/A") | Performance analysis (macOS) |" >> "$summary_file" - echo "" >> 
"$summary_file" + local summary_file="$LOG_DIR/security_summary_${TIMESTAMP}.md" + + mkdir -p "$LOG_DIR" + + echo "# Security Testing Summary" >"$summary_file" + echo "" >>"$summary_file" + echo "**Generated:** $(date)" >>"$summary_file" + echo "**Platform:** $OS_TYPE" >>"$summary_file" + echo "**CI Environment:** $([ $IS_CI -eq 1 ] && echo "Yes" || echo "No")" >>"$summary_file" + echo "**Rust Version:** $(rustc --version 2>/dev/null || echo 'N/A')" >>"$summary_file" + echo "" >>"$summary_file" + + # Tool availability summary + echo "## Tool Availability" >>"$summary_file" + echo "" >>"$summary_file" + echo "| Tool | Status | Notes |" >>"$summary_file" + echo "|------|--------|-------|" >>"$summary_file" + echo "| Miri | $([ $HAS_MIRI -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing") | Undefined behavior detection |" >>"$summary_file" + echo "| Valgrind | $([ $HAS_VALGRIND -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing/N/A") | Memory error detection (Linux) |" >>"$summary_file" + echo "| cargo-audit | $([ $HAS_CARGO_AUDIT -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing") | Security vulnerability scanning |" >>"$summary_file" + echo "| Instruments | $([ $HAS_INSTRUMENTS -eq 1 ] && echo "[OK] Available" || echo "[FAIL] Missing/N/A") | Performance analysis (macOS) |" >>"$summary_file" + echo "" >>"$summary_file" } # Run Miri tests using the current toolchain run_miri_tests() { - if [[ $RUN_MIRI -eq 0 ]]; then - return 0 - fi - - if [[ $HAS_MIRI -eq 0 ]]; then - echo -e "${YELLOW}Skipping Miri tests (component not installed)${NC}" - return 0 - fi - - echo -e "${BLUE}${BOLD}Running Miri Tests${NC}" - echo -e "${BLUE}================================${NC}" - echo "Miri detects undefined behavior in Rust code" - echo "" - - # First run unit tests which are guaranteed to work with Miri - echo -e "${BLUE}Running RustOwl unit tests with Miri...${NC}" - echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" - 
if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_unit_tests" "cargo miri test --lib"; then - echo -e "${GREEN}[OK] RustOwl unit tests passed with Miri${NC}" - else - echo -e "${RED}[FAIL] RustOwl unit tests failed with Miri${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_unit_tests_${TIMESTAMP}.log${NC}" - return 1 - fi - - # Test RustOwl's main functionality with Miri - echo -e "${YELLOW}Testing RustOwl execution with Miri...${NC}" - - if [ -d "$TEST_TARGET_PATH" ]; then - echo -e "${BLUE}Running RustOwl analysis with Miri...${NC}" - echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" - if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_rustowl_analysis" "cargo miri run --bin rustowl -- check $TEST_TARGET_PATH"; then - echo -e "${GREEN}[OK] RustOwl analysis completed with Miri${NC}" - else - echo -e "${YELLOW}[WARN] Miri could not complete analysis (process spawning limitations)${NC}" - echo -e "${YELLOW} This is expected: RustOwl spawns cargo processes which Miri doesn't support${NC}" - echo -e "${YELLOW} Core RustOwl memory safety is validated by the system allocator switch${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_rustowl_analysis_${TIMESTAMP}.log${NC}" - fi - else - echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" - # Fallback: test basic RustOwl execution with --help - echo -e "${BLUE}Fallback: Testing basic RustOwl execution with Miri...${NC}" - echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" - - if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_basic_execution" "cargo miri run --bin rustowl -- --help"; then - echo -e "${GREEN}[OK] RustOwl basic execution passed with Miri${NC}" - else - echo -e 
"${YELLOW}[WARN] Miri could not complete basic execution${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_basic_execution_${TIMESTAMP}.log${NC}" - fi - fi - - echo "" + if [[ $RUN_MIRI -eq 0 ]]; then + return 0 + fi + + if [[ $HAS_MIRI -eq 0 ]]; then + echo -e "${YELLOW}Skipping Miri tests (component not installed)${NC}" + return 0 + fi + + echo -e "${BLUE}${BOLD}Running Miri Tests${NC}" + echo -e "${BLUE}================================${NC}" + echo "Miri detects undefined behavior in Rust code" + echo "" + + # First run unit tests which are guaranteed to work with Miri + echo -e "${BLUE}Running RustOwl unit tests with Miri...${NC}" + echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" + + # Choose test runner based on availability + local test_command + if [[ $HAS_NEXTEST -eq 1 ]]; then + test_command="cargo miri nextest run --lib" + echo -e "${BLUE}Using cargo-nextest for faster test execution${NC}" + else + test_command="cargo miri test --lib" + echo -e "${BLUE}Using standard cargo test${NC}" + fi + + if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_unit_tests" "$test_command"; then + echo -e "${GREEN}[OK] RustOwl unit tests passed with Miri${NC}" + else + echo -e "${RED}[FAIL] RustOwl unit tests failed with Miri${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_unit_tests_${TIMESTAMP}.log${NC}" + return 1 + fi + + # Test RustOwl's main functionality with Miri + echo -e "${YELLOW}Testing RustOwl execution with Miri...${NC}" + + if [ -d "$TEST_TARGET_PATH" ]; then + echo -e "${BLUE}Running RustOwl analysis with Miri...${NC}" + echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" + if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_rustowl_analysis" "cargo miri run --bin rustowl -- check $TEST_TARGET_PATH"; then + 
echo -e "${GREEN}[OK] RustOwl analysis completed with Miri${NC}" + else + echo -e "${YELLOW}[WARN] Miri could not complete analysis (process spawning limitations)${NC}" + echo -e "${YELLOW} This is expected: RustOwl spawns cargo processes which Miri doesn't support${NC}" + echo -e "${YELLOW} Core RustOwl memory safety is validated by the system allocator switch${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_rustowl_analysis_${TIMESTAMP}.log${NC}" + fi + else + echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" + # Fallback: test basic RustOwl execution with --help + echo -e "${BLUE}Fallback: Testing basic RustOwl execution with Miri...${NC}" + echo -e "${BLUE}Using Miri flags: -Zmiri-disable-isolation -Zmiri-permissive-provenance${NC}" + + if MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" RUSTFLAGS="--cfg miri" log_command_detailed "miri_basic_execution" "cargo miri run --bin rustowl -- --help"; then + echo -e "${GREEN}[OK] RustOwl basic execution passed with Miri${NC}" + else + echo -e "${YELLOW}[WARN] Miri could not complete basic execution${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/miri_basic_execution_${TIMESTAMP}.log${NC}" + fi + fi + + echo "" } run_thread_sanitizer_tests() { - if [[ $RUN_THREAD_SANITIZER -eq 0 ]]; then - return 0 - fi - - echo -e "${BLUE}Running ThreadSanitizer tests...${NC}" - echo -e "${BLUE}ThreadSanitizer detects data races and threading issues${NC}" - echo "" - - # ThreadSanitizer flags (generally more stable on macOS than AddressSanitizer) - local TSAN_FLAGS="-Zsanitizer=thread" - - echo -e "${BLUE}Running RustOwl with ThreadSanitizer...${NC}" - echo -e "${BLUE}Using RUSTFLAGS: ${TSAN_FLAGS}${NC}" - - if [ -d "$TEST_TARGET_PATH" ]; then - if RUSTFLAGS="${TSAN_FLAGS}" log_command_detailed "tsan_rustowl_analysis" "cargo +nightly run --bin rustowl -- check $TEST_TARGET_PATH"; then - echo -e "${GREEN}[OK] RustOwl analysis completed with ThreadSanitizer${NC}" - else - 
echo -e "${YELLOW}[WARN] ThreadSanitizer test completed with warnings${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/tsan_rustowl_analysis_${TIMESTAMP}.log${NC}" - fi - else - echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" - if RUSTFLAGS="${TSAN_FLAGS}" log_command_detailed "tsan_basic_execution" "cargo +nightly run --bin rustowl -- --help"; then - echo -e "${GREEN}[OK] RustOwl basic execution passed with ThreadSanitizer${NC}" - else - echo -e "${YELLOW}[WARN] ThreadSanitizer basic test completed with warnings${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/tsan_basic_execution_${TIMESTAMP}.log${NC}" - fi - fi - - echo "" + if [[ $RUN_THREAD_SANITIZER -eq 0 ]]; then + return 0 + fi + + echo -e "${BLUE}Running ThreadSanitizer tests...${NC}" + echo -e "${BLUE}ThreadSanitizer detects data races and threading issues${NC}" + echo "" + + # ThreadSanitizer flags (generally more stable on macOS than AddressSanitizer) + local TSAN_FLAGS="-Zsanitizer=thread" + + echo -e "${BLUE}Running RustOwl with ThreadSanitizer...${NC}" + echo -e "${BLUE}Using RUSTFLAGS: ${TSAN_FLAGS}${NC}" + + if [ -d "$TEST_TARGET_PATH" ]; then + if RUSTFLAGS="${TSAN_FLAGS}" log_command_detailed "tsan_rustowl_analysis" "cargo +nightly run --bin rustowl -- check $TEST_TARGET_PATH"; then + echo -e "${GREEN}[OK] RustOwl analysis completed with ThreadSanitizer${NC}" + else + echo -e "${YELLOW}[WARN] ThreadSanitizer test completed with warnings${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/tsan_rustowl_analysis_${TIMESTAMP}.log${NC}" + fi + else + echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" + if RUSTFLAGS="${TSAN_FLAGS}" log_command_detailed "tsan_basic_execution" "cargo +nightly run --bin rustowl -- --help"; then + echo -e "${GREEN}[OK] RustOwl basic execution passed with ThreadSanitizer${NC}" + else + echo -e "${YELLOW}[WARN] ThreadSanitizer basic test completed with warnings${NC}" + echo -e "${BLUE} Full output 
captured in: $LOG_DIR/tsan_basic_execution_${TIMESTAMP}.log${NC}" + fi + fi + + echo "" } run_valgrind_tests() { - if [[ $RUN_VALGRIND -eq 0 ]]; then - return 0 - fi - - if [[ $HAS_VALGRIND -eq 0 ]]; then - echo -e "${YELLOW}Skipping Valgrind tests (not available on this platform)${NC}" - return 0 - fi - - echo -e "${BLUE}${BOLD}Running Valgrind Tests${NC}" - echo -e "${BLUE}================================${NC}" - echo "Valgrind detects memory errors, leaks, and memory corruption" - echo "" - - # Build RustOwl for Valgrind testing (use release profile for better performance) - echo -e "${BLUE}Building RustOwl for Valgrind testing...${NC}" - if ! ./scripts/build/toolchain cargo build --release >/dev/null 2>&1; then - echo -e "${RED}[FAIL] Failed to build RustOwl for Valgrind testing${NC}" - return 1 - fi - - local rustowl_binary="./target/release/rustowl" - if [[ ! -f "$rustowl_binary" ]]; then - echo -e "${RED}[FAIL] RustOwl binary not found at $rustowl_binary${NC}" - return 1 - fi - - # Check if we have Valgrind suppressions file - local valgrind_suppressions="" - if [[ -f ".valgrind-suppressions" ]]; then - valgrind_suppressions="--suppressions=.valgrind-suppressions" - echo -e "${BLUE}Using suppressions file: $(pwd)/.valgrind-suppressions${NC}" - fi - - # Run Valgrind memory check on RustOwl - echo -e "${BLUE}Running RustOwl with Valgrind...${NC}" - echo -e "${BLUE}Using Valgrind flags: --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes${NC}" - if [ -d "$TEST_TARGET_PATH" ]; then - echo -e "${BLUE}Testing RustOwl analysis with Valgrind...${NC}" - local valgrind_cmd="valgrind --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes $valgrind_suppressions $rustowl_binary check $TEST_TARGET_PATH" - - if log_command_detailed "valgrind_rustowl_analysis" "$valgrind_cmd"; then - echo -e "${GREEN}[OK] RustOwl analysis completed with Valgrind (no memory errors detected)${NC}" - echo -e "${BLUE} Full output captured in: 
$LOG_DIR/valgrind_rustowl_analysis_${TIMESTAMP}.log${NC}" - else - echo -e "${RED}[FAIL] Valgrind detected memory errors in RustOwl analysis${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/valgrind_rustowl_analysis_${TIMESTAMP}.log${NC}" - return 1 - fi - else - echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" - echo -e "${BLUE}Fallback: Testing basic RustOwl execution with Valgrind...${NC}" - - local valgrind_cmd="valgrind --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes $valgrind_suppressions $rustowl_binary --help" - if log_command_detailed "valgrind_basic_execution" "$valgrind_cmd"; then - echo -e "${GREEN}[OK] RustOwl basic execution passed with Valgrind${NC}" - else - echo -e "${YELLOW}[WARN] Valgrind basic test completed with warnings${NC}" - return 1 - fi - echo -e "${BLUE} Full output captured in: $LOG_DIR/valgrind_basic_execution_${TIMESTAMP}.log${NC}" - fi - - echo "" + if [[ $RUN_VALGRIND -eq 0 ]]; then + return 0 + fi + + if [[ $HAS_VALGRIND -eq 0 ]]; then + echo -e "${YELLOW}Skipping Valgrind tests (not available on this platform)${NC}" + return 0 + fi + + echo -e "${BLUE}${BOLD}Running Valgrind Tests${NC}" + echo -e "${BLUE}================================${NC}" + echo "Valgrind detects memory errors, leaks, and memory corruption" + echo "" + + # Build RustOwl for Valgrind testing (use release profile for better performance) + echo -e "${BLUE}Building RustOwl for Valgrind testing...${NC}" + if ! ./scripts/build/toolchain cargo build --release >/dev/null 2>&1; then + echo -e "${RED}[FAIL] Failed to build RustOwl for Valgrind testing${NC}" + return 1 + fi + + local rustowl_binary="./target/release/rustowl" + if [[ ! 
-f "$rustowl_binary" ]]; then + echo -e "${RED}[FAIL] RustOwl binary not found at $rustowl_binary${NC}" + return 1 + fi + + # Check if we have Valgrind suppressions file + local valgrind_suppressions="" + if [[ -f ".valgrind-suppressions" ]]; then + valgrind_suppressions="--suppressions=.valgrind-suppressions" + echo -e "${BLUE}Using suppressions file: $(pwd)/.valgrind-suppressions${NC}" + fi + + # Run Valgrind memory check on RustOwl + echo -e "${BLUE}Running RustOwl with Valgrind...${NC}" + echo -e "${BLUE}Using Valgrind flags: --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes${NC}" + if [ -d "$TEST_TARGET_PATH" ]; then + echo -e "${BLUE}Testing RustOwl analysis with Valgrind...${NC}" + local valgrind_cmd="valgrind --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes $valgrind_suppressions $rustowl_binary check $TEST_TARGET_PATH" + + if log_command_detailed "valgrind_rustowl_analysis" "$valgrind_cmd"; then + echo -e "${GREEN}[OK] RustOwl analysis completed with Valgrind (no memory errors detected)${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/valgrind_rustowl_analysis_${TIMESTAMP}.log${NC}" + else + echo -e "${RED}[FAIL] Valgrind detected memory errors in RustOwl analysis${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/valgrind_rustowl_analysis_${TIMESTAMP}.log${NC}" + return 1 + fi + else + echo -e "${YELLOW}[WARN] No test target found at $TEST_TARGET_PATH${NC}" + echo -e "${BLUE}Fallback: Testing basic RustOwl execution with Valgrind...${NC}" + + local valgrind_cmd="valgrind --tool=memcheck --leak-check=full --show-leak-kinds=all --track-origins=yes $valgrind_suppressions $rustowl_binary --help" + if log_command_detailed "valgrind_basic_execution" "$valgrind_cmd"; then + echo -e "${GREEN}[OK] RustOwl basic execution passed with Valgrind${NC}" + else + echo -e "${YELLOW}[WARN] Valgrind basic test completed with warnings${NC}" + return 1 + fi + echo -e "${BLUE} Full output captured in: 
$LOG_DIR/valgrind_basic_execution_${TIMESTAMP}.log${NC}" + fi + + echo "" } # AddressSanitizer removed - incompatible with RustOwl's proc-macro dependencies # Alternative memory safety checking is provided by Valgrind and Miri run_audit_check() { - if [[ $RUN_AUDIT -eq 0 ]] || [[ $HAS_CARGO_AUDIT -eq 0 ]]; then - if [[ $RUN_AUDIT -eq 1 ]] && [[ $HAS_CARGO_AUDIT -eq 0 ]]; then - echo -e "${YELLOW}Skipping cargo-audit (not installed)${NC}" - fi - return 0 - fi - - echo -e "${BLUE}Scanning dependencies for vulnerabilities...${NC}" - if cargo audit; then - echo -e "${GREEN}[OK] No known vulnerabilities found${NC}" - else - echo -e "${RED}[ERROR] Security vulnerabilities detected${NC}" - return 1 - fi - - echo "" + if [[ $RUN_AUDIT -eq 0 ]] || [[ $HAS_CARGO_AUDIT -eq 0 ]]; then + if [[ $RUN_AUDIT -eq 1 ]] && [[ $HAS_CARGO_AUDIT -eq 0 ]]; then + echo -e "${YELLOW}Skipping cargo-audit (not installed)${NC}" + fi + return 0 + fi + + echo -e "${BLUE}Scanning dependencies for vulnerabilities...${NC}" + if cargo audit; then + echo -e "${GREEN}[OK] No known vulnerabilities found${NC}" + else + echo -e "${RED}[ERROR] Security vulnerabilities detected${NC}" + return 1 + fi + + echo "" } run_cargo_machete_tests() { - if [[ $RUN_CARGO_MACHETE -eq 0 ]]; then - return 0 - fi - - if [[ $HAS_CARGO_MACHETE -eq 0 ]]; then - echo -e "${YELLOW}Skipping cargo-machete tests (not installed)${NC}" - return 0 - fi - - echo -e "${BLUE}${BOLD}Running cargo-machete Tests${NC}" - echo -e "${BLUE}================================${NC}" - echo "cargo-machete detects unused dependencies in Cargo.toml" - echo "" - - echo -e "${BLUE}Scanning for unused dependencies...${NC}" - - # Run cargo-machete and capture output - if log_command_detailed "cargo_machete_analysis" "cargo machete"; then - echo -e "${GREEN}[OK] cargo-machete analysis completed${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log${NC}" - - # Check the log for unused dependencies - local 
log_file="$LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log" - if grep -q "unused dependencies" "$log_file" 2>/dev/null; then - local unused_count=$(grep -c "unused dependencies" "$log_file" 2>/dev/null || echo "0") - if [[ "$unused_count" -gt 0 ]]; then - echo -e "${YELLOW}[WARN] Found potential unused dependencies - check log for details${NC}" - echo -e "${YELLOW} Note: cargo-machete may report false positives for conditionally used deps${NC}" - else - echo -e "${GREEN}[OK] No unused dependencies detected${NC}" - fi - else - echo -e "${GREEN}[OK] No unused dependencies detected${NC}" - fi - else - # cargo-machete exits with non-zero when it finds unused dependencies - echo -e "${YELLOW}[INFO] cargo-machete found potential issues${NC}" - echo -e "${BLUE} Full output captured in: $LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log${NC}" - - # Don't fail the test suite for this - unused deps are warnings, not errors - local log_file="$LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log" - if [[ -f "$log_file" ]]; then - echo -e "${YELLOW} Check the log file to review any unused dependencies${NC}" - echo -e "${YELLOW} Note: Some dependencies may be used conditionally (features, targets, etc.)${NC}" - fi - fi - - echo "" + if [[ $RUN_CARGO_MACHETE -eq 0 ]]; then + return 0 + fi + + if [[ $HAS_CARGO_MACHETE -eq 0 ]]; then + echo -e "${YELLOW}Skipping cargo-machete tests (not installed)${NC}" + return 0 + fi + + echo -e "${BLUE}${BOLD}Running cargo-machete Tests${NC}" + echo -e "${BLUE}================================${NC}" + echo "cargo-machete detects unused dependencies in Cargo.toml" + echo "" + + echo -e "${BLUE}Scanning for unused dependencies...${NC}" + + # Run cargo-machete and capture output + if log_command_detailed "cargo_machete_analysis" "cargo machete"; then + echo -e "${GREEN}[OK] cargo-machete analysis completed${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log${NC}" + + # Check the log for unused dependencies + 
local log_file="$LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log" + if grep -q "unused dependencies" "$log_file" 2>/dev/null; then + local unused_count=$(grep -c "unused dependencies" "$log_file" 2>/dev/null || echo "0") + if [[ "$unused_count" -gt 0 ]]; then + echo -e "${YELLOW}[WARN] Found potential unused dependencies - check log for details${NC}" + echo -e "${YELLOW} Note: cargo-machete may report false positives for conditionally used deps${NC}" + else + echo -e "${GREEN}[OK] No unused dependencies detected${NC}" + fi + else + echo -e "${GREEN}[OK] No unused dependencies detected${NC}" + fi + else + # cargo-machete exits with non-zero when it finds unused dependencies + echo -e "${YELLOW}[INFO] cargo-machete found potential issues${NC}" + echo -e "${BLUE} Full output captured in: $LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log${NC}" + + # Don't fail the test suite for this - unused deps are warnings, not errors + local log_file="$LOG_DIR/cargo_machete_analysis_${TIMESTAMP}.log" + if [[ -f "$log_file" ]]; then + echo -e "${YELLOW} Check the log file to review any unused dependencies${NC}" + echo -e "${YELLOW} Note: Some dependencies may be used conditionally (features, targets, etc.)${NC}" + fi + fi + + echo "" } run_instruments_tests() { - echo -e "${YELLOW}Instruments tests not yet implemented${NC}" - return 0 + echo -e "${YELLOW}Instruments tests not yet implemented${NC}" + return 0 } # Enhanced logging function for tool outputs log_command_detailed() { - local test_name="$1" - local command="$2" - local log_file="$LOG_DIR/${test_name}_${TIMESTAMP}.log" - - # Create log directory if it doesn't exist - mkdir -p "$LOG_DIR" - - echo "===========================================" >> "$log_file" - echo "Test: $test_name" >> "$log_file" - echo "Command: $command" >> "$log_file" - echo "Timestamp: $(date)" >> "$log_file" - echo "Working Directory: $(pwd)" >> "$log_file" - echo "Environment: OS=$OS_TYPE, CI=$IS_CI" >> "$log_file" - echo 
"===========================================" >> "$log_file" - echo "" >> "$log_file" - - # Run the command and capture both stdout and stderr - echo "=== COMMAND OUTPUT ===" >> "$log_file" - if eval "$command" >> "$log_file" 2>&1; then - local exit_code=0 - echo "" >> "$log_file" - echo "=== COMMAND COMPLETED SUCCESSFULLY ===" >> "$log_file" - else - local exit_code=$? - echo "" >> "$log_file" - echo "=== COMMAND FAILED WITH EXIT CODE: $exit_code ===" >> "$log_file" - fi - - echo "End timestamp: $(date)" >> "$log_file" - echo "===========================================" >> "$log_file" - - return $exit_code + local test_name="$1" + local command="$2" + local log_file="$LOG_DIR/${test_name}_${TIMESTAMP}.log" + + # Create log directory if it doesn't exist + mkdir -p "$LOG_DIR" + + echo "===========================================" >>"$log_file" + echo "Test: $test_name" >>"$log_file" + echo "Command: $command" >>"$log_file" + echo "Timestamp: $(date)" >>"$log_file" + echo "Working Directory: $(pwd)" >>"$log_file" + echo "Environment: OS=$OS_TYPE, CI=$IS_CI" >>"$log_file" + echo "===========================================" >>"$log_file" + echo "" >>"$log_file" + + # Run the command and capture both stdout and stderr + echo "=== COMMAND OUTPUT ===" >>"$log_file" + if eval "$command" >>"$log_file" 2>&1; then + local exit_code=0 + echo "" >>"$log_file" + echo "=== COMMAND COMPLETED SUCCESSFULLY ===" >>"$log_file" + else + local exit_code=$? 
+ echo "" >>"$log_file" + echo "=== COMMAND FAILED WITH EXIT CODE: $exit_code ===" >>"$log_file" + fi + + echo "End timestamp: $(date)" >>"$log_file" + echo "===========================================" >>"$log_file" + + return $exit_code } # Logging configuration @@ -851,15 +872,15 @@ detect_ci_environment # Check for --check flag early to show tool status if [[ "$1" == "--check" ]]; then - echo -e "${BLUE}Checking tool availability and system readiness...${NC}" - echo "" - - detect_tools - show_tool_status - - echo "" - echo -e "${GREEN}System check completed.${NC}" - exit 0 + echo -e "${BLUE}Checking tool availability and system readiness...${NC}" + echo "" + + detect_tools + show_tool_status + + echo "" + echo -e "${GREEN}System check completed.${NC}" + exit 0 fi echo -e "${BLUE}Running security and memory safety analysis...${NC}" @@ -873,9 +894,9 @@ auto_configure_tests # Install missing tools if in CI or explicitly requested if [[ $IS_CI -eq 1 ]] || [[ "$1" == "--install" ]]; then - install_required_tools - # Re-detect tools after installation - detect_tools + install_required_tools + # Re-detect tools after installation + detect_tools fi # Check Rust version compatibility @@ -896,44 +917,44 @@ test_failures=0 # Run Miri tests if ! run_miri_tests; then - test_failures=$((test_failures + 1)) + test_failures=$((test_failures + 1)) fi # Run Valgrind tests (Linux only) if [[ "$OS_TYPE" == "Linux" ]] && [[ $RUN_VALGRIND -eq 1 ]]; then - if ! run_valgrind_tests; then - test_failures=$((test_failures + 1)) - fi + if ! run_valgrind_tests; then + test_failures=$((test_failures + 1)) + fi fi # Run cargo audit if ! run_audit_check; then - test_failures=$((test_failures + 1)) + test_failures=$((test_failures + 1)) fi # Run cargo machete if available if [[ $HAS_CARGO_MACHETE -eq 1 ]] && [[ $RUN_CARGO_MACHETE -eq 1 ]]; then - if ! run_cargo_machete_tests; then - test_failures=$((test_failures + 1)) - fi + if ! 
run_cargo_machete_tests; then + test_failures=$((test_failures + 1)) + fi fi # Run Instruments tests (macOS only) if [[ "$OS_TYPE" == "macOS" ]] && [[ $RUN_INSTRUMENTS -eq 1 ]] && [[ $HAS_INSTRUMENTS -eq 1 ]]; then - if ! run_instruments_tests; then - test_failures=$((test_failures + 1)) - fi + if ! run_instruments_tests; then + test_failures=$((test_failures + 1)) + fi fi # Final summary echo "" if [[ $test_failures -eq 0 ]]; then - echo -e "${GREEN}${BOLD}All security tests passed!${NC}" - echo -e "${GREEN}No security issues detected.${NC}" - exit 0 + echo -e "${GREEN}${BOLD}All security tests passed!${NC}" + echo -e "${GREEN}No security issues detected.${NC}" + exit 0 else - echo -e "${RED}${BOLD}Security tests failed!${NC}" - echo -e "${RED}$test_failures test suite(s) failed.${NC}" - echo -e "${BLUE}Check logs in $LOG_DIR/ for details.${NC}" - exit 1 + echo -e "${RED}${BOLD}Security tests failed!${NC}" + echo -e "${RED}$test_failures test suite(s) failed.${NC}" + echo -e "${BLUE}Check logs in $LOG_DIR/ for details.${NC}" + exit 1 fi diff --git a/scripts/size-check.sh b/scripts/size-check.sh index b0760aa8..2bb84e2e 100755 --- a/scripts/size-check.sh +++ b/scripts/size-check.sh @@ -12,7 +12,7 @@ cd "$REPO_ROOT" # Configuration SIZE_BASELINE_FILE="baselines/size_baseline.txt" -SIZE_THRESHOLD_PCT=10 # Warn if binary size increases by more than 10% +SIZE_THRESHOLD_PCT=10 # Warn if binary size increases by more than 10% # Colors for output RED='\033[0;31m' @@ -22,324 +22,324 @@ BLUE='\033[0;34m' NC='\033[0m' # No Color show_help() { - echo "RustOwl Binary Size Monitoring" - echo "" - echo "USAGE:" - echo " $0 [OPTIONS] [COMMAND]" - echo "" - echo "COMMANDS:" - echo " check Check current binary sizes (default)" - echo " baseline Create/update size baseline" - echo " compare Compare current sizes with baseline" - echo " clean Remove baseline file" - echo "" - echo "OPTIONS:" - echo " -h, --help Show this help message" - echo " -t, --threshold Set size increase 
threshold (default: ${SIZE_THRESHOLD_PCT}%)" - echo " -v, --verbose Show verbose output" - echo "" - echo "EXAMPLES:" - echo " $0 # Check current binary sizes" - echo " $0 baseline # Create baseline from current build" - echo " $0 compare # Compare with baseline" - echo " $0 -t 15 compare # Compare with 15% threshold" + echo "RustOwl Binary Size Monitoring" + echo "" + echo "USAGE:" + echo " $0 [OPTIONS] [COMMAND]" + echo "" + echo "COMMANDS:" + echo " check Check current binary sizes (default)" + echo " baseline Create/update size baseline" + echo " compare Compare current sizes with baseline" + echo " clean Remove baseline file" + echo "" + echo "OPTIONS:" + echo " -h, --help Show this help message" + echo " -t, --threshold Set size increase threshold (default: ${SIZE_THRESHOLD_PCT}%)" + echo " -v, --verbose Show verbose output" + echo "" + echo "EXAMPLES:" + echo " $0 # Check current binary sizes" + echo " $0 baseline # Create baseline from current build" + echo " $0 compare # Compare with baseline" + echo " $0 -t 15 compare # Compare with 15% threshold" } log_info() { - echo -e "${BLUE}[INFO]${NC} $1" + echo -e "${BLUE}[INFO]${NC} $1" } log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" + echo -e "${GREEN}[SUCCESS]${NC} $1" } log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" + echo -e "${YELLOW}[WARNING]${NC} $1" } log_error() { - echo -e "${RED}[ERROR]${NC} $1" + echo -e "${RED}[ERROR]${NC} $1" } # Get binary size in bytes get_binary_size() { - local binary_path="$1" - if [ -f "$binary_path" ]; then - stat --format="%s" "$binary_path" 2>/dev/null || stat -f%z "$binary_path" 2>/dev/null || echo "0" - else - echo "0" - fi + local binary_path="$1" + if [ -f "$binary_path" ]; then + stat --format="%s" "$binary_path" 2>/dev/null || stat -f%z "$binary_path" 2>/dev/null || echo "0" + else + echo "0" + fi } # Format size for human reading format_size() { - local size="$1" - if command -v numfmt &> /dev/null; then - numfmt --to=iec-i --suffix=B "$size" - else - 
# Fallback formatting - if [ "$size" -ge 1048576 ]; then - echo "$(($size / 1048576))MB" - elif [ "$size" -ge 1024 ]; then - echo "$(($size / 1024))KB" - else - echo "${size}B" - fi - fi + local size="$1" + if command -v numfmt &>/dev/null; then + numfmt --to=iec-i --suffix=B "$size" + else + # Fallback formatting + if [ "$size" -ge 1048576 ]; then + echo "$(($size / 1048576))MB" + elif [ "$size" -ge 1024 ]; then + echo "$(($size / 1024))KB" + else + echo "${size}B" + fi + fi } # Build binaries if they don't exist ensure_binaries_built() { - local binaries=( - "target/release/rustowl" - "target/release/rustowlc" - ) - - local need_build=false - for binary in "${binaries[@]}"; do - if [ ! -f "$binary" ]; then - need_build=true - break - fi - done - - if $need_build; then - log_info "Building release binaries..." - if ! ./scripts/build/toolchain cargo build --release; then - log_error "Failed to build release binaries" - exit 1 - fi - fi + local binaries=( + "target/release/rustowl" + "target/release/rustowlc" + ) + + local need_build=false + for binary in "${binaries[@]}"; do + if [ ! -f "$binary" ]; then + need_build=true + break + fi + done + + if $need_build; then + log_info "Building release binaries..." + if ! ./scripts/build/toolchain cargo build --release; then + log_error "Failed to build release binaries" + exit 1 + fi + fi } # Check current binary sizes check_sizes() { - log_info "Checking binary sizes..." - - ensure_binaries_built - - local binaries=( - "target/release/rustowl" - "target/release/rustowlc" - ) - - echo "" - printf "%-20s %10s %15s\n" "Binary" "Size" "Formatted" - printf "%-20s %10s %15s\n" "------" "----" "---------" - - for binary in "${binaries[@]}"; do - local size - size=$(get_binary_size "$binary") - local formatted - formatted=$(format_size "$size") - local name - name=$(basename "$binary") - - printf "%-20s %10d %15s\n" "$name" "$size" "$formatted" - done - echo "" + log_info "Checking binary sizes..." 
+ + ensure_binaries_built + + local binaries=( + "target/release/rustowl" + "target/release/rustowlc" + ) + + echo "" + printf "%-20s %10s %15s\n" "Binary" "Size" "Formatted" + printf "%-20s %10s %15s\n" "------" "----" "---------" + + for binary in "${binaries[@]}"; do + local size + size=$(get_binary_size "$binary") + local formatted + formatted=$(format_size "$size") + local name + name=$(basename "$binary") + + printf "%-20s %10d %15s\n" "$name" "$size" "$formatted" + done + echo "" } # Create size baseline create_baseline() { - log_info "Creating size baseline..." - - ensure_binaries_built - - local binaries=( - "target/release/rustowl" - "target/release/rustowlc" - ) - - # Create target directory if it doesn't exist - mkdir -p "$(dirname "$SIZE_BASELINE_FILE")" - - # Write baseline - { - echo "# RustOwl Binary Size Baseline" - echo "# Generated on $(date)" - echo "# Format: binary_name:size_in_bytes" - for binary in "${binaries[@]}"; do - local size - size=$(get_binary_size "$binary") - local name - name=$(basename "$binary") - echo "$name:$size" - done - } > "$SIZE_BASELINE_FILE" - - log_success "Baseline created at $SIZE_BASELINE_FILE" - - # Show what was recorded - echo "" - log_info "Baseline contents:" - check_sizes + log_info "Creating size baseline..." 
+ + ensure_binaries_built + + local binaries=( + "target/release/rustowl" + "target/release/rustowlc" + ) + + # Create target directory if it doesn't exist + mkdir -p "$(dirname "$SIZE_BASELINE_FILE")" + + # Write baseline + { + echo "# RustOwl Binary Size Baseline" + echo "# Generated on $(date)" + echo "# Format: binary_name:size_in_bytes" + for binary in "${binaries[@]}"; do + local size + size=$(get_binary_size "$binary") + local name + name=$(basename "$binary") + echo "$name:$size" + done + } >"$SIZE_BASELINE_FILE" + + log_success "Baseline created at $SIZE_BASELINE_FILE" + + # Show what was recorded + echo "" + log_info "Baseline contents:" + check_sizes } # Compare with baseline compare_with_baseline() { - if [ ! -f "$SIZE_BASELINE_FILE" ]; then - log_error "No baseline file found at $SIZE_BASELINE_FILE" - log_info "Run '$0 baseline' to create one" - exit 1 - fi - - log_info "Comparing with baseline (threshold: ${SIZE_THRESHOLD_PCT}%)..." - - ensure_binaries_built - - local binaries=( - "target/release/rustowl" - "target/release/rustowlc" - ) - - local any_issues=false - - echo "" - printf "%-20s %12s %12s %10s %8s\n" "Binary" "Baseline" "Current" "Diff" "Change" - printf "%-20s %12s %12s %10s %8s\n" "------" "--------" "-------" "----" "------" - - for binary in "${binaries[@]}"; do - local name - name=$(basename "$binary") - - # Get baseline size - local baseline_size - baseline_size=$(grep "^$name:" "$SIZE_BASELINE_FILE" | cut -d: -f2 || echo "0") - - if [ "$baseline_size" = "0" ]; then - log_warning "No baseline found for $name" - continue - fi - - # Get current size - local current_size - current_size=$(get_binary_size "$binary") - - if [ "$current_size" = "0" ]; then - log_error "Binary $name not found" - any_issues=true - continue - fi - - # Calculate difference - local diff=$((current_size - baseline_size)) - local pct_change=0 - - if [ "$baseline_size" -gt 0 ]; then - pct_change=$(echo "scale=1; $diff * 100 / $baseline_size" | bc 2>/dev/null || 
echo "0") - fi - - # Format for display - local baseline_fmt current_fmt diff_fmt - baseline_fmt=$(format_size "$baseline_size") - current_fmt=$(format_size "$current_size") - - if [ "$diff" -gt 0 ]; then - diff_fmt="+$(format_size "$diff")" - elif [ "$diff" -lt 0 ]; then - diff_fmt="-$(format_size $((-diff)))" - else - diff_fmt="0B" - fi - - printf "%-20s %12s %12s %10s %7s%%\n" "$name" "$baseline_fmt" "$current_fmt" "$diff_fmt" "$pct_change" - - # Check threshold - local abs_pct_change - abs_pct_change=$(echo "$pct_change" | tr -d '-') - - if (( $(echo "$abs_pct_change > $SIZE_THRESHOLD_PCT" | bc -l) )); then - if [ "$diff" -gt 0 ]; then - log_warning "$name size increased by $pct_change% (threshold: ${SIZE_THRESHOLD_PCT}%)" - else - log_info "$name size decreased by $pct_change%" - fi - any_issues=true - fi - done - - echo "" - - if $any_issues; then - log_warning "Some binaries exceeded size thresholds" - exit 1 - else - log_success "All binary sizes within acceptable ranges" - fi + if [ ! -f "$SIZE_BASELINE_FILE" ]; then + log_error "No baseline file found at $SIZE_BASELINE_FILE" + log_info "Run '$0 baseline' to create one" + exit 1 + fi + + log_info "Comparing with baseline (threshold: ${SIZE_THRESHOLD_PCT}%)..." 
+ + ensure_binaries_built + + local binaries=( + "target/release/rustowl" + "target/release/rustowlc" + ) + + local any_issues=false + + echo "" + printf "%-20s %12s %12s %10s %8s\n" "Binary" "Baseline" "Current" "Diff" "Change" + printf "%-20s %12s %12s %10s %8s\n" "------" "--------" "-------" "----" "------" + + for binary in "${binaries[@]}"; do + local name + name=$(basename "$binary") + + # Get baseline size + local baseline_size + baseline_size=$(grep "^$name:" "$SIZE_BASELINE_FILE" | cut -d: -f2 || echo "0") + + if [ "$baseline_size" = "0" ]; then + log_warning "No baseline found for $name" + continue + fi + + # Get current size + local current_size + current_size=$(get_binary_size "$binary") + + if [ "$current_size" = "0" ]; then + log_error "Binary $name not found" + any_issues=true + continue + fi + + # Calculate difference + local diff=$((current_size - baseline_size)) + local pct_change=0 + + if [ "$baseline_size" -gt 0 ]; then + pct_change=$(echo "scale=1; $diff * 100 / $baseline_size" | bc 2>/dev/null || echo "0") + fi + + # Format for display + local baseline_fmt current_fmt diff_fmt + baseline_fmt=$(format_size "$baseline_size") + current_fmt=$(format_size "$current_size") + + if [ "$diff" -gt 0 ]; then + diff_fmt="+$(format_size "$diff")" + elif [ "$diff" -lt 0 ]; then + diff_fmt="-$(format_size $((-diff)))" + else + diff_fmt="0B" + fi + + printf "%-20s %12s %12s %10s %7s%%\n" "$name" "$baseline_fmt" "$current_fmt" "$diff_fmt" "$pct_change" + + # Check threshold + local abs_pct_change + abs_pct_change=$(echo "$pct_change" | tr -d '-') + + if (($(echo "$abs_pct_change > $SIZE_THRESHOLD_PCT" | bc -l))); then + if [ "$diff" -gt 0 ]; then + log_warning "$name size increased by $pct_change% (threshold: ${SIZE_THRESHOLD_PCT}%)" + else + log_info "$name size decreased by $pct_change%" + fi + any_issues=true + fi + done + + echo "" + + if $any_issues; then + log_warning "Some binaries exceeded size thresholds" + exit 1 + else + log_success "All binary 
sizes within acceptable ranges" + fi } # Clean baseline clean_baseline() { - if [ -f "$SIZE_BASELINE_FILE" ]; then - rm "$SIZE_BASELINE_FILE" - log_success "Baseline file removed" - else - log_info "No baseline file to remove" - fi + if [ -f "$SIZE_BASELINE_FILE" ]; then + rm "$SIZE_BASELINE_FILE" + log_success "Baseline file removed" + else + log_info "No baseline file to remove" + fi } main() { - local command="check" - local verbose=false - - # Parse arguments - while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - show_help - exit 0 - ;; - -t|--threshold) - if [[ $# -lt 2 ]]; then - log_error "Option --threshold requires a value" - exit 1 - fi - SIZE_THRESHOLD_PCT="$2" - shift 2 - ;; - -v|--verbose) - verbose=true - shift - ;; - check|baseline|compare|clean) - command="$1" - shift - ;; - *) - log_error "Unknown option: $1" - show_help - exit 1 - ;; - esac - done - - # Ensure bc is available for calculations - if ! command -v bc &> /dev/null; then - log_error "bc (basic calculator) is required but not installed" - log_info "Install with: apt-get install bc" - exit 1 - fi - - case $command in - check) - check_sizes - ;; - baseline) - create_baseline - ;; - compare) - compare_with_baseline - ;; - clean) - clean_baseline - ;; - *) - log_error "Unknown command: $command" - show_help - exit 1 - ;; - esac + local command="check" + local verbose=false + + # Parse arguments + while [[ $# -gt 0 ]]; do + case $1 in + -h | --help) + show_help + exit 0 + ;; + -t | --threshold) + if [[ $# -lt 2 ]]; then + log_error "Option --threshold requires a value" + exit 1 + fi + SIZE_THRESHOLD_PCT="$2" + shift 2 + ;; + -v | --verbose) + verbose=true + shift + ;; + check | baseline | compare | clean) + command="$1" + shift + ;; + *) + log_error "Unknown option: $1" + show_help + exit 1 + ;; + esac + done + + # Ensure bc is available for calculations + if ! 
command -v bc &>/dev/null; then + log_error "bc (basic calculator) is required but not installed" + log_info "Install with: apt-get install bc" + exit 1 + fi + + case $command in + check) + check_sizes + ;; + baseline) + create_baseline + ;; + compare) + compare_with_baseline + ;; + clean) + clean_baseline + ;; + *) + log_error "Unknown command: $command" + show_help + exit 1 + ;; + esac } main "$@" diff --git a/selene.toml b/selene.toml index 5867a2a2..eac3e9b4 100644 --- a/selene.toml +++ b/selene.toml @@ -1,4 +1,4 @@ -std="vim" +std = "vim" [lints] -mixed_table="allow" +mixed_table = "allow" diff --git a/src/bin/core/analyze/transform.rs b/src/bin/core/analyze/transform.rs deleted file mode 100644 index b1a85899..00000000 --- a/src/bin/core/analyze/transform.rs +++ /dev/null @@ -1,259 +0,0 @@ -use rayon::prelude::*; -use rustc_borrowck::consumers::{BorrowIndex, BorrowSet, RichLocation}; -use rustc_hir::def_id::LocalDefId; -use rustc_middle::{ - mir::{ - BasicBlocks, Body, BorrowKind, Local, Location, Operand, Rvalue, StatementKind, - TerminatorKind, VarDebugInfoContents, - }, - ty::{TyCtxt, TypeFoldable, TypeFolder}, -}; -use rustc_span::source_map::SourceMap; -use rustowl::models::*; -use std::collections::{HashMap, HashSet}; - -/// RegionEraser to erase region variables from MIR body -/// This is required to hash MIR body -struct RegionEraser<'tcx> { - tcx: TyCtxt<'tcx>, -} -impl<'tcx> TypeFolder> for RegionEraser<'tcx> { - fn cx(&self) -> TyCtxt<'tcx> { - self.tcx - } - fn fold_region( - &mut self, - _r: as rustc_type_ir::Interner>::Region, - ) -> as rustc_type_ir::Interner>::Region { - self.tcx.lifetimes.re_static - } -} - -/// Erase region variables in MIR body -/// Refer: [`RegionEraser`] -pub fn erase_region_variables<'tcx>(tcx: TyCtxt<'tcx>, body: Body<'tcx>) -> Body<'tcx> { - let mut eraser = RegionEraser { tcx }; - - body.fold_with(&mut eraser) -} - -/// collect user defined variables from debug info in MIR -pub fn collect_user_vars( - source: &str, - 
offset: u32, - body: &Body<'_>, -) -> HashMap { - body.var_debug_info - // this cannot be par_iter since body cannot send - .iter() - .filter_map(|debug| match &debug.value { - VarDebugInfoContents::Place(place) => { - super::range_from_span(source, debug.source_info.span, offset) - .map(|range| (place.local, (range, debug.name.as_str().to_owned()))) - } - _ => None, - }) - .collect() -} - -/// Collect and transform [`BasicBlocks`] into our data structure [`MirBasicBlock`]s. -pub fn collect_basic_blocks( - fn_id: LocalDefId, - source: &str, - offset: u32, - basic_blocks: &BasicBlocks<'_>, - source_map: &SourceMap, -) -> Vec { - basic_blocks - .iter_enumerated() - .map(|(_bb, bb_data)| { - let statements: Vec<_> = bb_data - .statements - .iter() - // `source_map` is not Send - .filter(|stmt| stmt.source_info.span.is_visible(source_map)) - .collect(); - let statements = statements - .par_iter() - .filter_map(|statement| match &statement.kind { - StatementKind::Assign(v) => { - let (place, rval) = &**v; - let target_local_index = place.local.as_u32(); - let rv = match rval { - Rvalue::Use(Operand::Move(p)) => { - let local = p.local; - super::range_from_span(source, statement.source_info.span, offset) - .map(|range| MirRval::Move { - target_local: FnLocal::new( - local.as_u32(), - fn_id.local_def_index.as_u32(), - ), - range, - }) - } - Rvalue::Ref(_region, kind, place) => { - let mutable = matches!(kind, BorrowKind::Mut { .. 
}); - let local = place.local; - let outlive = None; - super::range_from_span(source, statement.source_info.span, offset) - .map(|range| MirRval::Borrow { - target_local: FnLocal::new( - local.as_u32(), - fn_id.local_def_index.as_u32(), - ), - range, - mutable, - outlive, - }) - } - _ => None, - }; - super::range_from_span(source, statement.source_info.span, offset).map( - |range| MirStatement::Assign { - target_local: FnLocal::new( - target_local_index, - fn_id.local_def_index.as_u32(), - ), - range, - rval: rv, - }, - ) - } - _ => super::range_from_span(source, statement.source_info.span, offset) - .map(|range| MirStatement::Other { range }), - }) - .collect(); - let terminator = - bb_data - .terminator - .as_ref() - .and_then(|terminator| match &terminator.kind { - TerminatorKind::Drop { place, .. } => { - super::range_from_span(source, terminator.source_info.span, offset).map( - |range| MirTerminator::Drop { - local: FnLocal::new( - place.local.as_u32(), - fn_id.local_def_index.as_u32(), - ), - range, - }, - ) - } - TerminatorKind::Call { - destination, - fn_span, - .. 
- } => super::range_from_span(source, *fn_span, offset).map(|fn_span| { - MirTerminator::Call { - destination_local: FnLocal::new( - destination.local.as_u32(), - fn_id.local_def_index.as_u32(), - ), - fn_span, - } - }), - _ => super::range_from_span(source, terminator.source_info.span, offset) - .map(|range| MirTerminator::Other { range }), - }); - MirBasicBlock { - statements, - terminator, - } - }) - .collect() -} - -fn statement_location_to_range( - basic_blocks: &[MirBasicBlock], - basic_block: usize, - statement: usize, -) -> Option { - basic_blocks.get(basic_block).and_then(|bb| { - if statement < bb.statements.len() { - bb.statements.get(statement).map(|v| v.range()) - } else { - bb.terminator.as_ref().map(|v| v.range()) - } - }) -} - -pub fn rich_locations_to_ranges( - basic_blocks: &[MirBasicBlock], - locations: &[RichLocation], -) -> Vec { - let mut starts = Vec::new(); - let mut mids = Vec::new(); - for rich in locations { - match rich { - RichLocation::Start(l) => { - starts.push((l.block, l.statement_index)); - } - RichLocation::Mid(l) => { - mids.push((l.block, l.statement_index)); - } - } - } - super::sort_locs(&mut starts); - super::sort_locs(&mut mids); - starts - .par_iter() - .zip(mids.par_iter()) - .filter_map(|(s, m)| { - let sr = statement_location_to_range(basic_blocks, s.0.index(), s.1); - let mr = statement_location_to_range(basic_blocks, m.0.index(), m.1); - match (sr, mr) { - (Some(s), Some(m)) => Range::new(s.from(), m.until()), - _ => None, - } - }) - .collect() -} - -/// Our representation of [`rustc_borrowck::consumers::BorrowData`] -#[allow(unused)] -pub enum BorrowData { - Shared { borrowed: Local, assigned: Local }, - Mutable { borrowed: Local, assigned: Local }, -} - -/// A map type from [`BorrowIndex`] to [`BorrowData`] -pub struct BorrowMap { - location_map: Vec<(Location, BorrowData)>, - local_map: HashMap>, -} -impl BorrowMap { - /// Get [`BorrowMap`] from [`BorrowSet`] - pub fn new(borrow_set: &BorrowSet<'_>) -> Self { - let 
mut location_map = Vec::new(); - // BorrowIndex corresponds to Location index - for (location, data) in borrow_set.location_map().iter() { - let data = if data.kind().mutability().is_mut() { - BorrowData::Mutable { - borrowed: data.borrowed_place().local, - assigned: data.assigned_place().local, - } - } else { - BorrowData::Shared { - borrowed: data.borrowed_place().local, - assigned: data.assigned_place().local, - } - }; - location_map.push((*location, data)); - } - let local_map = borrow_set - .local_map() - .iter() - .map(|(local, borrows)| (*local, borrows.iter().copied().collect())) - .collect(); - Self { - location_map, - local_map, - } - } - pub fn get_from_borrow_index(&self, borrow: BorrowIndex) -> Option<&(Location, BorrowData)> { - self.location_map.get(borrow.index()) - } - pub fn local_map(&self) -> &HashMap> { - &self.local_map - } -} diff --git a/src/bin/core/cache.rs b/src/bin/core/cache.rs deleted file mode 100644 index f6c069df..00000000 --- a/src/bin/core/cache.rs +++ /dev/null @@ -1,128 +0,0 @@ -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_middle::ty::TyCtxt; -use rustc_query_system::ich::StableHashingContext; -use rustc_stable_hash::{FromStableHash, SipHasher128Hash}; -use rustowl::models::*; -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use std::io::Write; -use std::sync::{LazyLock, Mutex}; - -pub static CACHE: LazyLock>> = LazyLock::new(|| Mutex::new(None)); - -#[derive(Debug, Clone)] -struct StableHashString(String); -impl StableHashString { - pub fn get(self) -> String { - self.0 - } -} -impl FromStableHash for StableHashString { - type Hash = SipHasher128Hash; - fn from(hash: Self::Hash) -> Self { - let byte0 = hash.0[0] as u128; - let byte1 = hash.0[1] as u128; - let byte = (byte0 << 64) | byte1; - Self(format!("{byte:x}")) - } -} - -pub struct Hasher<'a> { - hasher: StableHasher, - hash_ctx: StableHashingContext<'a>, -} - -impl<'tcx> Hasher<'tcx> { - pub fn new(tcx: 
TyCtxt<'tcx>) -> Self { - Self { - hasher: StableHasher::default(), - hash_ctx: StableHashingContext::new(tcx.sess, tcx.untracked()), - } - } - - fn finish(self) -> String { - self.hasher.finish::().get() - } - - pub fn get_hash( - tcx: TyCtxt<'tcx>, - target: impl HashStable>, - ) -> String { - let mut new = Self::new(tcx); - target.hash_stable(&mut new.hash_ctx, &mut new.hasher); - new.finish() - } -} - -/// Single file cache body -/// -/// this is a map: file hash -> (MIR body hash -> analyze result) -/// -/// Note: Cache can be utilized when neither -/// the MIR body nor the entire file is modified. -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(transparent)] -pub struct CacheData(HashMap>); -impl CacheData { - pub fn new() -> Self { - Self(HashMap::new()) - } - pub fn get_cache(&self, file_hash: &str, mir_hash: &str) -> Option { - self.0.get(file_hash).and_then(|v| v.get(mir_hash)).cloned() - } - pub fn insert_cache(&mut self, file_hash: String, mir_hash: String, analyzed: Function) { - self.0 - .entry(file_hash) - .or_default() - .insert(mir_hash, analyzed); - } -} - -/// Get cache data -/// -/// If cache is not enabled, then return None. -/// If file is not exists, it returns empty [`CacheData`]. 
-pub fn get_cache(krate: &str) -> Option { - if let Some(cache_path) = rustowl::cache::get_cache_path() { - let cache_path = cache_path.join(format!("{krate}.json")); - let s = match std::fs::read_to_string(&cache_path) { - Ok(v) => v, - Err(e) => { - log::warn!("failed to read incremental cache file: {e}"); - return Some(CacheData::new()); - } - }; - let read = serde_json::from_str(&s).ok(); - log::info!("cache read: {}", cache_path.display()); - read - } else { - None - } -} - -pub fn write_cache(krate: &str, cache: &CacheData) { - if let Some(cache_path) = rustowl::cache::get_cache_path() { - if let Err(e) = std::fs::create_dir_all(&cache_path) { - log::warn!("failed to create cache dir: {e}"); - return; - } - let cache_path = cache_path.join(format!("{krate}.json")); - let s = serde_json::to_string(cache).unwrap(); - let mut f = match std::fs::OpenOptions::new() - .write(true) - .create(true) - .truncate(true) - .open(&cache_path) - { - Ok(v) => v, - Err(e) => { - log::warn!("failed to open incremental cache file: {e}"); - return; - } - }; - if let Err(e) = f.write_all(s.as_bytes()) { - log::warn!("failed to write incremental cache file: {e}"); - } - log::info!("incremental cache saved: {}", cache_path.display()); - } -} diff --git a/src/bin/core/mod.rs b/src/bin/core/mod.rs deleted file mode 100644 index 6827dfd2..00000000 --- a/src/bin/core/mod.rs +++ /dev/null @@ -1,159 +0,0 @@ -mod analyze; -mod cache; - -use analyze::{AnalyzeResult, MirAnalyzer, MirAnalyzerInitResult}; -use rustc_hir::def_id::{LOCAL_CRATE, LocalDefId}; -use rustc_interface::interface; -use rustc_middle::{mir::ConcreteOpaqueTypes, query::queries, ty::TyCtxt, util::Providers}; -use rustc_session::config; -use rustowl::models::*; -use std::collections::HashMap; -use std::env; -use std::sync::{LazyLock, Mutex, atomic::AtomicBool}; -use tokio::{ - runtime::{Builder, Runtime}, - task::JoinSet, -}; - -pub struct RustcCallback; -impl rustc_driver::Callbacks for RustcCallback {} - -static 
ATOMIC_TRUE: AtomicBool = AtomicBool::new(true); -static TASKS: LazyLock>> = - LazyLock::new(|| Mutex::new(JoinSet::new())); -// make tokio runtime -static RUNTIME: LazyLock = LazyLock::new(|| { - let worker_threads = std::thread::available_parallelism() - .map(|n| (n.get() / 2).clamp(2, 8)) - .unwrap_or(4); - - Builder::new_multi_thread() - .enable_all() - .worker_threads(worker_threads) - .thread_stack_size(128 * 1024 * 1024) - .build() - .unwrap() -}); - -fn override_queries(_session: &rustc_session::Session, local: &mut Providers) { - local.mir_borrowck = mir_borrowck; -} -fn mir_borrowck(tcx: TyCtxt<'_>, def_id: LocalDefId) -> queries::mir_borrowck::ProvidedValue<'_> { - log::info!("start borrowck of {def_id:?}"); - - let analyzer = MirAnalyzer::init(tcx, def_id); - - { - let mut tasks = TASKS.lock().unwrap(); - match analyzer { - MirAnalyzerInitResult::Cached(cached) => { - handle_analyzed_result(tcx, cached); - } - MirAnalyzerInitResult::Analyzer(analyzer) => { - tasks.spawn_on(async move { analyzer.await.analyze() }, RUNTIME.handle()); - } - } - - log::info!("there are {} tasks", tasks.len()); - while let Some(Ok(result)) = tasks.try_join_next() { - log::info!("one task joined"); - handle_analyzed_result(tcx, result); - } - } - - for def_id in tcx.nested_bodies_within(def_id) { - let _ = mir_borrowck(tcx, def_id); - } - - Ok(tcx - .arena - .alloc(ConcreteOpaqueTypes(indexmap::IndexMap::default()))) -} - -pub struct AnalyzerCallback; -impl rustc_driver::Callbacks for AnalyzerCallback { - fn config(&mut self, config: &mut interface::Config) { - config.using_internal_features = &ATOMIC_TRUE; - config.opts.unstable_opts.mir_opt_level = Some(0); - config.opts.unstable_opts.polonius = config::Polonius::Next; - config.opts.incremental = None; - config.override_queries = Some(override_queries); - config.make_codegen_backend = None; - } - fn after_expansion<'tcx>( - &mut self, - _compiler: &interface::Compiler, - tcx: TyCtxt<'tcx>, - ) -> rustc_driver::Compilation { 
- let result = rustc_driver::catch_fatal_errors(|| tcx.analysis(())); - - // join all tasks after all analysis finished - // - // allow clippy::await_holding_lock because `tokio::sync::Mutex` cannot use - // for TASKS because block_on cannot be used in `mir_borrowck`. - #[allow(clippy::await_holding_lock)] - RUNTIME.block_on(async move { - while let Some(Ok(result)) = { TASKS.lock().unwrap().join_next().await } { - log::info!("one task joined"); - handle_analyzed_result(tcx, result); - } - if let Some(cache) = cache::CACHE.lock().unwrap().as_ref() { - cache::write_cache(&tcx.crate_name(LOCAL_CRATE).to_string(), cache); - } - }); - - if result.is_ok() { - rustc_driver::Compilation::Continue - } else { - rustc_driver::Compilation::Stop - } - } -} - -pub fn handle_analyzed_result(tcx: TyCtxt<'_>, analyzed: AnalyzeResult) { - if let Some(cache) = cache::CACHE.lock().unwrap().as_mut() { - cache.insert_cache( - analyzed.file_hash.clone(), - analyzed.mir_hash.clone(), - analyzed.analyzed.clone(), - ); - } - let krate = Crate(HashMap::from([( - analyzed.file_name.to_owned(), - File { - items: vec![analyzed.analyzed], - }, - )])); - // get currently-compiling crate name - let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); - let ws = Workspace(HashMap::from([(crate_name.clone(), krate)])); - println!("{}", serde_json::to_string(&ws).unwrap()); -} - -pub fn run_compiler() -> i32 { - let mut args: Vec = env::args().collect(); - // by using `RUSTC_WORKSPACE_WRAPPER`, arguments will be as follows: - // For dependencies: rustowlc [args...] - // For user workspace: rustowlc rustowlc [args...] 
- // So we skip analysis if currently-compiling crate is one of the dependencies - if args.first() == args.get(1) { - args = args.into_iter().skip(1).collect(); - } else { - return rustc_driver::catch_with_exit_code(|| { - rustc_driver::run_compiler(&args, &mut RustcCallback) - }); - } - - for arg in &args { - // utilize default rustc to avoid unexpected behavior if these arguments are passed - if arg == "-vV" || arg == "--version" || arg.starts_with("--print") { - return rustc_driver::catch_with_exit_code(|| { - rustc_driver::run_compiler(&args, &mut RustcCallback) - }); - } - } - - rustc_driver::catch_with_exit_code(|| { - rustc_driver::run_compiler(&args, &mut AnalyzerCallback); - }) -} diff --git a/src/bin/rustowl.rs b/src/bin/rustowl.rs deleted file mode 100644 index b50ac559..00000000 --- a/src/bin/rustowl.rs +++ /dev/null @@ -1,161 +0,0 @@ -//! # RustOwl cargo-owlsp -//! -//! An LSP server for visualizing ownership and lifetimes in Rust, designed for debugging and optimization. - -use clap::{CommandFactory, Parser}; -use clap_complete::generate; -use rustowl::*; -use std::env; -use std::io; -use tower_lsp::{LspService, Server}; - -use crate::cli::{Cli, Commands, ToolchainCommands}; - -#[cfg(all(not(target_env = "msvc"), not(miri)))] -use tikv_jemallocator::Jemalloc; - -// Use jemalloc by default, but fall back to system allocator for Miri -#[cfg(all(not(target_env = "msvc"), not(miri)))] -#[global_allocator] -static GLOBAL: Jemalloc = Jemalloc; - -fn set_log_level(default: log::LevelFilter) { - log::set_max_level( - env::var("RUST_LOG") - .ok() - .and_then(|v| v.parse().ok()) - .unwrap_or(default), - ); -} - -/// Handles the execution of RustOwl CLI commands. -/// -/// This function processes a specific CLI command and executes the appropriate -/// subcommand. It handles all CLI operations including analysis checking, cache cleaning, -/// toolchain management, and shell completion generation. 
-/// -/// # Arguments -/// -/// * `command` - The specific command to execute -/// -/// # Returns -/// -/// This function may exit the process with appropriate exit codes: -/// - Exit code 0 on successful analysis -/// - Exit code 1 on analysis failure or toolchain setup errors -async fn handle_command(command: Commands, rustc_threads: usize) { - match command { - Commands::Check(command_options) => { - let path = command_options.path.unwrap_or(env::current_dir().unwrap()); - - if Backend::check_with_options( - &path, - command_options.all_targets, - command_options.all_features, - rustc_threads, - ) - .await - { - log::info!("Successfully analyzed"); - std::process::exit(0); - } - log::error!("Analyze failed"); - std::process::exit(1); - } - Commands::Clean => { - if let Ok(meta) = cargo_metadata::MetadataCommand::new().exec() { - let target = meta.target_directory.join("owl"); - tokio::fs::remove_dir_all(&target).await.ok(); - } - } - Commands::Toolchain(command_options) => { - if let Some(arg) = command_options.command { - match arg { - ToolchainCommands::Install { - path, - skip_rustowl_toolchain, - } => { - let path = path.unwrap_or(toolchain::FALLBACK_RUNTIME_DIR.clone()); - if toolchain::setup_toolchain(&path, skip_rustowl_toolchain) - .await - .is_err() - { - std::process::exit(1); - } - } - ToolchainCommands::Uninstall => { - rustowl::toolchain::uninstall_toolchain().await; - } - } - } - } - Commands::Completions(command_options) => { - set_log_level("off".parse().unwrap()); - let shell = command_options.shell; - generate(shell, &mut Cli::command(), "rustowl", &mut io::stdout()); - } - } -} - -/// Initializes the logging system with colors and default log level -fn initialize_logging() { - simple_logger::SimpleLogger::new() - .with_colors(true) - .init() - .unwrap(); - set_log_level("info".parse().unwrap()); -} - -/// Handles the case when no command is provided (version display or LSP server mode) -async fn handle_no_command(args: Cli, rustc_threads: 
usize) { - if args.version { - display_version(args.quiet == 0); - return; - } - - start_lsp_server(rustc_threads).await; -} - -/// Displays the version information -fn display_version(show_prefix: bool) { - if show_prefix { - print!("RustOwl "); - } - println!("v{}", clap::crate_version!()); -} - -/// Starts the LSP server -async fn start_lsp_server(rustc_threads: usize) { - set_log_level("warn".parse().unwrap()); - eprintln!("RustOwl v{}", clap::crate_version!()); - eprintln!("This is an LSP server. You can use --help flag to show help."); - - let stdin = tokio::io::stdin(); - let stdout = tokio::io::stdout(); - - let (service, socket) = LspService::build(Backend::new(rustc_threads)) - .custom_method("rustowl/cursor", Backend::cursor) - .custom_method("rustowl/analyze", Backend::analyze) - .finish(); - - Server::new(stdin, stdout, socket).serve(service).await; -} - -#[tokio::main] -async fn main() { - rustls::crypto::aws_lc_rs::default_provider() - .install_default() - .expect("crypto provider already installed"); - - initialize_logging(); - - let parsed_args = Cli::parse(); - let rustc_threads = parsed_args - .rustc_threads - .unwrap_or(utils::get_default_parallel_count()); - - match parsed_args.command { - Some(command) => handle_command(command, rustc_threads).await, - None => handle_no_command(parsed_args, rustc_threads).await, - } -} diff --git a/src/bin/rustowlc.rs b/src/bin/rustowlc.rs deleted file mode 100644 index 5f6b8f5d..00000000 --- a/src/bin/rustowlc.rs +++ /dev/null @@ -1,79 +0,0 @@ -//! # RustOwl rustowlc -//! -//! A compiler implementation for visualizing ownership and lifetimes in Rust, designed for debugging and optimization. 
- -#![feature(rustc_private)] - -pub extern crate indexmap; -pub extern crate polonius_engine; -pub extern crate rustc_borrowck; -pub extern crate rustc_data_structures; -pub extern crate rustc_driver; -pub extern crate rustc_errors; -pub extern crate rustc_hash; -pub extern crate rustc_hir; -pub extern crate rustc_index; -pub extern crate rustc_interface; -pub extern crate rustc_middle; -pub extern crate rustc_query_system; -pub extern crate rustc_session; -pub extern crate rustc_span; -pub extern crate rustc_stable_hash; -pub extern crate rustc_type_ir; -pub extern crate smallvec; - -pub mod core; - -use std::process::exit; - -fn main() { - // This is cited from [rustc](https://github.com/rust-lang/rust/blob/3014e79f9c8d5510ea7b3a3b70d171d0948b1e96/compiler/rustc/src/main.rs). - // MIT License - #[cfg(not(target_env = "msvc"))] - { - use std::os::raw::{c_int, c_void}; - - use tikv_jemalloc_sys as jemalloc_sys; - - #[used] - static _F1: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::calloc; - #[used] - static _F2: unsafe extern "C" fn(*mut *mut c_void, usize, usize) -> c_int = - jemalloc_sys::posix_memalign; - #[used] - static _F3: unsafe extern "C" fn(usize, usize) -> *mut c_void = jemalloc_sys::aligned_alloc; - #[used] - static _F4: unsafe extern "C" fn(usize) -> *mut c_void = jemalloc_sys::malloc; - #[used] - static _F5: unsafe extern "C" fn(*mut c_void, usize) -> *mut c_void = jemalloc_sys::realloc; - #[used] - static _F6: unsafe extern "C" fn(*mut c_void) = jemalloc_sys::free; - - #[cfg(target_os = "macos")] - { - unsafe extern "C" { - fn _rjem_je_zone_register(); - } - - #[used] - static _F7: unsafe extern "C" fn() = _rjem_je_zone_register; - } - } - - simple_logger::SimpleLogger::new() - .env() - .with_colors(true) - .init() - .unwrap(); - - // rayon panics without this only on Windows - #[cfg(target_os = "windows")] - { - rayon::ThreadPoolBuilder::new() - .stack_size(4 * 1024 * 1024) - .build_global() - .unwrap(); - } - 
- exit(core::run_compiler()) -} diff --git a/src/cache.rs b/src/cache.rs deleted file mode 100644 index dc2cad68..00000000 --- a/src/cache.rs +++ /dev/null @@ -1,17 +0,0 @@ -use std::env; -use std::path::{Path, PathBuf}; -use tokio::process::Command; - -pub fn is_cache() -> bool { - !env::var("RUSTOWL_CACHE") - .map(|v| v == "false" || v == "0") - .unwrap_or(false) -} - -pub fn set_cache_path(cmd: &mut Command, target_dir: impl AsRef) { - cmd.env("RUSTOWL_CACHE_DIR", target_dir.as_ref().join("cache")); -} - -pub fn get_cache_path() -> Option { - env::var("RUSTOWL_CACHE_DIR").map(PathBuf::from).ok() -} diff --git a/src/lib.rs b/src/lib.rs deleted file mode 100644 index d836e373..00000000 --- a/src/lib.rs +++ /dev/null @@ -1,17 +0,0 @@ -//! # RustOwl lib -//! -//! Libraries that used in RustOwl - -pub mod cache; -pub mod cli; -pub mod lsp; -pub mod models; -pub mod shells; -pub mod toolchain; -pub mod utils; - -pub use lsp::backend::Backend; - -// Miri-specific memory safety tests -#[cfg(test)] -mod miri_tests; diff --git a/src/lsp/analyze.rs b/src/lsp/analyze.rs deleted file mode 100644 index e1c1150c..00000000 --- a/src/lsp/analyze.rs +++ /dev/null @@ -1,264 +0,0 @@ -use crate::{cache::*, models::*, toolchain}; -use std::path::{Path, PathBuf}; -use std::process::Stdio; -use std::sync::Arc; -use tokio::{ - io::{AsyncBufReadExt, BufReader}, - process, - sync::{Notify, mpsc}, -}; - -#[derive(serde::Deserialize, Clone, Debug)] -pub struct CargoCheckMessageTarget { - name: String, -} -#[derive(serde::Deserialize, Clone, Debug)] -#[serde(tag = "reason", rename_all = "kebab-case")] -pub enum CargoCheckMessage { - #[allow(unused)] - CompilerArtifact { target: CargoCheckMessageTarget }, - #[allow(unused)] - BuildFinished {}, -} - -pub enum AnalyzerEvent { - CrateChecked { - package: String, - package_count: usize, - }, - Analyzed(Workspace), -} - -#[derive(Clone)] -pub struct Analyzer { - path: PathBuf, - metadata: Option, - rustc_threads: usize, -} - -impl Analyzer { - pub 
async fn new(path: impl AsRef, rustc_threads: usize) -> Result { - let path = path.as_ref().to_path_buf(); - - let mut cargo_cmd = toolchain::setup_cargo_command(rustc_threads).await; - - cargo_cmd - .args([ - "metadata".to_owned(), - "--filter-platform".to_owned(), - toolchain::HOST_TUPLE.to_owned(), - ]) - .current_dir(if path.is_file() { - path.parent().unwrap() - } else { - &path - }) - .stdout(Stdio::piped()) - .stderr(Stdio::null()); - - let metadata = if let Ok(child) = cargo_cmd.spawn() - && let Ok(output) = child.wait_with_output().await - { - let data = String::from_utf8_lossy(&output.stdout); - cargo_metadata::MetadataCommand::parse(data).ok() - } else { - None - }; - - if let Some(metadata) = metadata { - Ok(Self { - path: metadata.workspace_root.as_std_path().to_path_buf(), - metadata: Some(metadata), - rustc_threads, - }) - } else if path.is_file() && path.extension().map(|v| v == "rs").unwrap_or(false) { - Ok(Self { - path, - metadata: None, - rustc_threads, - }) - } else { - log::warn!("Invalid analysis target: {}", path.display()); - Err(()) - } - } - pub fn target_path(&self) -> &Path { - &self.path - } - pub fn workspace_path(&self) -> Option<&Path> { - if self.metadata.is_some() { - Some(&self.path) - } else { - None - } - } - - pub async fn analyze(&self, all_targets: bool, all_features: bool) -> AnalyzeEventIter { - if let Some(metadata) = &self.metadata - && metadata.root_package().is_some() - { - self.analyze_package(metadata, all_targets, all_features) - .await - } else { - self.analyze_single_file(&self.path).await - } - } - - async fn analyze_package( - &self, - metadata: &cargo_metadata::Metadata, - all_targets: bool, - all_features: bool, - ) -> AnalyzeEventIter { - let package_name = metadata.root_package().as_ref().unwrap().name.to_string(); - let target_dir = metadata.target_directory.as_std_path().join("owl"); - log::info!("clear cargo cache"); - let mut command = toolchain::setup_cargo_command(1).await; - command - .args(["clean", 
"--package", &package_name]) - .env("CARGO_TARGET_DIR", &target_dir) - .current_dir(&self.path) - .stdout(std::process::Stdio::null()) - .stderr(std::process::Stdio::null()); - command.spawn().unwrap().wait().await.ok(); - - let mut command = toolchain::setup_cargo_command(self.rustc_threads).await; - - let mut args = vec!["check", "--workspace"]; - if all_targets { - args.push("--all-targets"); - } - if all_features { - args.push("--all-features"); - } - args.extend_from_slice(&["--keep-going", "--message-format=json"]); - - command - .args(args) - .env("CARGO_TARGET_DIR", &target_dir) - .env_remove("RUSTC_WRAPPER") - .current_dir(&self.path) - .stdout(std::process::Stdio::piped()) - .kill_on_drop(true); - - if is_cache() { - set_cache_path(&mut command, target_dir); - } - - if log::max_level() - .to_level() - .map(|v| v < log::Level::Info) - .unwrap_or(true) - { - command.stderr(std::process::Stdio::null()); - } - - let package_count = metadata.packages.len(); - - log::info!("start analyzing package {package_name}"); - let mut child = command.spawn().unwrap(); - let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines(); - - let (sender, receiver) = mpsc::channel(1024); - let notify = Arc::new(Notify::new()); - let notify_c = notify.clone(); - let _handle = tokio::spawn(async move { - // prevent command from dropped - while let Ok(Some(line)) = stdout.next_line().await { - if let Ok(CargoCheckMessage::CompilerArtifact { target }) = - serde_json::from_str(&line) - { - let checked = target.name; - log::info!("crate {checked} checked"); - - let event = AnalyzerEvent::CrateChecked { - package: checked, - package_count, - }; - let _ = sender.send(event).await; - } - if let Ok(ws) = serde_json::from_str::(&line) { - let event = AnalyzerEvent::Analyzed(ws); - let _ = sender.send(event).await; - } - } - log::info!("stdout closed"); - notify_c.notify_one(); - }); - - AnalyzeEventIter { - receiver, - notify, - child, - } - } - - async fn 
analyze_single_file(&self, path: &Path) -> AnalyzeEventIter { - let sysroot = toolchain::get_sysroot().await; - let rustowlc_path = toolchain::get_executable_path("rustowlc").await; - - let mut command = process::Command::new(&rustowlc_path); - command - .arg(&rustowlc_path) // rustowlc triggers when first arg is the path of itself - .arg(format!("--sysroot={}", sysroot.display())) - .arg("--crate-type=lib"); - #[cfg(unix)] - command.arg("-o/dev/null"); - #[cfg(windows)] - command.arg("-oNUL"); - command - .arg(path) - .stdout(std::process::Stdio::piped()) - .kill_on_drop(true); - - toolchain::set_rustc_env(&mut command, &sysroot); - - if log::max_level() - .to_level() - .map(|v| v < log::Level::Info) - .unwrap_or(true) - { - command.stderr(std::process::Stdio::null()); - } - - log::info!("start analyzing {}", path.display()); - let mut child = command.spawn().unwrap(); - let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines(); - - let (sender, receiver) = mpsc::channel(1024); - let notify = Arc::new(Notify::new()); - let notify_c = notify.clone(); - let _handle = tokio::spawn(async move { - // prevent command from dropped - while let Ok(Some(line)) = stdout.next_line().await { - if let Ok(ws) = serde_json::from_str::(&line) { - let event = AnalyzerEvent::Analyzed(ws); - let _ = sender.send(event).await; - } - } - log::info!("stdout closed"); - notify_c.notify_one(); - }); - - AnalyzeEventIter { - receiver, - notify, - child, - } - } -} - -pub struct AnalyzeEventIter { - receiver: mpsc::Receiver, - notify: Arc, - #[allow(unused)] - child: process::Child, -} -impl AnalyzeEventIter { - pub async fn next_event(&mut self) -> Option { - tokio::select! 
{ - v = self.receiver.recv() => v, - _ = self.notify.notified() => None, - } - } -} diff --git a/src/lsp/backend.rs b/src/lsp/backend.rs deleted file mode 100644 index e9ec850b..00000000 --- a/src/lsp/backend.rs +++ /dev/null @@ -1,390 +0,0 @@ -use super::analyze::*; -use crate::{lsp::*, models::*, utils}; -use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use tokio::{sync::RwLock, task::JoinSet}; -use tokio_util::sync::CancellationToken; -use tower_lsp::jsonrpc; -use tower_lsp::lsp_types; -use tower_lsp::{Client, LanguageServer, LspService}; - -#[derive(serde::Deserialize, Clone, Debug)] -#[serde(rename_all = "snake_case")] -pub struct AnalyzeRequest {} -#[derive(serde::Serialize, Clone, Debug)] -pub struct AnalyzeResponse {} - -/// RustOwl LSP server backend -pub struct Backend { - #[allow(unused)] - client: Client, - analyzers: Arc>>, - status: Arc>, - analyzed: Arc>>, - processes: Arc>>, - process_tokens: Arc>>, - work_done_progress: Arc>, - rustc_thread: usize, -} - -impl Backend { - pub fn new(rustc_thread: usize) -> impl Fn(Client) -> Self { - move |client: Client| Self { - client, - analyzers: Arc::new(RwLock::new(Vec::new())), - analyzed: Arc::new(RwLock::new(None)), - status: Arc::new(RwLock::new(progress::AnalysisStatus::Finished)), - processes: Arc::new(RwLock::new(JoinSet::new())), - process_tokens: Arc::new(RwLock::new(BTreeMap::new())), - work_done_progress: Arc::new(RwLock::new(false)), - rustc_thread, - } - } - - async fn add_analyze_target(&self, path: &Path) -> bool { - if let Ok(new_analyzer) = Analyzer::new(&path, self.rustc_thread).await { - let mut analyzers = self.analyzers.write().await; - for analyzer in &*analyzers { - if analyzer.target_path() == new_analyzer.target_path() { - return true; - } - } - analyzers.push(new_analyzer); - true - } else { - false - } - } - - pub async fn analyze(&self, _params: AnalyzeRequest) -> jsonrpc::Result { - log::info!("rustowl/analyze request received"); - 
self.do_analyze().await; - Ok(AnalyzeResponse {}) - } - async fn do_analyze(&self) { - self.shutdown_subprocesses().await; - self.analyze_with_options(false, false).await; - } - - async fn analyze_with_options(&self, all_targets: bool, all_features: bool) { - log::info!("wait 100ms for rust-analyzer"); - tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - - log::info!("stop running analysis processes"); - self.shutdown_subprocesses().await; - - log::info!("start analysis"); - { - *self.status.write().await = progress::AnalysisStatus::Analyzing; - } - let analyzers = { self.analyzers.read().await.clone() }; - - log::info!("analyze {} packages...", analyzers.len()); - for analyzer in analyzers { - let analyzed = self.analyzed.clone(); - let client = self.client.clone(); - let work_done_progress = self.work_done_progress.clone(); - let cancellation_token = CancellationToken::new(); - - let cancellation_token_key = { - let token = cancellation_token.clone(); - let mut tokens = self.process_tokens.write().await; - let key = if let Some(key) = tokens.last_entry().map(|v| *v.key()) { - key + 1 - } else { - 1 - }; - tokens.insert(key, token); - key - }; - - let process_tokens = self.process_tokens.clone(); - self.processes.write().await.spawn(async move { - let mut progress_token = None; - if *work_done_progress.read().await { - progress_token = - Some(progress::ProgressToken::begin(client, None::<&str>).await) - }; - - let mut iter = analyzer.analyze(all_targets, all_features).await; - let mut analyzed_package_count = 0; - while let Some(event) = tokio::select! 
{ - _ = cancellation_token.cancelled() => None, - event = iter.next_event() => event, - } { - match event { - AnalyzerEvent::CrateChecked { - package, - package_count, - } => { - analyzed_package_count += 1; - if let Some(token) = &progress_token { - let percentage = - (analyzed_package_count * 100 / package_count).min(100); - token - .report( - Some(format!("{package} analyzed")), - Some(percentage as u32), - ) - .await; - } - } - AnalyzerEvent::Analyzed(ws) => { - let write = &mut *analyzed.write().await; - for krate in ws.0.into_values() { - if let Some(write) = write { - write.merge(krate); - } else { - *write = Some(krate); - } - } - } - } - } - // remove cancellation token from list - process_tokens.write().await.remove(&cancellation_token_key); - - if let Some(progress_token) = progress_token { - progress_token.finish().await; - } - }); - } - - let processes = self.processes.clone(); - let status = self.status.clone(); - let analyzed = self.analyzed.clone(); - tokio::spawn(async move { - while { processes.write().await.join_next().await }.is_some() {} - let mut status = status.write().await; - let analyzed = analyzed.write().await; - if *status != progress::AnalysisStatus::Error { - if analyzed.as_ref().map(|v| v.0.len()).unwrap_or(0) == 0 { - *status = progress::AnalysisStatus::Error; - } else { - *status = progress::AnalysisStatus::Finished; - } - } - }); - } - - async fn decos( - &self, - filepath: &Path, - position: Loc, - ) -> Result, progress::AnalysisStatus> { - let mut selected = decoration::SelectLocal::new(position); - let mut error = progress::AnalysisStatus::Error; - if let Some(analyzed) = &*self.analyzed.read().await { - for (filename, file) in analyzed.0.iter() { - if filepath == PathBuf::from(filename) { - if !file.items.is_empty() { - error = progress::AnalysisStatus::Finished; - } - for item in &file.items { - utils::mir_visit(item, &mut selected); - } - } - } - - let mut calc = 
decoration::CalcDecos::new(selected.selected().iter().copied()); - for (filename, file) in analyzed.0.iter() { - if filepath == PathBuf::from(filename) { - for item in &file.items { - utils::mir_visit(item, &mut calc); - } - } - } - calc.handle_overlapping(); - let decos = calc.decorations(); - if !decos.is_empty() { - Ok(decos) - } else { - Err(error) - } - } else { - Err(error) - } - } - - pub async fn cursor( - &self, - params: decoration::CursorRequest, - ) -> jsonrpc::Result { - let is_analyzed = self.analyzed.read().await.is_some(); - let status = *self.status.read().await; - if let Some(path) = params.path() - && let Ok(text) = std::fs::read_to_string(&path) - { - let position = params.position(); - let pos = Loc(utils::line_char_to_index( - &text, - position.line, - position.character, - )); - let (decos, status) = match self.decos(&path, pos).await { - Ok(v) => (v, status), - Err(e) => ( - Vec::new(), - if status == progress::AnalysisStatus::Finished { - e - } else { - status - }, - ), - }; - let decorations = decos.into_iter().map(|v| v.to_lsp_range(&text)).collect(); - return Ok(decoration::Decorations { - is_analyzed, - status, - path: Some(path), - decorations, - }); - } - Ok(decoration::Decorations { - is_analyzed, - status, - path: None, - decorations: Vec::new(), - }) - } - - pub async fn check(path: impl AsRef, rustc_thread: usize) -> bool { - Self::check_with_options(path, false, false, rustc_thread).await - } - - pub async fn check_with_options( - path: impl AsRef, - all_targets: bool, - all_features: bool, - rustc_thread: usize, - ) -> bool { - let path = path.as_ref(); - let (service, _) = LspService::build(Backend::new(rustc_thread)).finish(); - let backend = service.inner(); - - if backend.add_analyze_target(path).await { - backend - .analyze_with_options(all_targets, all_features) - .await; - while backend.processes.write().await.join_next().await.is_some() {} - backend - .analyzed - .read() - .await - .as_ref() - .map(|v| !v.0.is_empty()) - 
.unwrap_or(false) - } else { - false - } - } - - pub async fn shutdown_subprocesses(&self) { - { - let mut tokens = self.process_tokens.write().await; - while let Some((_, token)) = tokens.pop_last() { - token.cancel(); - } - } - self.processes.write().await.shutdown().await; - } -} - -#[tower_lsp::async_trait] -impl LanguageServer for Backend { - async fn initialize( - &self, - params: lsp_types::InitializeParams, - ) -> jsonrpc::Result { - let mut workspaces = Vec::new(); - if let Some(root) = params.root_uri - && let Ok(path) = root.to_file_path() - { - workspaces.push(path); - } - if let Some(wss) = params.workspace_folders { - workspaces.extend(wss.iter().filter_map(|v| v.uri.to_file_path().ok())); - } - for path in workspaces { - self.add_analyze_target(&path).await; - } - self.do_analyze().await; - - let sync_options = lsp_types::TextDocumentSyncOptions { - open_close: Some(true), - save: Some(lsp_types::TextDocumentSyncSaveOptions::Supported(true)), - change: Some(lsp_types::TextDocumentSyncKind::INCREMENTAL), - ..Default::default() - }; - let workspace_cap = lsp_types::WorkspaceServerCapabilities { - workspace_folders: Some(lsp_types::WorkspaceFoldersServerCapabilities { - supported: Some(true), - change_notifications: Some(lsp_types::OneOf::Left(true)), - }), - ..Default::default() - }; - let server_cap = lsp_types::ServerCapabilities { - text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Options(sync_options)), - workspace: Some(workspace_cap), - ..Default::default() - }; - let init_res = lsp_types::InitializeResult { - capabilities: server_cap, - ..Default::default() - }; - let health_checker = async move { - if let Some(process_id) = params.process_id { - loop { - tokio::time::sleep(tokio::time::Duration::from_secs(30)).await; - if !process_alive::state(process_alive::Pid::from(process_id)).is_alive() { - panic!("The client process is dead"); - } - } - } - }; - if params - .capabilities - .window - .and_then(|v| v.work_done_progress) - 
.unwrap_or(false) - { - *self.work_done_progress.write().await = true; - } - tokio::spawn(health_checker); - Ok(init_res) - } - - async fn did_change_workspace_folders( - &self, - params: lsp_types::DidChangeWorkspaceFoldersParams, - ) -> () { - for added in params.event.added { - if let Ok(path) = added.uri.to_file_path() - && self.add_analyze_target(&path).await - { - self.do_analyze().await; - } - } - } - - async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { - if let Ok(path) = params.text_document.uri.to_file_path() - && path.is_file() - && params.text_document.language_id == "rust" - && self.add_analyze_target(&path).await - { - self.do_analyze().await; - } - } - - async fn did_change(&self, _params: lsp_types::DidChangeTextDocumentParams) { - *self.analyzed.write().await = None; - self.shutdown_subprocesses().await; - } - - async fn shutdown(&self) -> jsonrpc::Result<()> { - self.shutdown_subprocesses().await; - Ok(()) - } -} diff --git a/src/lsp/progress.rs b/src/lsp/progress.rs deleted file mode 100644 index cf2e7108..00000000 --- a/src/lsp/progress.rs +++ /dev/null @@ -1,97 +0,0 @@ -use serde::Serialize; -use tower_lsp::{Client, lsp_types}; - -#[derive(Serialize, Clone, Copy, PartialEq, Eq, Debug)] -#[serde(rename_all = "snake_case")] -pub enum AnalysisStatus { - Analyzing, - Finished, - Error, -} - -pub struct ProgressToken { - client: Option, - token: Option, -} -impl ProgressToken { - pub async fn begin(client: Client, message: Option) -> Self { - let token = lsp_types::NumberOrString::String(format!("{}", uuid::Uuid::new_v4())); - client - .send_request::( - lsp_types::WorkDoneProgressCreateParams { - token: token.clone(), - }, - ) - .await - .ok(); - - let value = lsp_types::ProgressParamsValue::WorkDone(lsp_types::WorkDoneProgress::Begin( - lsp_types::WorkDoneProgressBegin { - title: "RustOwl".to_owned(), - cancellable: Some(false), - message: message.map(|v| v.to_string()), - percentage: Some(0), - }, - )); - client - 
.send_notification::(lsp_types::ProgressParams { - token: token.clone(), - value, - }) - .await; - - Self { - client: Some(client), - token: Some(token), - } - } - - pub async fn report(&self, message: Option, percentage: Option) { - if let (Some(client), Some(token)) = (self.client.clone(), self.token.clone()) { - let value = lsp_types::ProgressParamsValue::WorkDone( - lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { - cancellable: Some(false), - message: message.map(|v| v.to_string()), - percentage, - }), - ); - client - .send_notification::(lsp_types::ProgressParams { - token, - value, - }) - .await; - } - } - - pub async fn finish(mut self) { - let value = lsp_types::ProgressParamsValue::WorkDone(lsp_types::WorkDoneProgress::End( - lsp_types::WorkDoneProgressEnd { message: None }, - )); - if let (Some(client), Some(token)) = (self.client.take(), self.token.take()) { - client - .send_notification::(lsp_types::ProgressParams { - token, - value, - }) - .await; - } - } -} - -impl Drop for ProgressToken { - fn drop(&mut self) { - let value = lsp_types::ProgressParamsValue::WorkDone(lsp_types::WorkDoneProgress::End( - lsp_types::WorkDoneProgressEnd { message: None }, - )); - if let (Some(client), Some(token)) = (self.client.take(), self.token.take()) { - tokio::spawn(async move { - client - .send_notification::( - lsp_types::ProgressParams { token, value }, - ) - .await; - }); - } - } -} diff --git a/src/miri_tests.rs b/src/miri_tests.rs deleted file mode 100644 index 6ab58812..00000000 --- a/src/miri_tests.rs +++ /dev/null @@ -1,359 +0,0 @@ -//! # Miri Memory Safety Tests -//! -//! This module contains tests specifically designed to run under Miri -//! to validate memory safety and undefined behavior detection in RustOwl's core functionality. -//! -//! These tests avoid external dependencies and process spawning that Miri doesn't support, -//! focusing on pure Rust code paths that can be fully analyzed for memory safety. -//! -//! 
## What These Tests Cover: -//! -//! ### Core Data Models & Memory Safety: -//! - **Loc arithmetic**: Position tracking with overflow/underflow protection -//! - **Range validation**: Bounds checking and edge case handling -//! - **FnLocal operations**: Hash map usage and equality checks -//! - **File model**: Vector operations and memory management -//! - **Workspace/Crate hierarchy**: Complex nested HashMap operations -//! - **MirVariable variants**: Enum handling and pattern matching -//! - **Function structures**: Complex nested data structure operations -//! -//! ### Memory Management Patterns: -//! - **String handling**: Unicode support and concatenation safety -//! - **Collection operations**: HashMap/Vector operations with complex nesting -//! - **Clone operations**: Deep copying of complex structures -//! - **Serialization structures**: Data integrity for serde-compatible types -//! - **Capacity management**: Pre-allocation and memory growth patterns -//! -//! ### What Miri Validates: -//! - No use-after-free bugs -//! - No buffer overflows/underflows -//! - No uninitialized memory access -//! - No data races (in single-threaded context) -//! - Proper pointer provenance -//! - Memory leak detection -//! - Undefined behavior in arithmetic operations -//! -//! ## Limitations: -//! These tests cannot cover RustOwl functionality that requires: -//! - Process spawning (cargo metadata calls) -//! - File system operations -//! - Network operations -//! - External tool integration -//! -//! However, they thoroughly validate the core algorithms and data structures -//! that form the foundation of RustOwl's analysis capabilities. -//! -//! ## Usage: -//! ```bash -//! MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-permissive-provenance" cargo miri test --lib -//! 
``` - -#[cfg(test)] -mod miri_memory_safety_tests { - use crate::models::*; - use std::collections::HashMap; - - #[test] - fn test_loc_arithmetic_memory_safety() { - // Test Loc model creation and arithmetic operations for memory safety - let loc = Loc::new("test string with unicode 🦀", 5, 0); - let loc2 = loc + 2; - let loc3 = loc2 - 1; - - // Test arithmetic operations don't cause memory issues - assert_eq!(loc3.0, loc.0 + 1); - - // Test boundary conditions - let loc_zero = Loc(0); - let loc_underflow = loc_zero - 10; // Should saturate to 0 - assert_eq!(loc_underflow.0, 0); - - // Test large values (but avoid overflow) - let loc_large = Loc(u32::MAX - 10); - let loc_add = loc_large + 5; // Safe addition - assert_eq!(loc_add.0, u32::MAX - 5); - } - - #[test] - fn test_range_creation_and_validation() { - // Test Range creation with various scenarios - let valid_range = Range::new(Loc(0), Loc(10)).unwrap(); - assert_eq!(valid_range.from().0, 0); - assert_eq!(valid_range.until().0, 10); - assert_eq!(valid_range.size(), 10); - - // Test invalid range (until <= from) - let invalid_range = Range::new(Loc(10), Loc(5)); - assert!(invalid_range.is_none()); - - // Test edge case: same positions - let same_pos_range = Range::new(Loc(5), Loc(5)); - assert!(same_pos_range.is_none()); - - // Test large ranges - let large_range = Range::new(Loc(0), Loc(u32::MAX)).unwrap(); - assert_eq!(large_range.size(), u32::MAX); - } - - #[test] - fn test_fn_local_operations() { - // Test FnLocal model creation and operations - let fn_local1 = FnLocal::new(42, 100); - let fn_local2 = FnLocal::new(43, 100); - let fn_local3 = FnLocal::new(42, 100); - - // Test equality and inequality - assert_eq!(fn_local1, fn_local3); - assert_ne!(fn_local1, fn_local2); - - // Test hashing (via HashMap insertion) - let mut map = HashMap::new(); - map.insert(fn_local1, "first"); - map.insert(fn_local2, "second"); - map.insert(fn_local3, "third"); // Should overwrite first - - assert_eq!(map.len(), 2); - 
assert_eq!(map.get(&fn_local1), Some(&"third")); - assert_eq!(map.get(&fn_local2), Some(&"second")); - } - - #[test] - fn test_file_model_operations() { - // Test File model with various operations - let mut file = File { items: Vec::new() }; - - // Test vector operations - assert_eq!(file.items.len(), 0); - assert!(file.items.is_empty()); - - // Test vector capacity and memory management - file.items.reserve(1000); - assert!(file.items.capacity() >= 1000); - - // Test cloning (deep copy) - let file_clone = file.clone(); - assert_eq!(file.items.len(), file_clone.items.len()); - } - - #[test] - fn test_workspace_operations() { - // Test Workspace and Crate models - let mut workspace = Workspace(HashMap::new()); - let mut crate1 = Crate(HashMap::new()); - let mut crate2 = Crate(HashMap::new()); - - // Add some files to crates - crate1 - .0 - .insert("lib.rs".to_string(), File { items: Vec::new() }); - crate1 - .0 - .insert("main.rs".to_string(), File { items: Vec::new() }); - - crate2 - .0 - .insert("helper.rs".to_string(), File { items: Vec::new() }); - - // Add crates to workspace - workspace.0.insert("crate1".to_string(), crate1); - workspace.0.insert("crate2".to_string(), crate2); - - assert_eq!(workspace.0.len(), 2); - assert!(workspace.0.contains_key("crate1")); - assert!(workspace.0.contains_key("crate2")); - - // Test workspace merging - let mut other_workspace = Workspace(HashMap::new()); - let crate3 = Crate(HashMap::new()); - other_workspace.0.insert("crate3".to_string(), crate3); - - workspace.merge(other_workspace); - assert_eq!(workspace.0.len(), 3); - assert!(workspace.0.contains_key("crate3")); - } - - #[test] - fn test_mir_variables_operations() { - // Test MirVariables collection operations - let mut mir_vars = MirVariables::new(); - - // Test creation of MirVariable variants - let user_var = MirVariable::User { - index: 1, - live: Range::new(Loc(0), Loc(10)).unwrap(), - dead: Range::new(Loc(10), Loc(20)).unwrap(), - }; - - let other_var = 
MirVariable::Other { - index: 2, - live: Range::new(Loc(5), Loc(15)).unwrap(), - dead: Range::new(Loc(15), Loc(25)).unwrap(), - }; - - // Test insertion using push method - mir_vars.push(user_var); - mir_vars.push(other_var); - - // Test converting to vector - let vars_vec = mir_vars.clone().to_vec(); - assert_eq!(vars_vec.len(), 2); - - // Test that we can find our variables - let has_user_var = vars_vec - .iter() - .any(|v| matches!(v, MirVariable::User { index: 1, .. })); - let has_other_var = vars_vec - .iter() - .any(|v| matches!(v, MirVariable::Other { index: 2, .. })); - - assert!(has_user_var); - assert!(has_other_var); - - // Test duplicate insertion (should not duplicate) - mir_vars.push(user_var); - let final_vec = mir_vars.to_vec(); - assert_eq!(final_vec.len(), 2); // Still 2, not 3 - } - - #[test] - fn test_function_model_complex_operations() { - // Test Function model with complex nested structures - let function = Function { - fn_id: 42, - basic_blocks: Vec::new(), - decls: Vec::new(), - }; - - // Test cloning of complex nested structures - let function_clone = function.clone(); - assert_eq!(function.fn_id, function_clone.fn_id); - assert_eq!( - function.basic_blocks.len(), - function_clone.basic_blocks.len() - ); - assert_eq!(function.decls.len(), function_clone.decls.len()); - - // Test memory layout and alignment - let function_size = std::mem::size_of::(); - assert!(function_size > 0); - - // Test that we can create multiple instances without memory issues - let mut functions = Vec::new(); - for i in 0..100 { - functions.push(Function { - fn_id: i, - basic_blocks: Vec::new(), - decls: Vec::new(), - }); - } - - assert_eq!(functions.len(), 100); - assert_eq!(functions[50].fn_id, 50); - - // Test vector capacity management - let large_function = Function { - fn_id: 999, - basic_blocks: Vec::with_capacity(1000), - decls: Vec::with_capacity(500), - }; - - assert!(large_function.basic_blocks.capacity() >= 1000); - 
assert!(large_function.decls.capacity() >= 500); - } - - #[test] - fn test_string_handling_memory_safety() { - // Test string operations that could cause memory issues - let mut strings = Vec::new(); - - // Test various string operations - for i in 0..50 { - let s = format!("test_string_{i}"); - strings.push(s); - } - - // Test string concatenation - let mut concatenated = String::new(); - for s in &strings { - concatenated.push_str(s); - concatenated.push(' '); - } - - assert!(!concatenated.is_empty()); - - // Test unicode handling - let unicode_string = "🦀 Rust 🔥 Memory Safety 🛡️".to_string(); - let _file = File { items: Vec::new() }; - - // Ensure unicode doesn't cause memory issues - assert!(unicode_string.len() > unicode_string.chars().count()); - } - - #[test] - fn test_collections_memory_safety() { - // Test various collection operations for memory safety - let mut map: HashMap> = HashMap::new(); - - // Insert data with complex nesting - for i in 0..20 { - let key = format!("key_{i}"); - let mut vec = Vec::new(); - - for j in 0..5 { - vec.push(FnLocal::new(j, i)); - } - - map.insert(key, vec); - } - - assert_eq!(map.len(), 20); - - // Test iteration and borrowing - for (key, vec) in &map { - assert!(key.starts_with("key_")); - assert_eq!(vec.len(), 5); - - for fn_local in vec { - assert!(fn_local.id < 5); - assert!(fn_local.fn_id < 20); - } - } - - // Test modification during iteration (using drain) - let mut keys_to_remove = Vec::new(); - for key in map.keys() { - if key.ends_with("_1") || key.ends_with("_2") { - keys_to_remove.push(key.clone()); - } - } - - for key in keys_to_remove { - map.remove(&key); - } - - assert_eq!(map.len(), 18); // 20 - 2 - } - - #[test] - fn test_serialization_structures() { - // Test that our serializable structures don't have memory issues - // when working with the underlying data (without actual serialization) - - let range = Range::new(Loc(10), Loc(20)).unwrap(); - let fn_local = FnLocal::new(1, 2); - - // Test that Clone 
and PartialEq work correctly - let range_clone = range; - let fn_local_clone = fn_local; - - assert_eq!(range, range_clone); - assert_eq!(fn_local, fn_local_clone); - - // Test Debug formatting (without actually printing) - let debug_string = format!("{range:?}"); - assert!(debug_string.contains("Range")); - - let debug_fn_local = format!("{fn_local:?}"); - assert!(debug_fn_local.contains("FnLocal")); - } -} diff --git a/src/models.rs b/src/models.rs deleted file mode 100644 index 8c023c89..00000000 --- a/src/models.rs +++ /dev/null @@ -1,307 +0,0 @@ -#![allow(unused)] - -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct FnLocal { - pub id: u32, - pub fn_id: u32, -} - -impl FnLocal { - pub fn new(id: u32, fn_id: u32) -> Self { - Self { id, fn_id } - } -} - -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] -#[serde(transparent)] -pub struct Loc(pub u32); -impl Loc { - pub fn new(source: &str, byte_pos: u32, offset: u32) -> Self { - let byte_pos = byte_pos.saturating_sub(offset); - // it seems that the compiler is ignoring CR - let source_clean = source.replace("\r", ""); - - // Convert byte position to character position safely - if source_clean.len() < byte_pos as usize { - return Self(source_clean.chars().count() as u32); - } - - // Find the character index corresponding to the byte position - match source_clean - .char_indices() - .position(|(byte_idx, _)| (byte_pos as usize) <= byte_idx) - { - Some(char_idx) => Self(char_idx as u32), - None => Self(source_clean.chars().count() as u32), - } - } -} - -impl std::ops::Add for Loc { - type Output = Loc; - fn add(self, rhs: i32) -> Self::Output { - if rhs < 0 && (self.0 as i32) < -rhs { - Loc(0) - } else { - Loc(self.0 + rhs as u32) - } - } -} - -impl std::ops::Sub for Loc { - type Output = Loc; - fn sub(self, rhs: i32) -> Self::Output { - if 0 < rhs && (self.0 as i32) < rhs 
{ - Loc(0) - } else { - Loc(self.0 - rhs as u32) - } - } -} - -impl From for Loc { - fn from(value: u32) -> Self { - Self(value) - } -} - -impl From for u32 { - fn from(value: Loc) -> Self { - value.0 - } -} - -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] -pub struct Range { - from: Loc, - until: Loc, -} - -impl Range { - pub fn new(from: Loc, until: Loc) -> Option { - if until.0 <= from.0 { - None - } else { - Some(Self { from, until }) - } - } - pub fn from(&self) -> Loc { - self.from - } - pub fn until(&self) -> Loc { - self.until - } - pub fn size(&self) -> u32 { - self.until.0 - self.from.0 - } -} - -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum MirVariable { - User { - index: u32, - live: Range, - dead: Range, - }, - Other { - index: u32, - live: Range, - dead: Range, - }, -} - -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] -#[serde(transparent)] -pub struct MirVariables(HashMap); - -impl Default for MirVariables { - fn default() -> Self { - Self::new() - } -} - -impl MirVariables { - pub fn new() -> Self { - Self(HashMap::new()) - } - - pub fn push(&mut self, var: MirVariable) { - match &var { - MirVariable::User { index, .. } => { - if !self.0.contains_key(index) { - self.0.insert(*index, var); - } - } - MirVariable::Other { index, .. 
} => { - if !self.0.contains_key(index) { - self.0.insert(*index, var); - } - } - } - } - - pub fn to_vec(self) -> Vec { - self.0.into_values().collect() - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum Item { - Function { span: Range, mir: Function }, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct File { - pub items: Vec, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(transparent)] -pub struct Workspace(pub HashMap); - -impl Workspace { - pub fn merge(&mut self, other: Self) { - let Workspace(crates) = other; - for (name, krate) in crates { - if let Some(insert) = self.0.get_mut(&name) { - insert.merge(krate); - } else { - self.0.insert(name, krate); - } - } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(transparent)] -pub struct Crate(pub HashMap); - -impl Crate { - pub fn merge(&mut self, other: Self) { - let Crate(files) = other; - for (file, mir) in files { - if let Some(insert) = self.0.get_mut(&file) { - insert.items.extend_from_slice(&mir.items); - insert.items.dedup_by(|a, b| a.fn_id == b.fn_id); - } else { - self.0.insert(file, mir); - } - } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum MirRval { - Move { - target_local: FnLocal, - range: Range, - }, - Borrow { - target_local: FnLocal, - range: Range, - mutable: bool, - outlive: Option, - }, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum MirStatement { - StorageLive { - target_local: FnLocal, - range: Range, - }, - StorageDead { - target_local: FnLocal, - range: Range, - }, - Assign { - target_local: FnLocal, - range: Range, - rval: Option, - }, - Other { - range: Range, - }, -} -impl MirStatement { - pub fn range(&self) -> Range { - match self { - Self::StorageLive { range, .. } => *range, - Self::StorageDead { range, .. 
} => *range, - Self::Assign { range, .. } => *range, - Self::Other { range } => *range, - } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum MirTerminator { - Drop { - local: FnLocal, - range: Range, - }, - Call { - destination_local: FnLocal, - fn_span: Range, - }, - Other { - range: Range, - }, -} -impl MirTerminator { - pub fn range(&self) -> Range { - match self { - Self::Drop { range, .. } => *range, - Self::Call { fn_span, .. } => *fn_span, - Self::Other { range } => *range, - } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct MirBasicBlock { - pub statements: Vec, - pub terminator: Option, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum MirDecl { - User { - local: FnLocal, - name: String, - span: Range, - ty: String, - lives: Vec, - shared_borrow: Vec, - mutable_borrow: Vec, - drop: bool, - drop_range: Vec, - must_live_at: Vec, - }, - Other { - local: FnLocal, - ty: String, - lives: Vec, - shared_borrow: Vec, - mutable_borrow: Vec, - drop: bool, - drop_range: Vec, - must_live_at: Vec, - }, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Function { - pub fn_id: u32, - pub basic_blocks: Vec, - pub decls: Vec, -} diff --git a/src/shells.rs b/src/shells.rs deleted file mode 100644 index 0688786b..00000000 --- a/src/shells.rs +++ /dev/null @@ -1,139 +0,0 @@ -use clap_complete_nushell::Nushell; - -use std::fmt::Display; -use std::path::Path; -use std::str::FromStr; - -use clap::ValueEnum; - -use clap_complete::Generator; -use clap_complete::shells; - -/// Shell with auto-generated completion script available. 
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum)] -#[non_exhaustive] -#[value(rename_all = "lower")] -pub enum Shell { - /// Bourne Again `SHell` (bash) - Bash, - /// Elvish shell - Elvish, - /// Friendly Interactive `SHell` (fish) - Fish, - /// `PowerShell` - PowerShell, - /// Z `SHell` (zsh) - Zsh, - /// Nushell - Nushell, -} - -impl Display for Shell { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.to_possible_value() - .expect("no values are skipped") - .get_name() - .fmt(f) - } -} - -impl FromStr for Shell { - type Err = String; - - fn from_str(s: &str) -> Result { - for variant in Self::value_variants() { - if variant.to_possible_value().unwrap().matches(s, false) { - return Ok(*variant); - } - } - Err(format!("invalid variant: {s}")) - } -} - -impl Generator for Shell { - fn file_name(&self, name: &str) -> String { - match self { - Shell::Bash => shells::Bash.file_name(name), - Shell::Elvish => shells::Elvish.file_name(name), - Shell::Fish => shells::Fish.file_name(name), - Shell::PowerShell => shells::PowerShell.file_name(name), - Shell::Zsh => shells::Zsh.file_name(name), - Shell::Nushell => Nushell.file_name(name), - } - } - - fn generate(&self, cmd: &clap::Command, buf: &mut dyn std::io::Write) { - match self { - Shell::Bash => shells::Bash.generate(cmd, buf), - Shell::Elvish => shells::Elvish.generate(cmd, buf), - Shell::Fish => shells::Fish.generate(cmd, buf), - Shell::PowerShell => shells::PowerShell.generate(cmd, buf), - Shell::Zsh => shells::Zsh.generate(cmd, buf), - Shell::Nushell => Nushell.generate(cmd, buf), - } - } -} - -impl Shell { - /// Parse a shell from a path to the executable for the shell - /// - /// # Examples - /// - /// ``` - /// use clap_complete::shells::Shell; - /// - /// assert_eq!(Shell::from_shell_path("/bin/bash"), Some(Shell::Bash)); - /// assert_eq!(Shell::from_shell_path("/usr/bin/zsh"), Some(Shell::Zsh)); - /// assert_eq!(Shell::from_shell_path("/opt/my_custom_shell"), None); - 
/// ``` - pub fn from_shell_path>(path: P) -> Option { - parse_shell_from_path(path.as_ref()) - } - - /// Determine the user's current shell from the environment - /// - /// This will read the SHELL environment variable and try to determine which shell is in use - /// from that. - /// - /// If SHELL is not set, then on windows, it will default to powershell, and on - /// other operating systems it will return `None`. - /// - /// If SHELL is set, but contains a value that doesn't correspond to one of the supported shell - /// types, then return `None`. - /// - /// # Example: - /// - /// ```no_run - /// # use clap::Command; - /// use clap_complete::{generate, shells::Shell}; - /// # fn build_cli() -> Command { - /// # Command::new("compl") - /// # } - /// let mut cmd = build_cli(); - /// generate(Shell::from_env().unwrap_or(Shell::Bash), &mut cmd, "myapp", &mut std::io::stdout()); - /// ``` - pub fn from_env() -> Option { - if let Some(env_shell) = std::env::var_os("SHELL") { - Shell::from_shell_path(env_shell) - } else if cfg!(windows) { - Some(Shell::PowerShell) - } else { - None - } - } -} - -// use a separate function to avoid having to monomorphize the entire function due -// to from_shell_path being generic -fn parse_shell_from_path(path: &Path) -> Option { - let name = path.file_stem()?.to_str()?; - match name { - "bash" => Some(Shell::Bash), - "zsh" => Some(Shell::Zsh), - "fish" => Some(Shell::Fish), - "elvish" => Some(Shell::Elvish), - "powershell" | "powershell_ise" => Some(Shell::PowerShell), - "nushell" => Some(Shell::Nushell), - _ => None, - } -} diff --git a/src/toolchain.rs b/src/toolchain.rs deleted file mode 100644 index d7fe5d69..00000000 --- a/src/toolchain.rs +++ /dev/null @@ -1,328 +0,0 @@ -use std::env; -use std::fs::read_dir; -use std::path::{Path, PathBuf}; -use std::sync::LazyLock; -use tokio::fs::{create_dir_all, read_to_string, remove_dir_all, rename}; - -use flate2::read::GzDecoder; -use tar::Archive; - -pub const TOOLCHAIN: &str = 
env!("RUSTOWL_TOOLCHAIN"); -pub const HOST_TUPLE: &str = env!("HOST_TUPLE"); -const TOOLCHAIN_CHANNEL: &str = env!("TOOLCHAIN_CHANNEL"); -const TOOLCHAIN_DATE: Option<&str> = option_env!("TOOLCHAIN_DATE"); - -pub static FALLBACK_RUNTIME_DIR: LazyLock = LazyLock::new(|| { - let opt = PathBuf::from("/opt/rustowl"); - if sysroot_from_runtime(&opt).is_dir() { - return opt; - } - let same = env::current_exe().unwrap().parent().unwrap().to_path_buf(); - if sysroot_from_runtime(&same).is_dir() { - return same; - } - env::home_dir().unwrap().join(".rustowl") -}); - -fn recursive_read_dir(path: impl AsRef) -> Vec { - let mut paths = Vec::new(); - if path.as_ref().is_dir() { - for entry in read_dir(&path).unwrap().flatten() { - let path = entry.path(); - if path.is_dir() { - paths.extend_from_slice(&recursive_read_dir(&path)); - } else { - paths.push(path); - } - } - } - paths -} - -pub fn sysroot_from_runtime(runtime: impl AsRef) -> PathBuf { - runtime.as_ref().join("sysroot").join(TOOLCHAIN) -} - -async fn get_runtime_dir() -> PathBuf { - let sysroot = sysroot_from_runtime(&*FALLBACK_RUNTIME_DIR); - if FALLBACK_RUNTIME_DIR.is_dir() && sysroot.is_dir() { - return FALLBACK_RUNTIME_DIR.clone(); - } - - log::info!("sysroot not found; start setup toolchain"); - if let Err(e) = setup_toolchain(&*FALLBACK_RUNTIME_DIR, false).await { - log::error!("{e:?}"); - std::process::exit(1); - } else { - FALLBACK_RUNTIME_DIR.clone() - } -} - -pub async fn get_sysroot() -> PathBuf { - sysroot_from_runtime(get_runtime_dir().await) -} - -async fn download(url: &str) -> Result, ()> { - log::info!("start downloading {url}..."); - let mut resp = match reqwest::get(url).await.and_then(|v| v.error_for_status()) { - Ok(v) => v, - Err(e) => { - log::error!("failed to download tarball"); - log::error!("{e:?}"); - return Err(()); - } - }; - - let content_length = resp.content_length().unwrap_or(200_000_000) as usize; - let mut data = Vec::with_capacity(content_length); - let mut received = 0; - while 
let Some(chunk) = match resp.chunk().await { - Ok(v) => v, - Err(e) => { - log::error!("failed to download runtime archive"); - log::error!("{e:?}"); - return Err(()); - } - } { - data.extend_from_slice(&chunk); - let current = data.len() * 100 / content_length; - if received != current { - received = current; - log::info!("{received:>3}% received"); - } - } - log::info!("download finished"); - Ok(data) -} -async fn download_tarball_and_extract(url: &str, dest: &Path) -> Result<(), ()> { - let data = download(url).await?; - let decoder = GzDecoder::new(&*data); - let mut archive = Archive::new(decoder); - archive.unpack(dest).map_err(|_| { - log::error!("failed to unpack tarball"); - })?; - log::info!("successfully unpacked"); - Ok(()) -} -#[cfg(target_os = "windows")] -async fn download_zip_and_extract(url: &str, dest: &Path) -> Result<(), ()> { - use zip::ZipArchive; - let data = download(url).await?; - let cursor = std::io::Cursor::new(&*data); - - let mut archive = match ZipArchive::new(cursor) { - Ok(archive) => archive, - Err(e) => { - log::error!("failed to read ZIP archive"); - log::error!("{e:?}"); - return Err(()); - } - }; - archive.extract(dest).map_err(|e| { - log::error!("failed to unpack zip: {e}"); - })?; - log::info!("successfully unpacked"); - Ok(()) -} - -async fn install_component(component: &str, dest: &Path) -> Result<(), ()> { - let tempdir = tempfile::tempdir().map_err(|_| ())?; - // Using `tempdir.path()` more than once causes SEGV, so we use `tempdir.path().to_owned()`. 
- let temp_path = tempdir.path().to_owned(); - log::info!("temp dir is made: {}", temp_path.display()); - - let dist_base = "https://static.rust-lang.org/dist"; - let base_url = match TOOLCHAIN_DATE { - Some(v) => format!("{dist_base}/{v}"), - None => dist_base.to_owned(), - }; - - let component_toolchain = format!("{component}-{TOOLCHAIN_CHANNEL}-{HOST_TUPLE}"); - let tarball_url = format!("{base_url}/{component_toolchain}.tar.gz"); - - download_tarball_and_extract(&tarball_url, &temp_path).await?; - - let extracted_path = temp_path.join(&component_toolchain); - let components = read_to_string(extracted_path.join("components")) - .await - .map_err(|_| { - log::error!("failed to read components list"); - })?; - let components = components.split_whitespace(); - - for component in components { - let component_path = extracted_path.join(component); - for from in recursive_read_dir(&component_path) { - let rel_path = match from.strip_prefix(&component_path) { - Ok(v) => v, - Err(e) => { - log::error!("path error: {e}"); - return Err(()); - } - }; - let to = dest.join(rel_path); - if let Err(e) = create_dir_all(to.parent().unwrap()).await { - log::error!("failed to create dir: {e}"); - return Err(()); - } - if let Err(e) = rename(&from, &to).await { - log::warn!("file rename failed: {e}, falling back to copy and delete"); - if let Err(copy_err) = tokio::fs::copy(&from, &to).await { - log::error!("file copy error (after rename failure): {copy_err}"); - return Err(()); - } - if let Err(del_err) = tokio::fs::remove_file(&from).await { - log::error!("file delete error (after copy): {del_err}"); - return Err(()); - } - } - } - log::info!("component {component} successfully installed"); - } - Ok(()) -} -pub async fn setup_toolchain(dest: impl AsRef, skip_rustowl: bool) -> Result<(), ()> { - setup_rust_toolchain(&dest).await?; - if !skip_rustowl { - setup_rustowl_toolchain(&dest).await?; - } - Ok(()) -} -pub async fn setup_rust_toolchain(dest: impl AsRef) -> Result<(), ()> { - 
let sysroot = sysroot_from_runtime(dest.as_ref()); - if create_dir_all(&sysroot).await.is_err() { - log::error!("failed to create toolchain directory"); - return Err(()); - } - - log::info!("start installing Rust toolchain..."); - install_component("rustc", &sysroot).await?; - install_component("rust-std", &sysroot).await?; - install_component("cargo", &sysroot).await?; - log::info!("installing Rust toolchain finished"); - Ok(()) -} -pub async fn setup_rustowl_toolchain(dest: impl AsRef) -> Result<(), ()> { - log::info!("start installing RustOwl toolchain..."); - #[cfg(not(target_os = "windows"))] - let rustowl_toolchain_result = { - let rustowl_tarball_url = format!( - "https://github.com/cordx56/rustowl/releases/download/v{}/rustowl-{HOST_TUPLE}.tar.gz", - clap::crate_version!(), - ); - download_tarball_and_extract(&rustowl_tarball_url, dest.as_ref()).await - }; - #[cfg(target_os = "windows")] - let rustowl_toolchain_result = { - let rustowl_zip_url = format!( - "https://github.com/cordx56/rustowl/releases/download/v{}/rustowl-{HOST_TUPLE}.zip", - clap::crate_version!(), - ); - download_zip_and_extract(&rustowl_zip_url, dest.as_ref()).await - }; - if rustowl_toolchain_result.is_ok() { - log::info!("installing RustOwl toolchain finished"); - } else { - log::warn!("could not install RustOwl toolchain; local installed rustowlc will be used"); - } - - log::info!("toolchain setup finished"); - Ok(()) -} - -pub async fn uninstall_toolchain() { - let sysroot = sysroot_from_runtime(&*FALLBACK_RUNTIME_DIR); - if sysroot.is_dir() { - log::info!("remove sysroot: {}", sysroot.display()); - remove_dir_all(&sysroot).await.unwrap(); - } -} - -pub async fn get_executable_path(name: &str) -> String { - #[cfg(not(windows))] - let exec_name = name.to_owned(); - #[cfg(windows)] - let exec_name = format!("{name}.exe"); - - let sysroot = get_sysroot().await; - let exec_bin = sysroot.join("bin").join(&exec_name); - if exec_bin.is_file() { - 
log::info!("{name} is selected in sysroot/bin"); - return exec_bin.to_string_lossy().to_string(); - } - - let mut current_exec = env::current_exe().unwrap(); - current_exec.set_file_name(&exec_name); - if current_exec.is_file() { - log::info!("{name} is selected in the same directory as rustowl executable"); - return current_exec.to_string_lossy().to_string(); - } - - log::warn!("{name} not found; fallback"); - exec_name.to_owned() -} - -pub async fn setup_cargo_command(rustc_threads: usize) -> tokio::process::Command { - let cargo = get_executable_path("cargo").await; - let mut command = tokio::process::Command::new(&cargo); - let rustowlc = get_executable_path("rustowlc").await; - - // check user set flags - let delimiter = 0x1f as char; - let rustflags = env::var("RUSTFLAGS") - .unwrap_or("".to_string()) - .split_whitespace() - .fold("".to_string(), |acc, x| format!("{acc}{delimiter}{x}")); - let mut encoded_flags = env::var("CARGO_ENCODED_RUSTFLAGS") - .map(|v| format!("{v}{delimiter}")) - .unwrap_or("".to_string()); - if 1 < rustc_threads { - encoded_flags = format!("-Z{delimiter}threads={rustc_threads}{delimiter}{encoded_flags}"); - } - - let sysroot = get_sysroot().await; - command - .env("RUSTC", &rustowlc) - .env("RUSTC_WORKSPACE_WRAPPER", &rustowlc) - .env( - "CARGO_ENCODED_RUSTFLAGS", - format!( - "{}--sysroot={}{}", - encoded_flags, - sysroot.display(), - rustflags - ), - ); - set_rustc_env(&mut command, &sysroot); - command -} - -pub fn set_rustc_env(command: &mut tokio::process::Command, sysroot: &Path) { - command.env("RUSTC_BOOTSTRAP", "1"); // Support nightly projects - - #[cfg(target_os = "linux")] - { - let mut paths = env::split_paths(&env::var("LD_LIBRARY_PATH").unwrap_or("".to_owned())) - .collect::>(); - paths.push_front(sysroot.join("lib")); - let paths = env::join_paths(paths).unwrap(); - command.env("LD_LIBRARY_PATH", paths); - } - #[cfg(target_os = "macos")] - { - let mut paths = - 
env::split_paths(&env::var("DYLD_FALLBACK_LIBRARY_PATH").unwrap_or("".to_owned())) - .collect::>(); - paths.push_front(sysroot.join("lib")); - let paths = env::join_paths(paths).unwrap(); - command.env("DYLD_FALLBACK_LIBRARY_PATH", paths); - } - #[cfg(target_os = "windows")] - { - let mut paths = env::split_paths(&env::var_os("Path").unwrap()) - .collect::>(); - paths.push_front(sysroot.join("bin")); - let paths = env::join_paths(paths).unwrap(); - command.env("Path", paths); - } -} diff --git a/src/utils.rs b/src/utils.rs deleted file mode 100644 index b8110037..00000000 --- a/src/utils.rs +++ /dev/null @@ -1,143 +0,0 @@ -use crate::models::*; - -pub fn is_super_range(r1: Range, r2: Range) -> bool { - (r1.from() < r2.from() && r2.until() <= r1.until()) - || (r1.from() <= r2.from() && r2.until() < r1.until()) -} - -pub fn common_range(r1: Range, r2: Range) -> Option { - if r2.from() < r1.from() { - return common_range(r2, r1); - } - if r1.until() < r2.from() { - return None; - } - let from = r2.from(); - let until = r1.until().min(r2.until()); - Range::new(from, until) -} - -pub fn common_ranges(ranges: &[Range]) -> Vec { - let mut common_ranges = Vec::new(); - for i in 0..ranges.len() { - for j in i + 1..ranges.len() { - if let Some(common) = common_range(ranges[i], ranges[j]) { - common_ranges.push(common); - } - } - } - eliminated_ranges(common_ranges) -} - -/// merge two ranges, result is superset of two ranges -pub fn merge_ranges(r1: Range, r2: Range) -> Option { - if common_range(r1, r2).is_some() || r1.until() == r2.from() || r2.until() == r1.from() { - let from = r1.from().min(r2.from()); - let until = r1.until().max(r2.until()); - Range::new(from, until) - } else { - None - } -} - -/// eliminate common ranges and flatten ranges -pub fn eliminated_ranges(mut ranges: Vec) -> Vec { - let mut i = 0; - 'outer: while i < ranges.len() { - let mut j = 0; - while j < ranges.len() { - if i != j - && let Some(merged) = merge_ranges(ranges[i], ranges[j]) - { - 
ranges[i] = merged; - ranges.remove(j); - continue 'outer; - } - j += 1; - } - i += 1; - } - ranges -} - -pub fn exclude_ranges(mut from: Vec, excludes: Vec) -> Vec { - let mut i = 0; - 'outer: while i < from.len() { - let mut j = 0; - while j < excludes.len() { - if let Some(common) = common_range(from[i], excludes[j]) { - if let Some(r) = Range::new(from[i].from(), common.from() - 1) { - from.push(r); - } - if let Some(r) = Range::new(common.until() + 1, from[i].until()) { - from.push(r); - } - from.remove(i); - continue 'outer; - } - j += 1; - } - i += 1; - } - eliminated_ranges(from) -} - -#[allow(unused)] -pub trait MirVisitor { - fn visit_func(&mut self, func: &Function) {} - fn visit_decl(&mut self, decl: &MirDecl) {} - fn visit_stmt(&mut self, stmt: &MirStatement) {} - fn visit_term(&mut self, term: &MirTerminator) {} -} -pub fn mir_visit(func: &Function, visitor: &mut impl MirVisitor) { - visitor.visit_func(func); - for decl in &func.decls { - visitor.visit_decl(decl); - } - for bb in &func.basic_blocks { - for stmt in &bb.statements { - visitor.visit_stmt(stmt); - } - if let Some(term) = &bb.terminator { - visitor.visit_term(term); - } - } -} - -pub fn index_to_line_char(s: &str, idx: Loc) -> (u32, u32) { - let mut line = 0; - let mut col = 0; - // it seems that the compiler is ignoring CR - for (i, c) in s.replace("\r", "").chars().enumerate() { - if idx == Loc::from(i as u32) { - return (line, col); - } - if c == '\n' { - line += 1; - col = 0; - } else if c != '\r' { - col += 1; - } - } - (0, 0) -} -pub fn line_char_to_index(s: &str, mut line: u32, char: u32) -> u32 { - let mut col = 0; - // it seems that the compiler is ignoring CR - for (i, c) in s.replace("\r", "").chars().enumerate() { - if line == 0 && col == char { - return i as u32; - } - if c == '\n' && 0 < line { - line -= 1; - col = 0; - } else if c != '\r' { - col += 1; - } - } - 0 -} - -pub fn get_default_parallel_count() -> usize { - num_cpus::get_physical() -}