# CI workflow (GitHub Actions)
# rename: real_ladybug -> ladybug (#881)

name: CI Workflow

# Triggers: PRs and pushes targeting main, plus manual runs (workflow_dispatch)
# that allow overriding the database configuration via typed inputs.
on:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
  workflow_dispatch:
    inputs:
      auto_checkpoint:
        description: 'Database config - auto checkpoint'
        required: false
        default: true
        type: boolean
      buffer_pool_size:
        description: 'Database config - buffer pool size'
        required: false
        type: number
      max_num_threads:
        description: 'Database config - max number of threads'
        required: false
        default: 2
        type: number
      enable_compression:
        description: 'Database config - enable compression'
        required: false
        default: true
        type: boolean
      checkpoint_threshold:
        description: 'Database config - checkpoint threshold'
        required: false
        default: 16777216
        type: number
      force_checkpoint_on_close:
        description: 'Database config - force checkpoint on close'
        required: false
        default: true
        type: boolean
# Workflow-wide environment. The github.event.inputs.* values expand to empty
# strings on pull_request/push events; they only carry values on
# workflow_dispatch runs.
env:
  RUNTIME_CHECKS: 1
  USE_EXISTING_BINARY_DATASET: 1
  AUTO_CHECKPOINT: ${{ github.event.inputs.auto_checkpoint }}
  BUFFER_POOL_SIZE: ${{ github.event.inputs.buffer_pool_size }}
  MAX_NUM_THREADS: ${{ github.event.inputs.max_num_threads }}
  ENABLE_COMPRESSION: ${{ github.event.inputs.enable_compression }}
  CHECKPOINT_THRESHOLD: ${{ github.event.inputs.checkpoint_threshold }}
  FORCE_CHECKPOINT_ON_CLOSE: ${{ github.event.inputs.force_checkpoint_on_close }}
  WERROR: 1
  RUSTFLAGS: --deny warnings
  PIP_BREAK_SYSTEM_PACKAGES: 1
# Only allow one run in this group to run at a time, and cancel any runs in progress in this group.
# We use the workflow name and then add the pull request number, or (if it's a push to main), we use the name of the branch.
# See github's docs[1] and a relevant stack overflow answer[2]
# [1]: https://docs.github.com/en/actions/using-jobs/using-concurrency
# [2]: https://stackoverflow.com/questions/66335225/how-to-cancel-previous-runs-in-the-pr-when-you-push-new-commitsupdate-the-curre
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  # Fast lint-style checks that gate the heavier build/test jobs below.
  sanity-checks:
    name: sanity checks
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@v4
      - name: Check source headers for include guards
        # A plain `run: <cmd>` scalar would fold a second line into the first
        # command's arguments; use a literal block so each check runs on its
        # own line (same style as the extension-header step below).
        run: |
          ./scripts/check-include-guards.sh src/include
          ./scripts/check-include-guards.sh test/include
      - name: Check extension headers for include guards
        run: |
          for dir in extension/*/src/include; do
            ./scripts/check-include-guards.sh "$dir"
          done
      - name: Checks source files for std::assert
        run: ./scripts/check-no-std-assert.sh src
      - name: Check extension files for std::assert
        run: ./scripts/check-no-std-assert.sh extension
      - name: Ensure generated grammar files are up to date
        # Recompute the grammar hash and compare against the committed one;
        # cmp exits non-zero (failing the step) on mismatch.
        run: |
          python3 scripts/antlr4/hash.py src/antlr4/keywords.txt src/antlr4/Cypher.g4 > tmphashfile
          cmp tmphashfile scripts/antlr4/hash.md5
          rm tmphashfile
clang-format:
runs-on: ubuntu-24.04
container:
image: alpine:3.20
steps:
- name: Install clang-format dependencies
run: apk add --no-cache git python3 clang18-extra-tools
- uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name }}
ref: ${{ github.event.pull_request.head.ref }}
- name: Clang Format
run: python3 scripts/run-clang-format.py --clang-format-executable /usr/lib/llvm18/bin/clang-format -r src/ test/ tools/ extension/
minimal-test:
name: minimal test
runs-on: ubuntu-latest
needs: [ sanity-checks ]
env:
GEN: Ninja
TEST_JOBS: 4
NUM_THREADS: 4
steps:
- uses: actions/checkout@v4
- name: Checkout dataset and benchmarks
run: |
git submodule update --init dataset benchmark # needed for demo db
- name: Setup ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: minimal-test-${{ runner.os }}-${{ github.ref }}
max-size: 2G
restore-keys: |
minimal-test-${{ runner.os }}-refs/heads/main
minimal-test-${{ runner.os }}-
- name: Build
env:
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
run: make relwithdebinfo
- name: Generate datasets
run: bash scripts/generate_binary_demo.sh --lbug-shell-mode relwithdebinfo
- name: Install uv
run: pip3 install uv
- name: Test
env:
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
run: |
uv venv
source .venv/bin/activate
uv pip install pytest pexpect
make shell-test test
minimal-linux-extension-test:
name: minimal linux extension test
runs-on: ubuntu-latest
env:
GEN: Ninja
UW_S3_ACCESS_KEY_ID: ${{ secrets.UW_S3_ACCESS_KEY_ID }}
UW_S3_SECRET_ACCESS_KEY: ${{ secrets.UW_S3_SECRET_ACCESS_KEY }}
AWS_S3_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
AWS_S3_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
GCS_ACCESS_KEY_ID: ${{ secrets.GCS_ACCESS_KEY_ID }}
GCS_SECRET_ACCESS_KEY: ${{ secrets.GCS_SECRET_ACCESS_KEY }}
AZURE_CONNECTION_STRING: ${{ format('DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix=core.windows.net', secrets.AZURE_ACCOUNT_NAME, secrets.AZURE_ACCOUNT_KEY) }}
AZURE_ACCOUNT_NAME: ${{ secrets.AZURE_ACCOUNT_NAME }}
AZURE_PUBLIC_CONTAINER: ${{ secrets.AZURE_PUBLIC_CONTAINER }}
steps:
- uses: actions/checkout@v4
- name: Checkout extensions
run: |
git submodule update --init extension dataset benchmark
- name: Free disk space on Ubuntu runner
uses: kfir4444/free-disk-space@main
with:
tool-cache: true
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
- name: Setup ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: minimal-extension-test-${{ runner.os }}-${{ github.ref }}
max-size: 2G
restore-keys: |
minimal-extension-test-${{ runner.os }}-refs/heads/main
minimal-extension-test-${{ runner.os }}-
- name: Update PostgreSQL host
working-directory: extension/postgres/test/test_files
env:
PG_FNAME: postgres.test
SQL_FNAME: sql_query.test
FIND: "localhost"
run: |
node -e 'fs=require("fs");fs.readFile(process.env.PG_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.PG_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});'
node -e 'fs=require("fs");fs.readFile(process.env.SQL_FNAME,"utf8",(err,data)=>{if(err!=null)throw err;fs.writeFile(process.env.SQL_FNAME,data.replaceAll(process.env.FIND,process.env.PG_HOST),"utf8",e=>{if(e!=null)throw e;});});'
- name: Install DuckDB (Linux)
if: runner.os == 'Linux'
run: |
if [ "${{ runner.arch }}" = "X64" ]; then
wget https://github.com/duckdb/duckdb/releases/latest/download/libduckdb-linux-amd64.zip
unzip libduckdb-linux-amd64.zip -d duckdb
else
wget https://github.com/duckdb/duckdb/releases/latest/download/libduckdb-linux-arm64.zip
unzip libduckdb-linux-arm64.zip -d duckdb
fi
sudo cp duckdb/duckdb.h /usr/local/include/
sudo cp duckdb/duckdb.hpp /usr/local/include/
sudo cp duckdb/libduckdb.so /usr/local/lib/
sudo ldconfig
# Create CMake config files
sudo mkdir -p /usr/local/lib/cmake/DuckDB
sudo tee /usr/local/lib/cmake/DuckDB/DuckDBConfig.cmake > /dev/null << 'EOF'
# DuckDB CMake configuration file
if(NOT TARGET DuckDB::duckdb)
add_library(DuckDB::duckdb SHARED IMPORTED)
set_target_properties(DuckDB::duckdb PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "/usr/local/include"
IMPORTED_LOCATION "/usr/local/lib/libduckdb.so"
)
endif()
set(DuckDB_LIBRARIES DuckDB::duckdb)
set(DuckDB_INCLUDE_DIRS "/usr/local/include")
set(DuckDB_FOUND TRUE)
EOF
- name: Install uv
run: |
pip3 install uv
uv venv
- name: Install dependencies
run: uv pip install rangehttpserver requests
# shell needs to be built first to generate the dataset provided by the server
- name: Extension test build
run: |
make relwithdebinfo
cp build/relwithdebinfo/tools/shell/lbug lbug.prod
make extension-test-build
cp lbug.prod build/relwithdebinfo/tools/shell/lbug
- name: Test dynamic extension loading
run: |
cd tools/shell/test
uv venv
source .venv/bin/activate
uv pip install pytest pexpect
pytest test_dynamic_extension_loading.py -v
- name: Extension test
run: |
uv run scripts/generate-tinysnb.py
uv run scripts/setup-extension-repo.py &
make extension-test