Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ updates:
- "/"
- "/attestor/attestor"
- "/attestor/attestor_zombienet"
- "/checkpoint-builder"
- "/common/cc-client"
- "/common/continuity"
- "/common/eth"
Expand Down Expand Up @@ -101,6 +102,7 @@ updates:
directories:
- "/attestor/scripts"
- "/cc3-indexer"
- "/checkpoint-verifier"
- "/cli"
- "/common/eth/contracts"
- "/common/eth/contracts/get-contract-state"
Expand Down
40 changes: 30 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ members = [
"proof-gen-api-server",
"attestor/attestor",
"attestor/attestor_zombienet",
"checkpoint-builder",
]
resolver = "2"

Expand Down Expand Up @@ -59,6 +60,7 @@ axum = { version = "0.8" }
bip32 = { version = "0.5.1", default-features = false, features = ["bip39"] }
clap = { version = "4.5.3", features = ["derive", "env"] }
derive_more = "0.99.17"
dotenvy = "0.15"
env_logger = "0.11"
environmental = { version = "1.1.4", default-features = false }
ethereum = { version = "0.18.2", default-features = false }
Expand Down Expand Up @@ -90,6 +92,7 @@ serde = { version = "1.0.197", default-features = false }
serde_json = { version = "1.0.145", default-features = false }
serde_yaml = { version = "0.9.34" }
sha3 = { version = "0.10", default-features = false }
sled = "0.34"
slices = "0.2.0"
snap = "1.1"
tempfile = { version = "3.10", default-features = false }
Expand Down
2 changes: 1 addition & 1 deletion archiver/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ clap = { workspace = true }
dotenvy = "0.15"
futures = { workspace = true }
serde = { workspace = true, features = ["std", "derive"] }
sled = "0.34"
sled = { workspace = true }
tokio = { workspace = true, features = ["rt-multi-thread", "macros", "signal"] }
tower-http = { workspace = true, features = ["trace"] }
tracing = { workspace = true }
Expand Down
132 changes: 132 additions & 0 deletions attestor/scripts/ImportCheckpointsFromCsv.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
require('dotenv').config(); // Load environment variables from .env
const { ApiPromise, WsProvider, Keyring } = require('@polkadot/api');
const { hexToU8a } = require('@polkadot/util');
const fs = require('fs');

// Flag handling: --dev switches the script into local-testing mode.
const IS_DEV = process.argv.includes('--dev');

// Number of checkpoints submitted per extrinsic; must stay in sync with the
// BoundedVec bound used in main() (BoundedVec<..., 100>).
const BATCH_SIZE = 100;
// Maximum submission attempts per batch before the script aborts.
const MAX_RETRIES = 10;
// Decrease the retry delay when running with --dev
const RETRY_DELAY_MS = IS_DEV ? 6000 : 15000;

/**
 * Parse recognized CLI flags into a config object.
 *
 * Recognized flags, each consuming the following argument as its value:
 *   --file       -> file
 *   --chain-key  -> chainKey
 *   --rpc        -> rpc
 * A flag with no following (truthy) value is ignored. Unknown arguments
 * (including --dev, handled at module level) are skipped.
 *
 * @returns {{file?: string, chainKey?: string, rpc?: string}} parsed values
 */
function parseArgs() {
  const rawArgs = process.argv.slice(2);
  const config = {};
  // Map lookup avoids an if/else chain and returns undefined for unknown flags.
  const flagToKey = new Map([
    ['--file', 'file'],
    ['--chain-key', 'chainKey'],
    ['--rpc', 'rpc'],
  ]);
  for (let pos = 0; pos < rawArgs.length; pos += 1) {
    const key = flagToKey.get(rawArgs[pos]);
    if (key && rawArgs[pos + 1]) {
      pos += 1; // consume the value argument
      config[key] = rawArgs[pos];
    }
  }
  return config;
}

/**
 * Resolve after the given number of milliseconds.
 *
 * @param {number} ms - duration to wait, in milliseconds
 * @returns {Promise<void>} settles (fulfilled) once the timer fires
 */
async function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

/**
 * Entry point: reads "block_number,digest_hex" rows from a CSV file and
 * imports them into the destination chain in batches of BATCH_SIZE via
 * sudo(attestation.importCheckpoints).
 *
 * Configuration is resolved from CLI args first (--file, --rpc, --chain-key),
 * then env vars (CHECKPOINTS_FILE, DESTINATION_CHAIN, CHAIN_KEY_ON_DESTINATION).
 * The sudo mnemonic is env-only (MNEMONIC).
 *
 * @throws {Error} if any required configuration value is missing, or a batch
 *   submission still throws after MAX_RETRIES attempts.
 */
async function main() {
  if (IS_DEV) {
    console.log('Running in DEV mode: RETRY_DELAY_MS set to 6000ms');
  }

  const cliArgs = parseArgs();

  // Resolve config: CLI args take priority over env vars
  const mnemonic = process.env.MNEMONIC;
  if (!mnemonic) {
    throw new Error('MNEMONIC not found in environment');
  }

  const csvFile = cliArgs.file || process.env.CHECKPOINTS_FILE;
  if (!csvFile) {
    throw new Error('CSV file not specified. Use --file <path> or set CHECKPOINTS_FILE env var');
  }

  const destinationChain = cliArgs.rpc || process.env.DESTINATION_CHAIN;
  if (!destinationChain) {
    throw new Error('RPC endpoint not specified. Use --rpc <url> or set DESTINATION_CHAIN env var');
  }

  const chainKey = cliArgs.chainKey || process.env.CHAIN_KEY_ON_DESTINATION;
  if (!chainKey) {
    throw new Error('Chain key not specified. Use --chain-key <key> or set CHAIN_KEY_ON_DESTINATION env var');
  }

  // Get api and keyring
  const provider = new WsProvider(destinationChain);
  const api = await ApiPromise.create({ provider });

  const keyring = new Keyring({ type: 'sr25519' });
  const sudo = keyring.addFromUri(mnemonic);
  console.log('Sudo address:', sudo.address);

  // Parse CSV: each line is "block_number,digest_hash"
  const rawData = fs.readFileSync(csvFile, 'utf8');
  const lines = rawData.trim().split('\n');

  // Skip header line if present (starts with non-numeric).
  // NOTE(review): this also silently drops blank lines and any malformed row
  // whose first non-space character is not a digit.
  const dataLines = lines.filter((line) => {
    const firstChar = line.trim()[0];
    return firstChar >= '0' && firstChar <= '9';
  });

  // NOTE(review): a data row without a comma leaves digestHex undefined and
  // `.trim()` throws a TypeError here — assumes well-formed CSV input; confirm
  // against the checkpoint-builder output format.
  const entries = dataLines.map((line) => {
    const [blockNumber, digestHex] = line.trim().split(',');
    return { blockNumber: blockNumber.trim(), digestHex: digestHex.trim() };
  });

  console.log(`Loaded ${entries.length} checkpoints from ${csvFile}`);

  // Submit checkpoints in fixed-size batches, retrying each batch on failure.
  for (let i = 0; i < entries.length; i += BATCH_SIZE) {
    const batch = entries.slice(i, i + BATCH_SIZE);

    // Build the runtime checkpoint type for every row in this batch.
    const checkpointVec = batch.map(({ blockNumber, digestHex }) => {
      return api.createType('AttestorPrimitivesAttestationCheckpoint', {
        digest: hexToU8a(digestHex),
        // Use bigint to avoid precision loss when block numbers exceed Number.MAX_SAFE_INTEGER
        block_number: BigInt(blockNumber),
      });
    });

    // NOTE(review): the bound 100 duplicates BATCH_SIZE — keep them in sync.
    const boundedVec = api.createType('BoundedVec<AttestorPrimitivesAttestationCheckpoint, 100>', checkpointVec);

    const call = api.tx.attestation.importCheckpoints(chainKey, boundedVec);
    const sudoCall = api.tx.sudo.sudo(call);

    let attempt = 0;
    while (attempt < MAX_RETRIES) {
      console.log(`Submitting batch ${Math.floor(i / BATCH_SIZE) + 1}, attempt ${attempt + 1}...`);
      try {
        // NOTE(review): the retry loop only covers errors thrown by
        // signAndSend itself; errors reported later through the status
        // callback are logged but not retried. The `break` below runs as soon
        // as submission succeeds, without waiting for block inclusion.
        const unsub = await sudoCall.signAndSend(sudo, (result) => {
          if (result.status.isInBlock) {
            console.log(`📦 Batch included in block: ${result.status.asInBlock}`);
            unsub();
          } else if (result.isError) {
            console.error('❌ Transaction error reported');
            unsub();
          }
        });
        break; // exit retry loop if no exception
      } catch (err) {
        console.error(`⚠️ Error submitting batch: ${err.message}`);
        attempt++;
        if (attempt >= MAX_RETRIES) {
          throw new Error(`❌ Failed to submit batch after ${MAX_RETRIES} attempts`);
        }
        await delay(RETRY_DELAY_MS);
      }
    }

    // NOTE(review): fixed pause between batches — presumably to let the
    // previous extrinsic clear the pool before the next is signed; confirm
    // against the target chain's block time.
    await delay(RETRY_DELAY_MS);
  }

  console.log('✅ All checkpoint batches submitted.');
  // Force exit: the open WsProvider connection would otherwise keep the
  // process alive.
  process.exit(0);
}

// Top-level runner: surface any error and exit non-zero.
main().catch((err) => {
  console.error('❌ Error:', err);
  process.exit(1);
});
61 changes: 61 additions & 0 deletions attestor/scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -115,3 +115,64 @@ No special steps here. Simply register and run attestors as normal for the chain
imported checkpoints to.

The resulting attestor sync process should take a much shorter time than it otherwise would.

## Importing Checkpoints from a CSV File

`ImportCheckpointsFromCsv.js` is an alternative to the export-then-import workflow above.
Use it when you already have checkpoints in a CSV file (e.g. produced by a checkpoint-builder
tool) and want to import them directly into a chain without needing a live source chain to
export from.

### CSV Format

Each row is `block_number,digest_hex`. An optional header line is supported and will be
skipped automatically.

```
block_number,digest_hex
1000,0x6cfd0730ec3bd606df4a125c004105ba21dbf06a004464e102de156d992ef04b
2000,0xd63f51622a13d598a341266e72a253556ed1f681495006a47b739404c3ea6a90
```

### Configuration

Configuration can be provided via CLI arguments or environment variables. CLI arguments take
priority over environment variables.

| CLI argument | Env variable | Description |
|---------------|---------------------------|----------------------------------------------------|
| `--file` | `CHECKPOINTS_FILE` | Path to the CSV file |
| `--rpc` | `DESTINATION_CHAIN` | WebSocket URL of the target chain |
| `--chain-key` | `CHAIN_KEY_ON_DESTINATION`| Chain key to import checkpoints into |
| *(none)* | `MNEMONIC` | Sudo account mnemonic (required, env var only) |

### Usage

Via CLI arguments:

```sh
node ImportCheckpointsFromCsv.js --file checkpoints.csv --rpc ws://127.0.0.1:9944 --chain-key 2
```

Via `.env` file — add to `attestor/scripts/.env`:

```
MNEMONIC="//Alice"
DESTINATION_CHAIN="ws://127.0.0.1:9944"
CHAIN_KEY_ON_DESTINATION=2
CHECKPOINTS_FILE=checkpoints.csv
```

Then run:

```sh
node ImportCheckpointsFromCsv.js
```

Pass `--dev` to reduce the retry delay from 15 s to 6 s, useful when testing locally:

```sh
node ImportCheckpointsFromCsv.js --dev
```

Checkpoints are submitted in batches of 100 via a `sudo(attestation.importCheckpoints(...))` call.
19 changes: 19 additions & 0 deletions checkpoint-builder/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
# Path to the Sled database folder containing block roots
SLED_DB_PATH="./block_roots_db"
# Starting digest for checkpoint generation
STARTING_DIGEST=
# Checkpoint ranges in format "start,end,interval;start,end,interval;..."
# Each range specifies: height_start, height_end (inclusive), checkpoint_interval
# Example: "0,999,100;1000,4999,500" creates:
# - Checkpoints every 100 blocks for blocks 0-999 (10 checkpoints)
# - Checkpoints every 500 blocks for blocks 1000-4999 (8 checkpoints)
# Rules:
# - Ranges must be contiguous (range[i].end + 1 == range[i+1].start)
# - Block count in each range must be divisible by its interval
CHECKPOINT_RANGES="0;1,1000000,10000;1000001,2000000,5000;2000001,4000000,2000;4000001,5000000,1000;5000001,10000000,1000"
# How many checkpoints to batch together before writing to CSV
CHECKPOINT_FLUSH_INTERVAL=20000
# Whether to validate that all checkpoint ranges are present in the database before processing
VALIDATE_DATABASE=true
# Output file for checkpoints (will have timestamp appended)
OUTPUT_FILE="checkpoints.csv"
1 change: 1 addition & 0 deletions checkpoint-builder/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
Loading
Loading