2 changes: 1 addition & 1 deletion Makefile
@@ -336,7 +336,7 @@ agg_mode_batcher_send_sp1_proof:
-F "nonce=$${NONCE}" \
-F "proof=@scripts/test_files/sp1/sp1_fibonacci_5_0_0.proof" \
-F "program_vk=@scripts/test_files/sp1/sp1_fibonacci_5_0_0_vk.bin" \
-F "signature_hex=0x0" \
-F "_signature_hex=0x0" \
http://127.0.0.1:8089/proof/sp1

__AGGREGATOR__: ## ____
18 changes: 10 additions & 8 deletions aggregation_mode/Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions aggregation_mode/Cargo.toml
@@ -13,6 +13,7 @@ serde_yaml = "0.9"
alloy = { version = "1.1.1", features = ["default", "signer-keystore", "kzg"] }
bincode = "1.3.3"
aligned-sdk = { path = "../crates/sdk/" }
db = { path = "./db" }
sp1-sdk = "5.0.0"
risc0-zkvm = { version = "3.0.3" }

2 changes: 2 additions & 0 deletions aggregation_mode/batcher/src/server/http.rs
@@ -44,6 +44,8 @@ impl BatcherServer {
HttpServer::new(move || {
App::new()
.app_data(Data::new(state.clone()))
// Note: this is temporary and should be lowered when we accept proofs via multipart form data instead of json
.app_data(web::JsonConfig::default().limit(50 * 1024 * 1024)) // 50mb
.route("/nonce/{address}", web::get().to(Self::get_nonce))
.route("/proof/merkle", web::get().to(Self::get_proof_merkle_path))
.route("/proof/sp1", web::post().to(Self::post_proof_sp1))
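For context, a minimal self-contained sketch of the actix-web JsonConfig pattern used above; this is not part of the PR's diff, the handler body and bind address are illustrative placeholders, and the 50 MB figure mirrors the limit added here:

use actix_web::{web, App, HttpServer, Responder};

// Placeholder handler: the real batcher endpoint deserializes a proof submission.
async fn post_proof_sp1(payload: web::Json<serde_json::Value>) -> impl Responder {
    format!("received {} bytes of JSON", payload.to_string().len())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        App::new()
            // Raise actix-web's default JSON body limit (2 MB) to 50 MB so large
            // serialized proofs fit until multipart uploads replace JSON bodies.
            .app_data(web::JsonConfig::default().limit(50 * 1024 * 1024))
            .route("/proof/sp1", web::post().to(post_proof_sp1))
    })
    .bind(("127.0.0.1", 8089))?
    .run()
    .await
}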
2 changes: 1 addition & 1 deletion aggregation_mode/db/Cargo.toml
@@ -1,5 +1,5 @@
[package]
name = "agg_mode_db"
name = "db"
version = "0.1.0"
edition = "2021"

1 change: 1 addition & 0 deletions aggregation_mode/db/src/lib.rs
@@ -0,0 +1 @@
pub mod types;
34 changes: 34 additions & 0 deletions aggregation_mode/db/src/types.rs
@@ -0,0 +1,34 @@
use sqlx::{
prelude::FromRow,
types::{BigDecimal, Uuid},
Type,
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Type)]
#[sqlx(type_name = "task_status", rename_all = "lowercase")]
pub enum TaskStatus {
Pending,
Processing,
Verified,
}

#[derive(Debug, Clone, FromRow)]
pub struct Task {
pub task_id: Uuid,
pub address: String,
pub proving_system_id: i32,
Collaborator review comment: This type should match the one used to query the DB in get_pending_tasks_and_mark_them_as_processed (i64). I understand that in the database the value is an integer, but we should keep a common type to represent it.

pub proof: Vec<u8>,
pub program_commitment: Vec<u8>,
pub merkle_path: Option<Vec<u8>>,
pub status: TaskStatus,
}

#[derive(Debug, Clone, FromRow)]
pub struct Payment {
pub payment_event_id: Uuid,
pub address: String,
pub amount: i32,
pub started_at: BigDecimal,
pub valid_until: BigDecimal,
pub tx_hash: String,
}
2 changes: 2 additions & 0 deletions aggregation_mode/proof_aggregator/Cargo.toml
@@ -10,6 +10,7 @@ serde_yaml = { workspace = true }
alloy = { workspace = true }
bincode = { workspace = true }
aligned-sdk = { workspace = true }
db = { workspace = true }

tracing = { version = "0.1", features = ["log"] }
tracing-subscriber = { version = "0.3.0", features = ["env-filter"] }
@@ -21,6 +22,7 @@ ciborium = "=0.2.2"
lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks.git", rev = "5f8f2cfcc8a1a22f77e8dff2d581f1166eefb80b", features = ["serde"]}
rayon = "1.10.0"
backon = "1.2.0"
sqlx = { version = "0.8", features = [ "runtime-tokio", "postgres", "uuid", "bigdecimal" ] }

# zkvms
sp1-sdk = { workspace = true }

This file was deleted.

16 changes: 12 additions & 4 deletions aggregation_mode/proof_aggregator/src/aggregators/mod.rs
@@ -3,10 +3,11 @@ pub mod sp1_aggregator;

use std::fmt::Display;

use aligned_sdk::aggregation_layer::AggregationModeProvingSystem;
use lambdaworks_crypto::merkle_tree::traits::IsMerkleTreeBackend;
use risc0_aggregator::{Risc0AggregationError, Risc0ProofReceiptAndImageId};
use sha3::{Digest, Keccak256};
use sp1_aggregator::{SP1AggregationError, SP1ProofWithPubValuesAndElf};
use sp1_aggregator::{SP1AggregationError, SP1ProofWithPubValuesAndVk};
use tracing::info;

#[derive(Clone, Debug)]
@@ -44,6 +45,13 @@ impl ZKVMEngine {
Some(engine)
}

pub fn proving_system_id(&self) -> u16 {
match &self {
ZKVMEngine::SP1 => AggregationModeProvingSystem::SP1.as_u16(),
ZKVMEngine::RISC0 => AggregationModeProvingSystem::RISC0.as_u16(),
}
}

/// Aggregates a list of [`AlignedProof`]s into a single [`AlignedProof`].
///
/// Returns a tuple containing:
@@ -61,7 +69,7 @@
) -> Result<(AlignedProof, [u8; 32]), ProofAggregationError> {
let res = match self {
ZKVMEngine::SP1 => {
let proofs: Vec<SP1ProofWithPubValuesAndElf> = proofs
let proofs: Vec<SP1ProofWithPubValuesAndVk> = proofs
.into_iter()
// Fetcher already filtered for SP1
// We do this for type casting, as to avoid using generics
@@ -80,7 +88,7 @@
proofs_per_chunk,
);

let mut agg_proofs: Vec<(SP1ProofWithPubValuesAndElf, Vec<[u8; 32]>)> = vec![];
let mut agg_proofs: Vec<(SP1ProofWithPubValuesAndVk, Vec<[u8; 32]>)> = vec![];
for (i, chunk) in chunks.enumerate() {
let leaves_commitment =
chunk.iter().map(|e| e.hash_vk_and_pub_inputs()).collect();
@@ -154,7 +162,7 @@
}

pub enum AlignedProof {
SP1(Box<SP1ProofWithPubValuesAndElf>),
SP1(Box<SP1ProofWithPubValuesAndVk>),
Risc0(Box<Risc0ProofReceiptAndImageId>),
}

aggregation_mode/proof_aggregator/src/aggregators/sp1_aggregator.rs
@@ -25,9 +25,8 @@ static SP1_PROVER_CLIENT: LazyLock<EnvProver> = LazyLock::new(ProverClient::from
static SP1_PROVER_CLIENT_CPU: LazyLock<CpuProver> =
LazyLock::new(|| ProverClient::builder().cpu().build());

pub struct SP1ProofWithPubValuesAndElf {
pub struct SP1ProofWithPubValuesAndVk {
pub proof_with_pub_values: SP1ProofWithPublicValues,
pub elf: Vec<u8>,
pub vk: SP1VerifyingKey,
}

@@ -37,16 +36,14 @@ pub enum AlignedSP1VerificationError {
UnsupportedProof,
}

impl SP1ProofWithPubValuesAndElf {
impl SP1ProofWithPubValuesAndVk {
/// Constructs a new instance of the struct by verifying a given SP1 proof with its public values.
pub fn new(
proof_with_pub_values: SP1ProofWithPublicValues,
elf: Vec<u8>,
vk: SP1VerifyingKey,
) -> Result<Self, AlignedSP1VerificationError> {
let client = &*SP1_PROVER_CLIENT_CPU;

let (_pk, vk) = client.setup(&elf);

// only sp1 compressed proofs are supported for aggregation now
match proof_with_pub_values.proof {
sp1_sdk::SP1Proof::Compressed(_) => client
@@ -57,7 +54,6 @@ impl SP1ProofWithPubValuesAndElf {

Ok(Self {
proof_with_pub_values,
elf,
vk,
})
}
@@ -80,8 +76,8 @@ pub enum SP1AggregationError {
}

pub(crate) fn run_user_proofs_aggregator(
proofs: &[SP1ProofWithPubValuesAndElf],
) -> Result<SP1ProofWithPubValuesAndElf, SP1AggregationError> {
proofs: &[SP1ProofWithPubValuesAndVk],
) -> Result<SP1ProofWithPubValuesAndVk, SP1AggregationError> {
let mut stdin = SP1Stdin::new();

let mut program_input = sp1_aggregation_program::UserProofsAggregatorInput {
@@ -131,18 +127,17 @@ pub(crate) fn run_user_proofs_aggregator(
.verify(&proof, &vk)
.map_err(SP1AggregationError::Verification)?;

let proof_and_elf = SP1ProofWithPubValuesAndElf {
let proof_and_elf = SP1ProofWithPubValuesAndVk {
proof_with_pub_values: proof,
elf: USER_PROOFS_PROGRAM_ELF.to_vec(),
vk,
};

Ok(proof_and_elf)
}

pub(crate) fn run_chunk_aggregator(
proofs: &[(SP1ProofWithPubValuesAndElf, Vec<[u8; 32]>)],
) -> Result<SP1ProofWithPubValuesAndElf, SP1AggregationError> {
proofs: &[(SP1ProofWithPubValuesAndVk, Vec<[u8; 32]>)],
) -> Result<SP1ProofWithPubValuesAndVk, SP1AggregationError> {
let mut stdin = SP1Stdin::new();

let mut program_input = sp1_aggregation_program::ChunkAggregatorInput {
@@ -204,9 +199,8 @@ pub(crate) fn run_chunk_aggregator(
.verify(&proof, &vk)
.map_err(SP1AggregationError::Verification)?;

let proof_and_elf = SP1ProofWithPubValuesAndElf {
let proof_and_elf = SP1ProofWithPubValuesAndVk {
proof_with_pub_values: proof,
elf: CHUNK_PROGRAM_ELF.to_vec(),
vk,
};

37 changes: 2 additions & 35 deletions aggregation_mode/proof_aggregator/src/backend/config.rs
@@ -1,31 +1,26 @@
use serde::{Deserialize, Serialize};
use std::{fs::File, fs::OpenOptions, io::Read, io::Write};
use std::{fs::File, io::Read};

#[derive(Debug, Deserialize, Serialize)]
pub struct ECDSAConfig {
pub private_key_store_path: String,
pub private_key_store_password: String,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct LastAggregatedBlock {
pub last_aggregated_block: u64,
}

#[derive(Debug, Deserialize, Serialize)]
pub struct Config {
pub eth_rpc_url: String,
pub eth_ws_url: String,
pub max_proofs_in_queue: u16,
pub proof_aggregation_service_address: String,
pub aligned_service_manager_address: String,
pub last_aggregated_block_filepath: String,
pub ecdsa: ECDSAConfig,
pub proofs_per_chunk: u16,
pub total_proofs_limit: u16,
pub risc0_chunk_aggregator_image_id: String,
pub sp1_chunk_aggregator_vk_hash: String,
pub monthly_budget_eth: f64,
pub db_connection_url: String,
}

impl Config {
@@ -36,32 +31,4 @@ impl Config {
let config: Config = serde_yaml::from_str(&contents)?;
Ok(config)
}

pub fn get_last_aggregated_block(&self) -> Result<u64, Box<dyn std::error::Error>> {
let mut file = File::open(&self.last_aggregated_block_filepath)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let lab: LastAggregatedBlock = serde_json::from_str(&contents)?;
Ok(lab.last_aggregated_block)
}

pub fn update_last_aggregated_block(
&self,
last_aggregated_block: u64,
) -> Result<(), Box<dyn std::error::Error>> {
let last_aggregated_block_struct = LastAggregatedBlock {
last_aggregated_block,
};

let mut file = OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(&self.last_aggregated_block_filepath)?;

let content = serde_json::to_string(&last_aggregated_block_struct)?;
file.write_all(content.as_bytes())?;

Ok(())
}
}
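With the last-aggregated-block file handling removed, the config's new db_connection_url presumably feeds a database pool. A hypothetical sketch of that wiring with the sqlx features enabled in proof_aggregator/Cargo.toml; the helper name and pool size are placeholders, not the aggregator's actual code:

use sqlx::postgres::PgPoolOptions;
use sqlx::PgPool;

// Builds a Postgres connection pool from Config::db_connection_url.
pub async fn connect_db(db_connection_url: &str) -> Result<PgPool, sqlx::Error> {
    PgPoolOptions::new()
        .max_connections(5) // placeholder pool size
        .connect(db_connection_url)
        .await
}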