From 2c68dad70e37078b68067966b7c11a339bc4d7df Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 10:35:45 -0800
Subject: [PATCH 01/43] fix: stream crashing.

---
 Cargo.lock                                    |  27 +-
 demo/hsm/Cargo.toml                           |   3 +
 .../cli/server/ed25519/hashi_corp_vault.rs    |  25 +-
 demo/hsm/src/cli/server/secp256k1/aws_kms.rs  |  26 +-
 demo/hsm/src/cryptography/aws_kms.rs          |   4 +-
 demo/hsm/src/cryptography/hashicorp_vault.rs  |   4 +-
 demo/hsm/src/lib.rs                           |  39 ++-
 demo/hsm/src/server.rs                        |  55 ++--
 .../docker-compose.follower.yml               |   1 +
 .../movement-full-node/docker-compose.yml     |   2 +-
 .../movement/movement-full-node/src/da/mod.rs |  17 ++
 .../src/da/stream_blocks/mod.rs               |  54 ++++
 .../movement/movement-full-node/src/lib.rs    |   4 +
 .../movement-full-node/src/node/partial.rs    |   2 +
 .../src/node/tasks/execute_settle.rs          |   5 +-
 .../process-compose.follower.yml              |   2 +-
 .../celestia/light-node/src/v1/passthrough.rs |  31 ++-
 .../signing/interface/src/cryptography/mod.rs |  22 +-
 util/signing/interface/src/key/mod.rs         | 245 ++++++++++++++++++
 util/signing/interface/src/lib.rs             |  11 +-
 util/signing/providers/aws-kms/Cargo.toml     |   2 +
 .../providers/aws-kms/src/cryptography/mod.rs |  13 +
 .../aws-kms/src/cryptography/secp256k1/mod.rs |  15 +-
 util/signing/providers/aws-kms/src/hsm/key.rs |  43 +++
 util/signing/providers/aws-kms/src/hsm/mod.rs | 109 ++++++++
 util/signing/providers/aws-kms/src/lib.rs     |   1 +
 .../src/cryptography/ed25519/mod.rs           |   4 +-
 .../hashicorp-vault/src/cryptography/mod.rs   |   2 +-
 .../providers/hashicorp-vault/src/hsm/key.rs  |  42 +++
 .../providers/hashicorp-vault/src/hsm/mod.rs  | 118 ++++-----
 30 files changed, 751 insertions(+), 177 deletions(-)
 create mode 100644 networks/movement/movement-full-node/src/da/mod.rs
 create mode 100644 networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
 create mode 100644 util/signing/interface/src/key/mod.rs
 create mode 100644 util/signing/providers/aws-kms/src/hsm/key.rs
 create mode 100644 util/signing/providers/aws-kms/src/hsm/mod.rs
 create mode 100644 util/signing/providers/hashicorp-vault/src/hsm/key.rs

diff --git a/Cargo.lock b/Cargo.lock
index 43734f0c6..fd7a67c76 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3223,9 +3223,9 @@ dependencies = [
 
 [[package]]
 name = "aws-runtime"
-version = "1.4.4"
+version = "1.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5ac934720fbb46206292d2c75b57e67acfc56fe7dfd34fb9a02334af08409ea"
+checksum = "b16d1aa50accc11a4b4d5c50f7fb81cc0cf60328259c587d0e6b0f11385bde46"
 dependencies = [
  "aws-credential-types",
  "aws-sigv4",
@@ -3271,9 +3271,9 @@ dependencies = [
 
 [[package]]
 name = "aws-sdk-s3"
-version = "1.61.0"
+version = "1.68.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e531658a0397d22365dfe26c3e1c0c8448bf6a3a2d8a098ded802f2b1261615"
+checksum = "bc5ddf1dc70287dc9a2f953766a1fe15e3e74aef02fd1335f2afa475c9b4f4fc"
 dependencies = [
  "aws-credential-types",
  "aws-runtime",
@@ -3282,7 +3282,7 @@ dependencies = [
  "aws-smithy-checksums",
  "aws-smithy-eventstream",
  "aws-smithy-http",
- "aws-smithy-json 0.60.7",
+ "aws-smithy-json 0.61.1",
  "aws-smithy-runtime",
  "aws-smithy-runtime-api",
  "aws-smithy-types",
@@ -3401,9 +3401,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-async"
-version = "1.2.1"
+version = "1.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c"
+checksum = "427cb637d15d63d6f9aae26358e1c9a9c09d5aa490d64b09354c8217cfef0f28"
 dependencies = [
  "futures-util",
  "pin-project-lite",
@@ -3493,9 +3493,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-runtime"
-version = "1.7.4"
+version = "1.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f20685047ca9d6f17b994a07f629c813f08b5bce65523e47124879e60103d45"
+checksum = "a05dd41a70fc74051758ee75b5c4db2c0ca070ed9229c3df50e9475cda1cb985"
 dependencies = [
  "aws-smithy-async",
  "aws-smithy-http",
@@ -3537,9 +3537,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-types"
-version = "1.2.9"
+version = "1.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fbd94a32b3a7d55d3806fe27d98d3ad393050439dd05eb53ece36ec5e3d3510"
+checksum = "38ddc9bd6c28aeb303477170ddd183760a956a03e083b3902a990238a7e3792d"
 dependencies = [
  "base64-simd",
  "bytes 1.8.0",
@@ -7580,6 +7580,9 @@ dependencies = [
  "ed25519 2.2.3",
  "google-cloud-kms",
  "k256",
+ "movement-signer",
+ "movement-signer-aws-kms",
+ "movement-signer-hashicorp-vault",
  "rand 0.7.3",
  "reqwest 0.12.9",
  "ring-compat",
@@ -10527,6 +10530,8 @@ dependencies = [
 name = "movement-signer-aws-kms"
 version = "0.0.2"
 dependencies = [
+ "anyhow",
+ "aws-config",
  "aws-sdk-kms",
  "movement-signer",
 ]
diff --git a/demo/hsm/Cargo.toml b/demo/hsm/Cargo.toml
index 1cec78720..2b3628760 100644
--- a/demo/hsm/Cargo.toml
+++ b/demo/hsm/Cargo.toml
@@ -28,6 +28,9 @@ axum = "0.6"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 clap = { workspace = true }
+movement-signer = { workspace = true }
+movement-signer-aws-kms = { workspace = true }
+movement-signer-hashicorp-vault = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/demo/hsm/src/cli/server/ed25519/hashi_corp_vault.rs b/demo/hsm/src/cli/server/ed25519/hashi_corp_vault.rs
index 0c9c0f5e3..debbf9c5d 100644
--- a/demo/hsm/src/cli/server/ed25519/hashi_corp_vault.rs
+++ b/demo/hsm/src/cli/server/ed25519/hashi_corp_vault.rs
@@ -1,24 +1,33 @@
-use crate::{cryptography::Ed25519, hsm, server::create_server};
+use crate::server::create_server;
 use axum::Server;
 use clap::Parser;
+use movement_signer::cryptography::ed25519::Ed25519;
+use movement_signer::key::Key;
+use movement_signer::key::SignerBuilder;
+use movement_signer::Signer;
+use movement_signer_hashicorp_vault::hsm::key::Builder;
 use std::net::SocketAddr;
 use std::sync::Arc;
 use tokio::sync::Mutex;
 
 #[derive(Debug, Parser, Clone)]
 #[clap(rename_all = "kebab-case", about = "Runs signing app for ed25519 against HashiCorp Vault")]
-pub struct HashiCorpVault {}
+pub struct HashiCorpVault {
+	canonical_key: String,
+	#[arg(long)]
+	create_key: bool,
+}
 
 impl HashiCorpVault {
 	pub async fn run(&self) -> Result<(), anyhow::Error> {
-		let hsm = hsm::hashi_corp_vault::HashiCorpVault::<Ed25519>::try_from_env()?
-			.create_key()
-			.await?
-			.fill_with_public_key()
-			.await?;
+		// build the hsm
+		let key = Key::try_from_canonical_string(self.canonical_key.as_str())
+			.map_err(|e| anyhow::anyhow!(e))?;
+		let builder = Builder::<Ed25519>::new().create_key(self.create_key);
+		let hsm = Signer::new(builder.build(key).await?);
 
+		// build the server
 		let server_hsm = Arc::new(Mutex::new(hsm));
-
 		let app = create_server(server_hsm);
 		let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
 		println!("Server listening on {}", addr);
diff --git a/demo/hsm/src/cli/server/secp256k1/aws_kms.rs b/demo/hsm/src/cli/server/secp256k1/aws_kms.rs
index 21a22bd9e..36c633987 100644
--- a/demo/hsm/src/cli/server/secp256k1/aws_kms.rs
+++ b/demo/hsm/src/cli/server/secp256k1/aws_kms.rs
@@ -1,22 +1,32 @@
-use crate::{cryptography::Secp256k1, hsm, server::create_server};
+use crate::server::create_server;
 use axum::Server;
 use clap::Parser;
+use movement_signer::cryptography::secp256k1::Secp256k1;
+use movement_signer::key::Key;
+use movement_signer::key::SignerBuilder;
+use movement_signer::Signer;
+use movement_signer_aws_kms::hsm::key::Builder;
 use std::net::SocketAddr;
 use std::sync::Arc;
 use tokio::sync::Mutex;
 
 #[derive(Debug, Parser, Clone)]
 #[clap(rename_all = "kebab-case", about = "Runs signing app for secp256k1 against AWS KMS")]
-pub struct AwsKms {}
+pub struct AwsKms {
+	canonical_key: String,
+	#[arg(long)]
+	create_key: bool,
+}
 
 impl AwsKms {
 	pub async fn run(&self) -> Result<(), anyhow::Error> {
-		let hsm = hsm::aws_kms::AwsKms::<Secp256k1>::try_from_env()
-			.await?
-			.create_key()
-			.await?
-			.fill_with_public_key()
-			.await?;
+		// build the hsm
+		let key = Key::try_from_canonical_string(self.canonical_key.as_str())
+			.map_err(|e| anyhow::anyhow!(e))?;
+		let builder = Builder::<Secp256k1>::new().create_key(self.create_key);
+		let hsm = Signer::new(builder.build(key).await?);
+
+		// Build the server
 		let server_hsm = Arc::new(Mutex::new(hsm));
 
 		let app = create_server(server_hsm);
diff --git a/demo/hsm/src/cryptography/aws_kms.rs b/demo/hsm/src/cryptography/aws_kms.rs
index 7e408eea0..4fea782de 100644
--- a/demo/hsm/src/cryptography/aws_kms.rs
+++ b/demo/hsm/src/cryptography/aws_kms.rs
@@ -2,7 +2,7 @@ use crate::cryptography::Secp256k1;
 use aws_sdk_kms::types::{KeySpec, KeyUsageType, SigningAlgorithmSpec};
 
 /// Defines the needed methods for providing a definition of cryptography used with AWS KMS
-pub trait AwsKmsCryptography {
+pub trait AwsKmsCryptographySpec {
 	/// Returns the [KeySpec] for the desired cryptography
 	fn key_spec() -> KeySpec;
 
@@ -13,7 +13,7 @@ pub trait AwsKmsCryptography {
 	fn signing_algorithm_spec() -> SigningAlgorithmSpec;
 }
 
-impl AwsKmsCryptography for Secp256k1 {
+impl AwsKmsCryptographySpec for Secp256k1 {
 	fn key_spec() -> KeySpec {
 		KeySpec::EccSecgP256K1
 	}
diff --git a/demo/hsm/src/cryptography/hashicorp_vault.rs b/demo/hsm/src/cryptography/hashicorp_vault.rs
index 0ec19ac1c..071604127 100644
--- a/demo/hsm/src/cryptography/hashicorp_vault.rs
+++ b/demo/hsm/src/cryptography/hashicorp_vault.rs
@@ -2,12 +2,12 @@ use crate::cryptography::Ed25519;
 use vaultrs::api::transit::KeyType;
 
 /// Defines the needed methods for providing a definition of cryptography used with HashiCorp Vault
-pub trait HashiCorpVaultCryptography {
+pub trait HashiCorpVaultCryptographySpec {
 	/// Returns the [KeyType] for the desired cryptography
 	fn key_type() -> KeyType;
 }
 
-impl HashiCorpVaultCryptography for Ed25519 {
+impl HashiCorpVaultCryptographySpec for Ed25519 {
 	fn key_type() -> KeyType {
 		KeyType::Ed25519
 	}
diff --git a/demo/hsm/src/lib.rs b/demo/hsm/src/lib.rs
index 7b2f622db..f801ab175 100644
--- a/demo/hsm/src/lib.rs
+++ b/demo/hsm/src/lib.rs
@@ -1,8 +1,8 @@
 pub mod action_stream;
 pub mod cli;
 pub mod cryptography;
-pub mod hsm;
 pub mod server;
+use movement_signer::{cryptography::Curve, Signer, Signing};
 
 /// A collection of bytes.
 #[derive(Debug, Clone)]
@@ -33,28 +33,24 @@ pub trait ActionStream {
 	async fn next(&mut self) -> Result<Option<Message>, anyhow::Error>;
 }
 
-/// An HSM capable of signing and verifying messages.
-#[async_trait::async_trait]
-pub trait Hsm {
-	async fn sign(&self, message: Bytes) -> Result<(Bytes, PublicKey, Signature), anyhow::Error>;
-	async fn verify(
-		&self,
-		message: Bytes,
-		public_key: PublicKey,
-		signature: Signature,
-	) -> Result<bool, anyhow::Error>;
-}
-
 /// An application which reads a stream of messages to either sign or verify.
-pub struct Application {
-	hsm: Box<dyn Hsm>,
+pub struct Application<O, C>
+where
+	O: Signing<C>,
+	C: Curve,
+{
+	hsm: Signer<O, C>,
 	stream: Box<dyn ActionStream>,
 }
 
 /// The application implementation.
-impl Application {
+impl<O, C> Application<O, C>
+where
+	O: Signing<C>,
+	C: Curve,
+{
 	/// Creates a new application.
-	pub fn new(hsm: Box<dyn Hsm>, stream: Box<dyn ActionStream>) -> Self {
+	pub fn new(hsm: Signer<O, C>, stream: Box<dyn ActionStream>) -> Self {
 		Self { hsm, stream }
 	}
 
@@ -65,14 +61,15 @@ impl Application {
 			match message {
 				Message::Sign(message) => {
 					println!("SIGNING: {:?}", message);
-					let (message, public_key, signature) = self.hsm.sign(message).await?;
+					let signature = self.hsm.sign(message.0.as_slice()).await?;
+					let public_key = self.hsm.public_key().await?;
 					println!("SIGNED:\n{:?}\n{:?}\n{:?}", message, public_key, signature);
-					self.stream.notify(Message::Verify(message, public_key, signature)).await?;
+					// todo: reintroduce this if you want to notify the stream with a Verify message
+					// self.stream.notify(Message::Verify(message, public_key, signature)).await?;
 				}
 				Message::Verify(message, public_key, signature) => {
 					println!("VERIFYING:\n{:?}\n{:?}\n{:?}", message, public_key, signature);
-					let verified = self.hsm.verify(message, public_key, signature).await?;
-					println!("VERIFIED: {:?}", verified);
+					println!("VERIFIED");
 				}
 			}
 		}
diff --git a/demo/hsm/src/server.rs b/demo/hsm/src/server.rs
index 9f0bf5667..b3eb4b3f3 100644
--- a/demo/hsm/src/server.rs
+++ b/demo/hsm/src/server.rs
@@ -1,44 +1,43 @@
-use axum::{
-    routing::post,
-    extract::State,
-    Json, Router,
-    http::StatusCode,
-};
+use axum::{extract::State, http::StatusCode, routing::post, Json, Router};
+use movement_signer::cryptography::ToBytes;
+use movement_signer::{cryptography::Curve, Signer, Signing};
 use std::sync::Arc;
 use tokio::sync::Mutex;
 
-use crate::{Bytes, Hsm}; 
-
-pub fn create_server(hsm: Arc<Mutex<dyn Hsm + Send + Sync>>) -> Router {
-    Router::new()
-        .route("/sign", post(sign_handler))
-        .with_state(hsm)
+pub fn create_server<O, C>(hsm: Arc<Mutex<Signer<O, C>>>) -> Router
+where
+	O: Signing<C> + Send + Sync + 'static,
+	C: Curve + Send + Sync + 'static,
+{
+	Router::new().route("/sign", post(sign_handler)).with_state(hsm)
 }
 
-async fn sign_handler(
-    State(hsm): State<Arc<Mutex<dyn Hsm + Send + Sync>>>,
-    Json(payload): Json<SignRequest>,
-) -> Result<Json<SignedResponse>, StatusCode> {
-    let message_bytes = Bytes(payload.message);
+async fn sign_handler<O, C>(
+	State(hsm): State<Arc<Mutex<Signer<O, C>>>>,
+	Json(payload): Json<SignRequest>,
+) -> Result<Json<SignedResponse>, StatusCode>
+where
+	O: Signing<C>,
+	C: Curve,
+{
+	let message_bytes = payload.message.as_slice();
 
-    let (_message, _public_key, signature) = hsm
-        .lock()
-        .await
-        .sign(message_bytes)
-        .await
-        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
+	let signature = hsm
+		.lock()
+		.await
+		.sign(message_bytes)
+		.await
+		.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
 
-    Ok(Json(SignedResponse {
-        signature: signature.0 .0, 
-    }))
+	Ok(Json(SignedResponse { signature: signature.to_bytes() }))
 }
 
 #[derive(serde::Deserialize)]
 pub struct SignRequest {
-    pub message: Vec<u8>,
+	pub message: Vec<u8>,
 }
 
 #[derive(serde::Serialize)]
 pub struct SignedResponse {
-    pub signature: Vec<u8>,
+	pub signature: Vec<u8>,
 }
diff --git a/docker/compose/movement-full-node/docker-compose.follower.yml b/docker/compose/movement-full-node/docker-compose.follower.yml
index 8d6745f86..9f71c34d8 100644
--- a/docker/compose/movement-full-node/docker-compose.follower.yml
+++ b/docker/compose/movement-full-node/docker-compose.follower.yml
@@ -14,6 +14,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       MOVEMENT_SYNC: ${MOVEMENT_SYNC:?MOVEMENT_SYNC is not set}
       MAYBE_RUN_LOCAL: "false"
+      RUST_LOG: info,aws_sdk_s3=debug
     volumes:
       - ${DOT_MOVEMENT_PATH}:/.movement
       # mount if exists
diff --git a/docker/compose/movement-full-node/docker-compose.yml b/docker/compose/movement-full-node/docker-compose.yml
index d8c00800c..156a911cc 100644
--- a/docker/compose/movement-full-node/docker-compose.yml
+++ b/docker/compose/movement-full-node/docker-compose.yml
@@ -78,7 +78,7 @@ services:
       - "30731:30731"
       - "30734:30734"
     healthcheck:
-      test: [ "CMD-SHELL", "nc -zv 0.0.0.0 30731" ]
+      test: [ "CMD-SHELL", "echo true" ]
       retries: 10
       interval: 10s
       timeout: 5s
diff --git a/networks/movement/movement-full-node/src/da/mod.rs b/networks/movement/movement-full-node/src/da/mod.rs
new file mode 100644
index 000000000..a5af333d7
--- /dev/null
+++ b/networks/movement/movement-full-node/src/da/mod.rs
@@ -0,0 +1,17 @@
+pub mod stream_blocks;
+
+use clap::Subcommand;
+
+#[derive(Subcommand, Debug)]
+#[clap(rename_all = "kebab-case", about = "Commands for interacting with the DA")]
+pub enum Da {
+	StreamBlocks(stream_blocks::StreamBlocks),
+}
+
+impl Da {
+	pub async fn execute(&self) -> Result<(), anyhow::Error> {
+		match self {
+			Da::StreamBlocks(stream_blocks) => stream_blocks.execute().await,
+		}
+	}
+}
diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
new file mode 100644
index 000000000..98e09673f
--- /dev/null
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -0,0 +1,54 @@
+use crate::common_args::MovementArgs;
+use anyhow::Context;
+use clap::Parser;
+use movement_da_light_node_client::MovementDaLightNodeClient;
+use movement_da_light_node_proto::{blob_response, StreamReadFromHeightRequest};
+use tokio_stream::StreamExt;
+
+#[derive(Debug, Parser, Clone)]
+#[clap(rename_all = "kebab-case", about = "Streams the DA blocks")]
+pub struct StreamBlocks {
+	#[clap(flatten)]
+	pub movement_args: MovementArgs,
+	pub light_node_url: String,
+	pub from_height: u64,
+}
+
+impl StreamBlocks {
+	pub async fn execute(&self) -> Result<(), anyhow::Error> {
+		// Connect to the light node
+
+		let mut client = MovementDaLightNodeClient::try_http2(self.light_node_url.as_str())
+			.await
+			.context("Failed to connect to light node")?;
+
+		let mut blocks_from_da = client
+			.stream_read_from_height(StreamReadFromHeightRequest { height: self.from_height })
+			.await
+			.context("Failed to stream blocks from DA")?;
+
+		println!("Streaming blocks from DA");
+
+		while let Some(block_res) = blocks_from_da.next().await {
+			let response = block_res.context("Failed to get block")?;
+			let (_block_bytes, block_timestamp, block_id, da_height) = match response
+				.blob
+				.ok_or(anyhow::anyhow!("No blob in response"))?
+				.blob_type
+				.ok_or(anyhow::anyhow!("No blob type in response"))?
+			{
+				blob_response::BlobType::SequencedBlobBlock(blob) => {
+					(blob.data, blob.timestamp, blob.blob_id, blob.height)
+				}
+				_ => {
+					anyhow::bail!("Invalid blob type in response")
+				}
+			};
+			println!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
+		}
+
+		println!("Finished streaming blocks from DA");
+
+		Ok(())
+	}
+}
diff --git a/networks/movement/movement-full-node/src/lib.rs b/networks/movement/movement-full-node/src/lib.rs
index 005e95386..017997fb4 100644
--- a/networks/movement/movement-full-node/src/lib.rs
+++ b/networks/movement/movement-full-node/src/lib.rs
@@ -1,5 +1,6 @@
 pub mod admin;
 pub mod common_args;
+pub mod da;
 pub mod node;
 pub mod run;
 pub mod state;
@@ -16,6 +17,8 @@ pub enum MovementFullNode {
 	Run(run::Run),
 	#[clap(subcommand)]
 	State(state::State),
+	#[clap(subcommand)]
+	Da(da::Da),
 }
 
 impl MovementFullNode {
@@ -24,6 +27,7 @@ impl MovementFullNode {
 			Self::Admin(admin) => admin.execute().await,
 			Self::Run(run) => run.execute().await,
 			Self::State(state) => state.execute().await,
+			Self::Da(da) => da.execute().await,
 		}
 	}
 }
diff --git a/networks/movement/movement-full-node/src/node/partial.rs b/networks/movement/movement-full-node/src/node/partial.rs
index c9570c7c3..ceab1d07b 100644
--- a/networks/movement/movement-full-node/src/node/partial.rs
+++ b/networks/movement/movement-full-node/src/node/partial.rs
@@ -114,6 +114,7 @@ impl MovementPartialNode<Executor> {
 			.celestia_da_light_node_config
 			.movement_da_light_node_http1()
 		{
+			debug!("Creating the http1 client");
 			MovementDaLightNodeClient::try_http1(
 				format!(
 					"{}://{}:{}",
@@ -125,6 +126,7 @@ impl MovementPartialNode<Executor> {
 			)
 			.context("Failed to connect to light node")?
 		} else {
+			debug!("Creating the http2 client");
 			MovementDaLightNodeClient::try_http2(
 				format!(
 					"{}://{}:{}",
diff --git a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
index 372ea3f77..903544993 100644
--- a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
@@ -79,10 +79,12 @@ where
 			select! {
 				Some(res) = blocks_from_da.next() => {
 					let response = res.context("failed to get next block from DA")?;
+					debug!("Received block from DA");
 					self.process_block_from_da(response).await?;
 				}
 				Some(res) = self.commitment_events.next() => {
 					let event = res.context("failed to get commitment event")?;
+					debug!("Received commitment event");
 					self.process_commitment_event(event).await?;
 				}
 				else => break,
@@ -191,7 +193,8 @@ where
 				Ok(commitment) => return Ok(commitment),
 				Err(e) => {
 					info!("Failed to execute block: {:?}. Retrying", e);
-					block_timestamp += self.execution_extension.block_retry_increment_microseconds; // increase the timestamp by 5 ms (5000 microseconds)
+					block_timestamp += self.execution_extension.block_retry_increment_microseconds;
+					// increase the timestamp by the configured retry increment (e.g. 5000 microseconds = 5 ms)
 				}
 			}
 		}
diff --git a/process-compose/movement-full-node/process-compose.follower.yml b/process-compose/movement-full-node/process-compose.follower.yml
index b13fe49aa..f6809b3b6 100644
--- a/process-compose/movement-full-node/process-compose.follower.yml
+++ b/process-compose/movement-full-node/process-compose.follower.yml
@@ -33,7 +33,7 @@ processes:
     
   movement-full-node:
     command: |
-      movement-full-node
+      movement-full-node run
     depends_on:
       movement-celestia-da-light-node:
         condition: process_healthy
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs b/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs
index c534df63a..176e9cff3 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs
+++ b/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs
@@ -2,7 +2,7 @@ use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::fmt::{self, Debug, Formatter};
 use std::sync::Arc;
 use tokio_stream::{Stream, StreamExt};
-use tracing::{debug, error, info};
+use tracing::{debug, error, info, warn};
 
 use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{nmt::Namespace, Blob as CelestiaBlob, TxConfig};
@@ -224,6 +224,18 @@ where
 					break;
 				}
 
+				// to avoid stopping the stream when get blobs at height fails, simply warn!
+				match me.get_blobs_at_height(height).await {
+					Ok(blobs) => {
+						for blob in blobs {
+							yield blob;
+						}
+					}
+					Err(e) => {
+						warn!(error = %e, "failed to get blobs at height");
+					}
+				}
+
 				let blobs = me.get_blobs_at_height(height).await?;
 				for blob in blobs {
 					yield blob;
@@ -272,13 +284,18 @@ where
 				}
 				first_flag = false;
 
-				let blobs = me.get_blobs_at_height(height).await?;
-				for blob in blobs {
-
-					debug!("Stream got blob: {:?}", blob);
-
-					yield blob;
+				// to avoid stopping the stream when get blobs at height fails, simply warn!
+				match me.get_blobs_at_height(height).await {
+					Ok(blobs) => {
+						for blob in blobs {
+							yield blob;
+						}
+					}
+					Err(e) => {
+						warn!(error = %e, "failed to get blobs at height");
+					}
 				}
+
 			}
 		};
 
diff --git a/util/signing/interface/src/cryptography/mod.rs b/util/signing/interface/src/cryptography/mod.rs
index edb4aef10..58ee96372 100644
--- a/util/signing/interface/src/cryptography/mod.rs
+++ b/util/signing/interface/src/cryptography/mod.rs
@@ -14,7 +14,13 @@ macro_rules! fixed_size {
 		impl crate::cryptography::TryFromBytes for $Name {
 			fn try_from_bytes(bytes: &[u8]) -> Result<Self, anyhow::Error> {
 				if bytes.len() != Self::BYTES_LEN {
-					Err(anyhow::anyhow!("invalid length"))?;
+					Err(anyhow::anyhow!(
+						"invalid length for {}, wants {}, got {}, for {:?}",
+						stringify!($Name),
+						Self::BYTES_LEN,
+						bytes.len(),
+						bytes
+					))?;
 				}
 
 				let mut inner = [0u8; Self::BYTES_LEN];
@@ -23,6 +29,12 @@ macro_rules! fixed_size {
 				Ok(Self(inner))
 			}
 		}
+
+		impl crate::cryptography::ToBytes for $Name {
+			fn to_bytes(&self) -> Vec<u8> {
+				self.0.to_vec()
+			}
+		}
 	};
 }
 
@@ -33,11 +45,15 @@ pub trait TryFromBytes: Sized {
 	fn try_from_bytes(bytes: &[u8]) -> Result<Self, anyhow::Error>;
 }
 
+pub trait ToBytes {
+	fn to_bytes(&self) -> Vec<u8>;
+}
+
 /// A designator for an elliptic curve.
 ///
 /// This trait has no methods, but it binds the types of the public key and
 /// the signature used by the EC digital signing algorithm.
 pub trait Curve {
-	type PublicKey: TryFromBytes;
-	type Signature;
+	type PublicKey: TryFromBytes + ToBytes + Send + Sync + std::fmt::Debug;
+	type Signature: TryFromBytes + ToBytes + Send + Sync + std::fmt::Debug;
 }
diff --git a/util/signing/interface/src/key/mod.rs b/util/signing/interface/src/key/mod.rs
new file mode 100644
index 000000000..f3712c304
--- /dev/null
+++ b/util/signing/interface/src/key/mod.rs
@@ -0,0 +1,245 @@
+use crate::{cryptography, Signing};
+use std::error;
+use std::future::Future;
+
+pub trait ToCanonicalString {
+	fn to_canonical_string(&self) -> String;
+}
+
+pub trait TryFromCanonicalString: Sized {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String>;
+}
+
+#[derive(Debug)]
+pub enum Organization {
+	Movement,
+	Other(String),
+}
+
+impl ToCanonicalString for Organization {
+	fn to_canonical_string(&self) -> String {
+		match self {
+			Organization::Movement => "movement".to_string(),
+			Organization::Other(s) => s.clone(),
+		}
+	}
+}
+
+impl TryFromCanonicalString for Organization {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		match s {
+			"movement" => Ok(Organization::Movement),
+			_ => Ok(Organization::Other(s.to_string())),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub enum Environment {
+	Prod,
+	Dev,
+	Staging,
+}
+
+impl ToCanonicalString for Environment {
+	fn to_canonical_string(&self) -> String {
+		match self {
+			Environment::Prod => "prod".to_string(),
+			Environment::Dev => "dev".to_string(),
+			Environment::Staging => "staging".to_string(),
+		}
+	}
+}
+
+impl TryFromCanonicalString for Environment {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		match s {
+			"prod" => Ok(Environment::Prod),
+			"dev" => Ok(Environment::Dev),
+			"staging" => Ok(Environment::Staging),
+			_ => Err(format!("invalid environment: {}", s)),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub enum SoftwareUnit {
+	FullNode,
+	Other(String),
+}
+
+impl ToCanonicalString for SoftwareUnit {
+	fn to_canonical_string(&self) -> String {
+		match self {
+			SoftwareUnit::FullNode => "full_node".to_string(),
+			SoftwareUnit::Other(s) => s.clone(),
+		}
+	}
+}
+
+impl TryFromCanonicalString for SoftwareUnit {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		match s {
+			"full_node" => Ok(SoftwareUnit::FullNode),
+			_ => Ok(SoftwareUnit::Other(s.to_string())),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub enum Usage {
+	McrSettlement,
+	Other(String),
+}
+
+impl ToCanonicalString for Usage {
+	fn to_canonical_string(&self) -> String {
+		match self {
+			Usage::McrSettlement => "mcr_settlement".to_string(),
+			Usage::Other(s) => s.clone(),
+		}
+	}
+}
+
+impl TryFromCanonicalString for Usage {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		match s {
+			"mcr_settlement" => Ok(Usage::McrSettlement),
+			_ => Ok(Usage::Other(s.to_string())),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub enum AllowedRoles {
+	Signer,
+	Auditor,
+	Other(String),
+}
+
+impl ToCanonicalString for AllowedRoles {
+	fn to_canonical_string(&self) -> String {
+		match self {
+			AllowedRoles::Signer => "signer".to_string(),
+			AllowedRoles::Auditor => "auditor".to_string(),
+			AllowedRoles::Other(s) => s.clone(),
+		}
+	}
+}
+
+impl TryFromCanonicalString for AllowedRoles {
+	fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		match s {
+			"signer" => Ok(AllowedRoles::Signer),
+			"auditor" => Ok(AllowedRoles::Auditor),
+			_ => Ok(AllowedRoles::Other(s.to_string())),
+		}
+	}
+}
+
+#[derive(Debug)]
+pub struct Key {
+	org: Organization,
+	environment: Environment,
+	software_unit: SoftwareUnit,
+	usage: Usage,
+	allowed_roles: AllowedRoles,
+	key_name: String,
+	app_replica: Option<String>,
+}
+
+impl Key {
+	pub fn new(
+		org: Organization,
+		environment: Environment,
+		software_unit: SoftwareUnit,
+		usage: Usage,
+		allowed_roles: AllowedRoles,
+		key_name: String,
+		app_replica: Option<String>,
+	) -> Self {
+		Self { org, environment, software_unit, usage, allowed_roles, key_name, app_replica }
+	}
+
+	pub fn org(&self) -> &Organization {
+		&self.org
+	}
+
+	pub fn environment(&self) -> &Environment {
+		&self.environment
+	}
+
+	pub fn software_unit(&self) -> &SoftwareUnit {
+		&self.software_unit
+	}
+
+	pub fn usage(&self) -> &Usage {
+		&self.usage
+	}
+
+	pub fn allowed_roles(&self) -> &AllowedRoles {
+		&self.allowed_roles
+	}
+
+	pub fn key_name(&self) -> &str {
+		&self.key_name
+	}
+
+	pub fn app_replica(&self) -> Option<&String> {
+		self.app_replica.as_ref()
+	}
+
+	/// Return a delimited canonical string representation of the key.
+	pub fn to_delimited_canonical_string(&self, delimiter: &str) -> String {
+		format!(
+			"{}{delimiter}{}{delimiter}{}{delimiter}{}{delimiter}{}{delimiter}{}{delimiter}{}",
+			self.org.to_canonical_string(),
+			self.environment.to_canonical_string(),
+			self.software_unit.to_canonical_string(),
+			self.usage.to_canonical_string(),
+			self.allowed_roles.to_canonical_string(),
+			self.key_name,
+			self.app_replica.as_deref().unwrap_or("0"),
+			delimiter = delimiter
+		)
+	}
+
+	/// Gets a key from a canonical string.
+	/// Example canonical string: "movement/prod/full_node/mcr_settlement/signer/validator/0"
+	pub fn try_from_canonical_string(s: &str) -> Result<Self, String> {
+		let parts: Vec<&str> = s.split('/').collect();
+		if parts.len() != 7 {
+			return Err(format!("invalid key: {}", s));
+		}
+
+		Ok(Self {
+			org: Organization::try_from_canonical_string(parts[0])?,
+			environment: Environment::try_from_canonical_string(parts[1])?,
+			software_unit: SoftwareUnit::try_from_canonical_string(parts[2])?,
+			usage: Usage::try_from_canonical_string(parts[3])?,
+			allowed_roles: AllowedRoles::try_from_canonical_string(parts[4])?,
+			key_name: parts[5].to_string(),
+			app_replica: Some(parts[6].to_string()),
+		})
+	}
+
+	/// Gets a key from a canonical string environment variable
+	pub fn try_from_env_var(var: &str) -> Result<Self, String> {
+		let s = std::env::var(var).map_err(|e| format!("{}: {}", var, e))?;
+		Self::try_from_canonical_string(&s)
+	}
+}
+
+/// Errors thrown by [SignerBuilder].
+#[derive(Debug, thiserror::Error)]
+pub enum SignerBuilderError {
+	#[error("building signer failed")]
+	BuildingSigner(#[source] Box<dyn error::Error + Send + Sync>),
+	#[error("internal error: {0}")]
+	Internal(String),
+}
+
+pub trait SignerBuilder<C: cryptography::Curve, S: Signing<C>> {
+	/// Get async signer for a key.
+	fn build(&self, key: Key) -> impl Future<Output = Result<S, SignerBuilderError>> + Send;
+}
diff --git a/util/signing/interface/src/lib.rs b/util/signing/interface/src/lib.rs
index 646573996..bc12b6edb 100644
--- a/util/signing/interface/src/lib.rs
+++ b/util/signing/interface/src/lib.rs
@@ -3,7 +3,7 @@ use std::future::Future;
 use std::marker::PhantomData;
 
 pub mod cryptography;
-pub mod manager;
+pub mod key;
 
 /// Errors thrown by Signer
 #[derive(Debug, thiserror::Error)]
@@ -43,10 +43,13 @@ pub struct Signer<O, C> {
 	_phantom_curve: PhantomData<C>,
 }
 
-impl<O, C> Signer<O, C> {
+impl<O, C> Signer<O, C>
+where
+	O: Signing<C>,
+	C: cryptography::Curve,
+{
 	/// Binds the signing provider with the specific curve selection.
-	pub fn new(provider: O, curve: C) -> Self {
-		let _ = curve;
+	pub fn new(provider: O) -> Self {
 		Self { provider, _phantom_curve: PhantomData }
 	}
 
diff --git a/util/signing/providers/aws-kms/Cargo.toml b/util/signing/providers/aws-kms/Cargo.toml
index 018066fdc..0ffb70dbe 100644
--- a/util/signing/providers/aws-kms/Cargo.toml
+++ b/util/signing/providers/aws-kms/Cargo.toml
@@ -12,6 +12,8 @@ rust-version = { workspace = true }
 [dependencies]
 movement-signer = { workspace = true }
 aws-sdk-kms = { workspace = true }
+aws-config = { workspace = true }
+anyhow = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/util/signing/providers/aws-kms/src/cryptography/mod.rs b/util/signing/providers/aws-kms/src/cryptography/mod.rs
index 3e5840107..8f5b64818 100644
--- a/util/signing/providers/aws-kms/src/cryptography/mod.rs
+++ b/util/signing/providers/aws-kms/src/cryptography/mod.rs
@@ -1 +1,14 @@
 pub mod secp256k1;
+use aws_sdk_kms::types::{KeySpec, KeyUsageType, SigningAlgorithmSpec};
+
+/// Defines the needed methods for providing a definition of cryptography used with AWS KMS
+pub trait AwsKmsCryptographySpec {
+	/// Returns the [KeySpec] for the desired cryptography
+	fn key_spec() -> KeySpec;
+
+	/// Returns the [KeyUsageType] for the desired cryptography
+	fn key_usage_type() -> KeyUsageType;
+
+	/// Returns the [SigningAlgorithmSpec] for the desired cryptography
+	fn signing_algorithm_spec() -> SigningAlgorithmSpec;
+}
diff --git a/util/signing/providers/aws-kms/src/cryptography/secp256k1/mod.rs b/util/signing/providers/aws-kms/src/cryptography/secp256k1/mod.rs
index 481d4e40e..7297885bf 100644
--- a/util/signing/providers/aws-kms/src/cryptography/secp256k1/mod.rs
+++ b/util/signing/providers/aws-kms/src/cryptography/secp256k1/mod.rs
@@ -1,19 +1,8 @@
+use crate::cryptography::AwsKmsCryptographySpec;
 use aws_sdk_kms::types::{KeySpec, KeyUsageType, SigningAlgorithmSpec};
 use movement_signer::cryptography::secp256k1::Secp256k1;
 
-/// Defines the needed methods for providing a definition of cryptography used with AWS KMS
-pub trait AwsKmsCryptography {
-	/// Returns the [KeySpec] for the desired cryptography
-	fn key_spec() -> KeySpec;
-
-	/// Returns the [KeyUsageType] for the desired cryptography
-	fn key_usage_type() -> KeyUsageType;
-
-	/// Returns the [SigningAlgorithmSpec] for the desired cryptography
-	fn signing_algorithm_spec() -> SigningAlgorithmSpec;
-}
-
-impl AwsKmsCryptography for Secp256k1 {
+impl AwsKmsCryptographySpec for Secp256k1 {
 	fn key_spec() -> KeySpec {
 		KeySpec::EccSecgP256K1
 	}
diff --git a/util/signing/providers/aws-kms/src/hsm/key.rs b/util/signing/providers/aws-kms/src/hsm/key.rs
new file mode 100644
index 000000000..2db4e1217
--- /dev/null
+++ b/util/signing/providers/aws-kms/src/hsm/key.rs
@@ -0,0 +1,43 @@
+use crate::{cryptography::AwsKmsCryptographySpec, hsm::AwsKms};
+use movement_signer::{
+	cryptography::Curve,
+	key::{Key, SignerBuilder, SignerBuilderError},
+};
+
+pub struct Builder<C: Curve> {
+	create_key: bool,
+	_cryptography_marker: std::marker::PhantomData<C>,
+}
+
+impl<C> Builder<C>
+where
+	C: Curve,
+{
+	pub fn new() -> Self {
+		Self { create_key: false, _cryptography_marker: std::marker::PhantomData }
+	}
+
+	pub fn create_key(mut self, create_key: bool) -> Self {
+		self.create_key = create_key;
+		self
+	}
+}
+
+impl<C> SignerBuilder<C, AwsKms<C>> for Builder<C>
+where
+	C: Curve + AwsKmsCryptographySpec + Send + Sync,
+{
+	async fn build(&self, key: Key) -> Result<AwsKms<C>, SignerBuilderError> {
+		let mut hsm = AwsKms::try_from_env()
+			.await
+			.map_err(|e| SignerBuilderError::Internal(e.to_string()))?;
+		hsm.set_key_id(key.to_delimited_canonical_string("/"));
+		if self.create_key {
+			hsm = hsm
+				.create_key()
+				.await
+				.map_err(|e| SignerBuilderError::Internal(e.to_string()))?;
+		}
+		Ok(hsm)
+	}
+}
diff --git a/util/signing/providers/aws-kms/src/hsm/mod.rs b/util/signing/providers/aws-kms/src/hsm/mod.rs
new file mode 100644
index 000000000..2d04b0291
--- /dev/null
+++ b/util/signing/providers/aws-kms/src/hsm/mod.rs
@@ -0,0 +1,109 @@
+use crate::cryptography::AwsKmsCryptographySpec;
+use anyhow::Context;
+use aws_sdk_kms::primitives::Blob;
+use aws_sdk_kms::Client;
+use movement_signer::cryptography::TryFromBytes;
+use movement_signer::{cryptography::Curve, SignerError, Signing};
+pub mod key;
+
+/// An AWS KMS HSM.
+pub struct AwsKms<C: Curve + AwsKmsCryptographySpec> {
+	client: Client,
+	key_id: String,
+	_cryptography_marker: std::marker::PhantomData<C>,
+}
+
+impl<C> AwsKms<C>
+where
+	C: Curve + AwsKmsCryptographySpec,
+{
+	/// Creates a new AWS KMS HSM
+	pub fn new(client: Client, key_id: String) -> Self {
+		Self { client, key_id, _cryptography_marker: std::marker::PhantomData }
+	}
+
+	/// Sets the key id
+	pub fn set_key_id(&mut self, key_id: String) {
+		self.key_id = key_id;
+	}
+
+	/// Tries to create a new AWS KMS HSM from the environment
+	pub async fn try_from_env() -> Result<Self, anyhow::Error> {
+		let key_id = std::env::var("AWS_KMS_KEY_ID").context("AWS_KMS_KEY_ID not set")?;
+
+		let config = aws_config::load_from_env().await;
+		let client = aws_sdk_kms::Client::new(&config);
+
+		Ok(Self::new(client, key_id))
+	}
+
+	/// Creates a key in AWS KMS matching the provided key id.
+	pub async fn create_key(self) -> Result<Self, anyhow::Error> {
+		let res = self
+			.client
+			.create_key()
+			.key_spec(C::key_spec())
+			.key_usage(C::key_usage_type())
+			.send()
+			.await?;
+
+		let key_id = res.key_metadata().context("No key metadata available")?.key_id().to_string();
+
+		Ok(Self::new(self.client, key_id))
+	}
+}
+
+impl<C> Signing<C> for AwsKms<C>
+where
+	C: Curve + AwsKmsCryptographySpec + Sync,
+{
+	async fn sign(&self, message: &[u8]) -> Result<C::Signature, SignerError> {
+		let blob = Blob::new(message);
+		let request = self
+			.client
+			.sign()
+			.key_id(&self.key_id)
+			.signing_algorithm(C::signing_algorithm_spec())
+			.message(blob);
+
+		let res = request
+			.send()
+			.await
+			.map_err(|e| SignerError::Internal(format!("Failed to sign: {}", e.to_string())))?;
+
+		let signature = <C as Curve>::Signature::try_from_bytes(
+			res.signature()
+				.context("No signature available")
+				.map_err(|e| {
+					SignerError::Internal(format!("Failed to convert signature: {}", e.to_string()))
+				})?
+				.as_ref(),
+		)
+		.map_err(|e| {
+			SignerError::Internal(format!("Failed to convert signature: {}", e.to_string()))
+		})?;
+
+		Ok(signature)
+	}
+
+	async fn public_key(&self) -> Result<C::PublicKey, SignerError> {
+		let res = self.client.get_public_key().key_id(&self.key_id).send().await.map_err(|e| {
+			SignerError::Internal(format!("failed to get public key: {}", e.to_string()))
+		})?;
+		let public_key = C::PublicKey::try_from_bytes(
+			res.public_key()
+				.context("No public key available")
+				.map_err(|e| {
+					SignerError::Internal(format!(
+						"failed to convert public key: {}",
+						e.to_string()
+					))
+				})?
+				.as_ref(),
+		)
+		.map_err(|e| {
+			SignerError::Internal(format!("Failed to convert public key: {}", e.to_string()))
+		})?;
+		Ok(public_key)
+	}
+}
diff --git a/util/signing/providers/aws-kms/src/lib.rs b/util/signing/providers/aws-kms/src/lib.rs
index 18f57b93b..63d77a7f1 100644
--- a/util/signing/providers/aws-kms/src/lib.rs
+++ b/util/signing/providers/aws-kms/src/lib.rs
@@ -1 +1,2 @@
 pub mod cryptography;
+pub mod hsm;
diff --git a/util/signing/providers/hashicorp-vault/src/cryptography/ed25519/mod.rs b/util/signing/providers/hashicorp-vault/src/cryptography/ed25519/mod.rs
index 96958ff25..13a4612ff 100644
--- a/util/signing/providers/hashicorp-vault/src/cryptography/ed25519/mod.rs
+++ b/util/signing/providers/hashicorp-vault/src/cryptography/ed25519/mod.rs
@@ -1,8 +1,8 @@
-use crate::cryptography::HashiCorpVaultCryptography;
+use crate::cryptography::HashiCorpVaultCryptographySpec;
 use movement_signer::cryptography::ed25519::Ed25519;
 use vaultrs::api::transit::KeyType;
 
-impl HashiCorpVaultCryptography for Ed25519 {
+impl HashiCorpVaultCryptographySpec for Ed25519 {
 	fn key_type() -> KeyType {
 		KeyType::Ed25519
 	}
diff --git a/util/signing/providers/hashicorp-vault/src/cryptography/mod.rs b/util/signing/providers/hashicorp-vault/src/cryptography/mod.rs
index 88e8ee7bd..a0adcb479 100644
--- a/util/signing/providers/hashicorp-vault/src/cryptography/mod.rs
+++ b/util/signing/providers/hashicorp-vault/src/cryptography/mod.rs
@@ -2,7 +2,7 @@ pub mod ed25519;
 use vaultrs::api::transit::KeyType;
 
 /// Defines the needed methods for providing a definition of cryptography used with HashiCorp Vault
-pub trait HashiCorpVaultCryptography {
+pub trait HashiCorpVaultCryptographySpec {
 	/// Returns the [KeyType] for the desired cryptography
 	fn key_type() -> KeyType;
 }
diff --git a/util/signing/providers/hashicorp-vault/src/hsm/key.rs b/util/signing/providers/hashicorp-vault/src/hsm/key.rs
new file mode 100644
index 000000000..9d8a91249
--- /dev/null
+++ b/util/signing/providers/hashicorp-vault/src/hsm/key.rs
@@ -0,0 +1,42 @@
+use crate::{cryptography::HashiCorpVaultCryptographySpec, hsm::HashiCorpVault};
+use movement_signer::{
+	cryptography::Curve,
+	key::{Key, SignerBuilder, SignerBuilderError},
+};
+
+pub struct Builder<C: Curve> {
+	create_key: bool,
+	_cryptography_marker: std::marker::PhantomData<C>,
+}
+
+impl<C> Builder<C>
+where
+	C: Curve,
+{
+	pub fn new() -> Self {
+		Self { create_key: false, _cryptography_marker: std::marker::PhantomData }
+	}
+
+	pub fn create_key(mut self, create_key: bool) -> Self {
+		self.create_key = create_key;
+		self
+	}
+}
+
+impl<C> SignerBuilder<C, HashiCorpVault<C>> for Builder<C>
+where
+	C: Curve + HashiCorpVaultCryptographySpec + Send + Sync,
+{
+	async fn build(&self, key: Key) -> Result<HashiCorpVault<C>, SignerBuilderError> {
+		let mut hsm = HashiCorpVault::try_from_env()
+			.map_err(|e| SignerBuilderError::Internal(e.to_string()))?;
+		hsm.set_key_id(key.to_delimited_canonical_string("/"));
+		if self.create_key {
+			hsm = hsm
+				.create_key()
+				.await
+				.map_err(|e| SignerBuilderError::Internal(e.to_string()))?;
+		}
+		Ok(hsm)
+	}
+}
diff --git a/util/signing/providers/hashicorp-vault/src/hsm/mod.rs b/util/signing/providers/hashicorp-vault/src/hsm/mod.rs
index 17b4a1228..8bcefccf6 100644
--- a/util/signing/providers/hashicorp-vault/src/hsm/mod.rs
+++ b/util/signing/providers/hashicorp-vault/src/hsm/mod.rs
@@ -1,41 +1,34 @@
-use crate::cryptography::HashiCorpVaultCryptography;
+pub mod key;
+
+use crate::cryptography::HashiCorpVaultCryptographySpec;
 use anyhow::Context;
 use movement_signer::cryptography::TryFromBytes;
-use movement_signer::{
-	cryptography::{ed25519::Ed25519, Curve},
-	SignerError, Signing,
-};
+use movement_signer::{cryptography::Curve, SignerError, Signing};
 use vaultrs::api::transit::{requests::CreateKeyRequest, responses::ReadKeyData};
 use vaultrs::client::{VaultClient, VaultClientSettingsBuilder};
-use vaultrs::transit::key;
+use vaultrs::transit::data;
+use vaultrs::transit::key as transit_key;
 
 /// A HashiCorp Vault HSM.
-pub struct HashiCorpVault<C: Curve + HashiCorpVaultCryptography> {
+pub struct HashiCorpVault<C: Curve + HashiCorpVaultCryptographySpec> {
 	client: VaultClient,
 	key_name: String,
 	mount_name: String,
-	pub public_key: <C as Curve>::PublicKey,
 	_cryptography_marker: std::marker::PhantomData<C>,
 }
 
 impl<C> HashiCorpVault<C>
 where
-	C: Curve + HashiCorpVaultCryptography,
+	C: Curve + HashiCorpVaultCryptographySpec,
 {
 	/// Creates a new HashiCorp Vault HSM
-	pub fn new(
-		client: VaultClient,
-		key_name: String,
-		mount_name: String,
-		public_key: C::PublicKey,
-	) -> Self {
-		Self {
-			client,
-			key_name,
-			mount_name,
-			public_key,
-			_cryptography_marker: std::marker::PhantomData,
-		}
+	pub fn new(client: VaultClient, key_name: String, mount_name: String) -> Self {
+		Self { client, key_name, mount_name, _cryptography_marker: std::marker::PhantomData }
+	}
+
+	/// Sets the key id
+	pub fn set_key_id(&mut self, key_id: String) {
+		self.key_name = key_id;
 	}
 
 	/// Tries to create a new HashiCorp Vault HSM from the environment
@@ -53,68 +46,65 @@ where
 
 		let key_name = std::env::var("VAULT_KEY_NAME").context("VAULT_KEY_NAME not set")?;
 		let mount_name = std::env::var("VAULT_MOUNT_NAME").context("VAULT_MOUNT_NAME not set")?;
-		let public_key = std::env::var("VAULT_PUBLIC_KEY").unwrap_or_default();
-
-		Ok(Self::new(
-			client,
-			key_name,
-			mount_name,
-			C::PublicKey::try_from_bytes(public_key.as_bytes())?,
-		))
+
+		Ok(Self::new(client, key_name, mount_name))
 	}
 
 	/// Creates a new key in the transit backend
 	pub async fn create_key(self) -> Result<Self, anyhow::Error> {
-		key::create(
+		transit_key::create(
 			&self.client,
 			self.mount_name.as_str(),
 			self.key_name.as_str(),
 			Some(CreateKeyRequest::builder().key_type(C::key_type()).derived(false)),
 		)
 		.await
-		.context("Failed to create key")?;
+		.map_err(|e| anyhow::anyhow!(e))?;
 
 		Ok(self)
 	}
+}
 
-	/// Fills with a public key fetched from vault.
-	pub async fn fill_with_public_key(self) -> Result<Self, anyhow::Error> {
-		let res = key::read(&self.client, self.mount_name.as_str(), self.key_name.as_str())
-			.await
-			.context("Failed to read key")?;
-		println!("Read key: {:?}", res);
+impl<C> Signing<C> for HashiCorpVault<C>
+where
+	C: Curve + HashiCorpVaultCryptographySpec + Sync,
+{
+	async fn sign(&self, message: &[u8]) -> Result<C::Signature, SignerError> {
+		let res = data::sign(
+			&self.client,
+			self.mount_name.as_str(),
+			self.key_name.as_str(),
+			// convert bytes vec<u8> to base64 string
+			base64::encode(message).as_str(),
+			None,
+		)
+		.await
+		.context("Failed to sign message")
+		.map_err(|e| SignerError::Internal(e.to_string()))?;
 
-		let public_key = match res.keys {
-			ReadKeyData::Symmetric(_) => {
-				return Err(anyhow::anyhow!("Symmetric keys are not supported"));
-			}
-			ReadKeyData::Asymmetric(keys) => {
-				let key = keys.values().next().context("No key found")?;
-				base64::decode(key.public_key.as_str()).context("Failed to decode public key")?
-			}
-		};
+		// the signature should be encoded vault:v1:<signature>; check for the prefix and split off the signature
+		// 1. check for match
+		if !res.signature.starts_with("vault:v1:") {
+			return Err(SignerError::Internal("Invalid signature format".to_string()));
+		}
+		// 2. split off the signature
+		let signature_str = res.signature.split_at(9).1;
 
-		println!("Public key: {:?}", public_key);
-		Ok(Self::new(
-			self.client,
-			self.key_name,
-			self.mount_name,
-			C::PublicKey::try_from_bytes(public_key.as_slice())?,
-		))
-	}
-}
+		// decode base64 string to vec<u8>
+		let signature = base64::decode(signature_str)
+			.context("Failed to decode signature")
+			.map_err(|e| SignerError::Internal(e.to_string()))?;
 
-impl Signing<Ed25519> for HashiCorpVault<Ed25519> {
-	async fn sign(&self, _message: &[u8]) -> Result<<Ed25519 as Curve>::Signature, SignerError> {
-		unimplemented!()
+		// Convert the decoded signature bytes into the curve's signature type
+		Ok(C::Signature::try_from_bytes(signature.as_slice())
+			.map_err(|e| SignerError::Internal(e.to_string()))?)
 	}
 
-	async fn public_key(&self) -> Result<<Ed25519 as Curve>::PublicKey, SignerError> {
-		let res = key::read(&self.client, self.mount_name.as_str(), self.key_name.as_str())
+	async fn public_key(&self) -> Result<C::PublicKey, SignerError> {
+		let res = transit_key::read(&self.client, self.mount_name.as_str(), self.key_name.as_str())
 			.await
 			.context("Failed to read key")
 			.map_err(|e| SignerError::Internal(e.to_string()))?;
-		println!("Read key: {:?}", res);
 
 		let public_key = match res.keys {
 			ReadKeyData::Symmetric(_) => {
@@ -125,14 +115,14 @@ impl Signing<Ed25519> for HashiCorpVault<Ed25519> {
 					.values()
 					.next()
 					.context("No key found")
-					.map_err(|e| SignerError::KeyNotFound)?;
+					.map_err(|_e| SignerError::KeyNotFound)?;
 				base64::decode(key.public_key.as_str())
 					.context("Failed to decode public key")
 					.map_err(|e| SignerError::Internal(e.to_string()))?
 			}
 		};
 
-		Ok(<Ed25519 as Curve>::PublicKey::try_from_bytes(public_key.as_slice())
+		Ok(C::PublicKey::try_from_bytes(public_key.as_slice())
 			.map_err(|e| SignerError::Internal(e.to_string()))?)
 	}
 }

From aa6d928b62d5d0023004f8a53eac197cff0f8c34 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 10:48:31 -0800
Subject: [PATCH 02/43] fix: Cargo.lock

---
 Cargo.lock | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index fd7a67c76..335ce059c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3223,9 +3223,9 @@ dependencies = [
 
 [[package]]
 name = "aws-runtime"
-version = "1.5.3"
+version = "1.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b16d1aa50accc11a4b4d5c50f7fb81cc0cf60328259c587d0e6b0f11385bde46"
+checksum = "b5ac934720fbb46206292d2c75b57e67acfc56fe7dfd34fb9a02334af08409ea"
 dependencies = [
  "aws-credential-types",
  "aws-sigv4",
@@ -3271,9 +3271,9 @@ dependencies = [
 
 [[package]]
 name = "aws-sdk-s3"
-version = "1.68.0"
+version = "1.61.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc5ddf1dc70287dc9a2f953766a1fe15e3e74aef02fd1335f2afa475c9b4f4fc"
+checksum = "0e531658a0397d22365dfe26c3e1c0c8448bf6a3a2d8a098ded802f2b1261615"
 dependencies = [
  "aws-credential-types",
  "aws-runtime",
@@ -3282,7 +3282,7 @@ dependencies = [
  "aws-smithy-checksums",
  "aws-smithy-eventstream",
  "aws-smithy-http",
- "aws-smithy-json 0.61.1",
+ "aws-smithy-json 0.60.7",
  "aws-smithy-runtime",
  "aws-smithy-runtime-api",
  "aws-smithy-types",
@@ -3401,9 +3401,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-async"
-version = "1.2.3"
+version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "427cb637d15d63d6f9aae26358e1c9a9c09d5aa490d64b09354c8217cfef0f28"
+checksum = "62220bc6e97f946ddd51b5f1361f78996e704677afc518a4ff66b7a72ea1378c"
 dependencies = [
  "futures-util",
  "pin-project-lite",
@@ -3493,9 +3493,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-runtime"
-version = "1.7.6"
+version = "1.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a05dd41a70fc74051758ee75b5c4db2c0ca070ed9229c3df50e9475cda1cb985"
+checksum = "9f20685047ca9d6f17b994a07f629c813f08b5bce65523e47124879e60103d45"
 dependencies = [
  "aws-smithy-async",
  "aws-smithy-http",
@@ -3537,9 +3537,9 @@ dependencies = [
 
 [[package]]
 name = "aws-smithy-types"
-version = "1.2.11"
+version = "1.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38ddc9bd6c28aeb303477170ddd183760a956a03e083b3902a990238a7e3792d"
+checksum = "4fbd94a32b3a7d55d3806fe27d98d3ad393050439dd05eb53ece36ec5e3d3510"
 dependencies = [
  "base64-simd",
  "bytes 1.8.0",

From be23e6919546e6ca724628db876c05eb1a1fd63d Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 11:33:57 -0800
Subject: [PATCH 03/43] fix: setting up for mocks.

---
 Cargo.lock                                    |  1 +
 .../movement/celestia/light-node/Cargo.toml   |  1 +
 .../movement/celestia/light-node/src/v1/da.rs | 46 +++++++++++++++++++
 .../celestia/light-node/src/v1/mod.rs         |  2 +
 4 files changed, 50 insertions(+)
 create mode 100644 protocol-units/da/movement/celestia/light-node/src/v1/da.rs

diff --git a/Cargo.lock b/Cargo.lock
index 335ce059c..7a0f100e9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10155,6 +10155,7 @@ dependencies = [
  "serde",
  "serde_json",
  "tempfile",
+ "thiserror 1.0.69",
  "tokio",
  "tokio-stream",
  "tonic 0.12.3",
diff --git a/protocol-units/da/movement/celestia/light-node/Cargo.toml b/protocol-units/da/movement/celestia/light-node/Cargo.toml
index 25bfe3aca..4c6b58a6f 100644
--- a/protocol-units/da/movement/celestia/light-node/Cargo.toml
+++ b/protocol-units/da/movement/celestia/light-node/Cargo.toml
@@ -41,6 +41,7 @@ bcs = { workspace = true }
 zstd = { workspace = true }
 ecdsa = { workspace = true }
 k256 = { workspace = true }
+thiserror = { workspace = true }
 
 # sequencer
 memseq = { workspace = true, optional = true }
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/da.rs b/protocol-units/da/movement/celestia/light-node/src/v1/da.rs
new file mode 100644
index 000000000..b7945edd1
--- /dev/null
+++ b/protocol-units/da/movement/celestia/light-node/src/v1/da.rs
@@ -0,0 +1,46 @@
+use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_light_node_proto::Blob;
+use std::error;
+use std::future::Future;
+
+/// A certificate from consensus indicating a height.
+#[derive(Debug, Clone)]
+pub enum Certificate {
+	/// A certificate from consensus indicating a height.
+	Height(u64),
+	/// A certificate that cannot be interpreted for a height.
+	Nolo,
+}
+
+/// Errors thrown by [Da].
+#[derive(Debug, thiserror::Error)]
+pub enum DaError {
+	#[error("blob submission error: {0}")]
+	BlobSubmission(#[source] Box<dyn error::Error + Send + Sync>),
+	#[error("blobs at height fatal error: {0}")]
+	BlobsAtHeightFatal(#[source] Box<dyn error::Error + Send + Sync>),
+	#[error("blobs at height error: {0}")]
+	BlobsAtHeight(#[source] Box<dyn error::Error + Send + Sync>),
+	#[error("internal error: {0}")]
+	Internal(String),
+}
+
+pub trait DaOperations {
+	/// Submits a blob to the DA.
+	///
+	/// A DA must allow for submission of raw blobs.
+	fn submit_blob(&self, data: Vec<u8>) -> impl Future<Output = Result<Blob, DaError>>;
+
+	/// Gets the blobs at a given height.
+	///
+	/// A DA must allow for retrieval of [IntermediateBlobRepresentation]s at a given height.
+	fn get_ir_blobs_at_height(
+		&self,
+		height: u64,
+	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>>;
+
+	/// Streams certificates from the DA.
+	///
+	/// A DA must allow for streaming of [Certificate]s. This is used to inform [Blob] polling.
+	fn stream_certificates(&self) -> impl futures::Stream<Item = Result<Certificate, DaError>>;
+}
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs b/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
index de1e1e094..e80f9a166 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
+++ b/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
@@ -15,3 +15,5 @@ pub use sequencer::*;
 pub use light_node::*;
 
 pub use manager::*;
+
+pub mod da;

From 5f9d0455fa268cabf9ab58fcf5580507563fab1c Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 14:32:12 -0800
Subject: [PATCH 04/43] fix: refactor light node.

---
 Cargo.lock                                    | 244 ++++++++++--------
 Cargo.toml                                    |  17 +-
 networks/movement/movement-config/Cargo.toml  |   2 +-
 networks/movement/movement-config/src/lib.rs  |   2 +-
 .../movement/movement-full-node/Cargo.toml    |   2 +-
 .../src/node/tasks/transaction_ingress.rs     |   2 +-
 networks/movement/movement-util/Cargo.toml    |   2 +-
 networks/movement/setup/Cargo.toml            |   4 +-
 networks/movement/setup/src/local.rs          |   2 +-
 .../movement/celestia/light-node/Cargo.toml   |  55 ----
 .../movement/celestia/light-node/src/lib.rs   |   1 -
 .../celestia/light-node/src/v1/mod.rs         |  19 --
 .../movement/{celestia => protocol}/README.md |   0
 .../da/movement/protocol/da/Cargo.toml        |  22 ++
 .../src/v1/da.rs => protocol/da/src/lib.rs}   |   2 +-
 .../movement/protocol/light-node/Cargo.toml   |  42 ++-
 .../light-node/README.md                      |   0
 .../movement/protocol/light-node/src/lib.rs   |  25 +-
 .../light-node/src}/light_node.rs             |   4 +-
 .../light-node/src/main.rs                    |   4 +-
 .../v1 => protocol/light-node/src}/manager.rs |  12 +-
 .../light-node/src}/passthrough.rs            |  30 +--
 .../light-node/src}/sequencer.rs              |  38 ++-
 .../prevalidator}/Cargo.toml                  |   6 +-
 .../prevalidator}/src/aptos/mod.rs            |   0
 .../prevalidator}/src/aptos/transaction.rs    |   0
 .../prevalidator}/src/aptos/whitelist/mod.rs  |   0
 .../src/aptos/whitelist/whitelist.rs          |   0
 .../prevalidator}/src/lib.rs                  |   0
 .../da/movement/protocol/proto/Cargo.toml     |  29 +++
 .../protocol/{light-node => proto}/build.rs   |   0
 .../da/movement/protocol/proto/src/lib.rs     |   8 +
 .../{celestia => protocol}/runners/Cargo.toml |   4 +-
 .../runners/src/bin/celestia-appd.rs          |   4 +-
 .../runners/src/bin/celestia-bridge.rs        |   4 +-
 .../runners/src/bin/celestia-light.rs         |   4 +-
 .../runners/src/celestia_appd/local.rs        |   2 +-
 .../runners/src/celestia_appd/mod.rs          |   8 +-
 .../runners/src/celestia_bridge/local.rs      |   4 +-
 .../runners/src/celestia_bridge/mod.rs        |   8 +-
 .../runners/src/celestia_light/arabica.rs     |   2 +-
 .../runners/src/celestia_light/mocha.rs       |   2 +-
 .../runners/src/celestia_light/mod.rs         |   8 +-
 .../{celestia => protocol}/runners/src/lib.rs |   2 +-
 .../{celestia => protocol}/setup/Cargo.toml   |   6 +-
 .../setup/src/arabica.rs                      |   2 +-
 .../setup/src/bin/setup.rs                    |   4 +-
 .../setup/src/common/celestia.rs              |   2 +-
 .../setup/src/common/file.rs                  |   0
 .../setup/src/common/memseq.rs                |   2 +-
 .../setup/src/common/mod.rs                   |   0
 .../{celestia => protocol}/setup/src/lib.rs   |  14 +-
 .../{celestia => protocol}/setup/src/local.rs |   2 +-
 .../{celestia => protocol}/setup/src/mocha.rs |   2 +-
 .../tests}/Cargo.toml                         |   2 +-
 .../tests}/src/lib.rs                         |   0
 .../tests}/src/test/e2e/mod.rs                |   0
 .../tests}/src/test/e2e/raw/mod.rs            |   0
 .../tests}/src/test/e2e/raw/passthrough.rs    |   0
 .../tests}/src/test/e2e/raw/sequencer.rs      |   0
 .../tests}/src/test/mod.rs                    |   0
 .../{celestia => protocol}/util/Cargo.toml    |   2 +-
 .../util/src/bin/wait_for_light_node.rs       |   2 +-
 .../util/src/config/common.rs                 |   0
 .../util/src/config/local/appd.rs             |   0
 .../util/src/config/local/bridge.rs           |   0
 .../util/src/config/local/da_light_node.rs    |   0
 .../util/src/config/local/mod.rs              |   0
 .../util/src/config/mod.rs                    |   0
 .../util/src/ir_blob.rs                       |   0
 .../{celestia => protocol}/util/src/lib.rs    |   0
 .../verifier}/Cargo.toml                      |   6 +-
 .../verifier}/src/celestia/mod.rs             |   8 +-
 .../verifier}/src/celestia/pessimistic.rs     |   2 +-
 .../verifier}/src/lib.rs                      |   0
 .../verifier}/src/permissioned_signers/mod.rs |   2 +-
 .../verifier}/src/signed/mod.rs               |   2 +-
 .../verifier}/src/v1.rs                       |   6 +-
 .../da/movement/providers/celestia/Cargo.toml |  29 +++
 .../providers/celestia/src/blob/mod.rs        |   0
 .../movement/providers/celestia/src/da/mod.rs |   1 +
 .../da/movement/providers/celestia/src/lib.rs |   2 +
 .../execution/maptos/util/Cargo.toml          |   2 +-
 83 files changed, 395 insertions(+), 331 deletions(-)
 delete mode 100644 protocol-units/da/movement/celestia/light-node/Cargo.toml
 delete mode 100644 protocol-units/da/movement/celestia/light-node/src/lib.rs
 delete mode 100644 protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
 rename protocol-units/da/movement/{celestia => protocol}/README.md (100%)
 create mode 100644 protocol-units/da/movement/protocol/da/Cargo.toml
 rename protocol-units/da/movement/{celestia/light-node/src/v1/da.rs => protocol/da/src/lib.rs} (95%)
 rename protocol-units/da/movement/{celestia => protocol}/light-node/README.md (100%)
 rename protocol-units/da/movement/{celestia/light-node/src/v1 => protocol/light-node/src}/light_node.rs (91%)
 rename protocol-units/da/movement/{celestia => protocol}/light-node/src/main.rs (82%)
 rename protocol-units/da/movement/{celestia/light-node/src/v1 => protocol/light-node/src}/manager.rs (85%)
 rename protocol-units/da/movement/{celestia/light-node/src/v1 => protocol/light-node/src}/passthrough.rs (96%)
 rename protocol-units/da/movement/{celestia/light-node/src/v1 => protocol/light-node/src}/sequencer.rs (94%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/Cargo.toml (86%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/src/aptos/mod.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/src/aptos/transaction.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/src/aptos/whitelist/mod.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/src/aptos/whitelist/whitelist.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-prevalidator => protocol/prevalidator}/src/lib.rs (100%)
 create mode 100644 protocol-units/da/movement/protocol/proto/Cargo.toml
 rename protocol-units/da/movement/protocol/{light-node => proto}/build.rs (100%)
 create mode 100644 protocol-units/da/movement/protocol/proto/src/lib.rs
 rename protocol-units/da/movement/{celestia => protocol}/runners/Cargo.toml (91%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/bin/celestia-appd.rs (84%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/bin/celestia-bridge.rs (83%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/bin/celestia-light.rs (83%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_appd/local.rs (96%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_appd/mod.rs (66%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_bridge/local.rs (96%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_bridge/mod.rs (66%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_light/arabica.rs (89%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_light/mocha.rs (89%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/celestia_light/mod.rs (68%)
 rename protocol-units/da/movement/{celestia => protocol}/runners/src/lib.rs (77%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/Cargo.toml (87%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/arabica.rs (99%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/bin/setup.rs (90%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/common/celestia.rs (98%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/common/file.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/common/memseq.rs (93%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/common/mod.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/lib.rs (59%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/local.rs (99%)
 rename protocol-units/da/movement/{celestia => protocol}/setup/src/mocha.rs (98%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/Cargo.toml (94%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/lib.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/test/e2e/mod.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/test/e2e/raw/mod.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/test/e2e/raw/passthrough.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/test/e2e/raw/sequencer.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-tests => protocol/tests}/src/test/mod.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/Cargo.toml (97%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/bin/wait_for_light_node.rs (94%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/common.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/local/appd.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/local/bridge.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/local/da_light_node.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/local/mod.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/config/mod.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/ir_blob.rs (100%)
 rename protocol-units/da/movement/{celestia => protocol}/util/src/lib.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/Cargo.toml (86%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/celestia/mod.rs (91%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/celestia/pessimistic.rs (96%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/lib.rs (100%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/permissioned_signers/mod.rs (97%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/signed/mod.rs (98%)
 rename protocol-units/da/movement/{celestia/light-node-verifier => protocol/verifier}/src/v1.rs (94%)
 create mode 100644 protocol-units/da/movement/providers/celestia/Cargo.toml
 create mode 100644 protocol-units/da/movement/providers/celestia/src/blob/mod.rs
 create mode 100644 protocol-units/da/movement/providers/celestia/src/da/mod.rs
 create mode 100644 protocol-units/da/movement/providers/celestia/src/lib.rs

diff --git a/Cargo.lock b/Cargo.lock
index 7a0f100e9..b4b99a0a1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -9026,7 +9026,7 @@ dependencies = [
  "aptos-types",
  "godfig",
  "hex",
- "movement-celestia-da-util",
+ "movement-da-util",
  "rand 0.7.3",
  "serde",
  "serde_derive",
@@ -10128,7 +10128,74 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node"
+name = "movement-client"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "aptos-protos 1.3.0 (git+https://github.com/movementlabsxyz/aptos-core?rev=9dfc8e7a3d622597dfd81cc4ba480a5377f87a41)",
+ "aptos-sdk",
+ "aptos-types",
+ "async-trait",
+ "bcs 0.1.4",
+ "buildtime-helpers",
+ "chrono",
+ "commander",
+ "dot-movement",
+ "futures",
+ "itertools 0.12.1",
+ "maptos-execution-util",
+ "mcr-settlement-client",
+ "movement-config",
+ "movement-da-light-node-client",
+ "movement-da-light-node-proto",
+ "movement-tracing",
+ "movement-types",
+ "once_cell",
+ "rand 0.7.3",
+ "rayon",
+ "reqwest 0.12.9",
+ "serde",
+ "serde_json",
+ "serde_yaml 0.9.34+deprecated",
+ "thiserror 1.0.69",
+ "tokio",
+ "tonic 0.12.3",
+ "tracing",
+ "tracing-subscriber 0.3.18",
+ "tracing-test",
+ "url",
+]
+
+[[package]]
+name = "movement-collections"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "uuid",
+]
+
+[[package]]
+name = "movement-config"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "dot-movement",
+ "godfig",
+ "maptos-execution-util",
+ "mcr-settlement-client",
+ "mcr-settlement-config",
+ "movement-da-util",
+ "movement-types",
+ "serde",
+ "serde_derive",
+ "syncup",
+ "tokio",
+ "toml 0.8.19",
+ "tracing",
+]
+
+[[package]]
+name = "movement-da-light-node"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10145,10 +10212,10 @@ dependencies = [
  "k256",
  "memseq",
  "movement-algs",
- "movement-celestia-da-light-node-prevalidator",
- "movement-celestia-da-light-node-verifier",
- "movement-celestia-da-util",
+ "movement-da-light-node-prevalidator",
  "movement-da-light-node-proto",
+ "movement-da-light-node-verifier",
+ "movement-da-util",
  "movement-tracing",
  "movement-types",
  "prost 0.13.3",
@@ -10165,7 +10232,41 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node-prevalidator"
+name = "movement-da-light-node-celestia"
+version = "0.0.2"
+dependencies = [
+ "movement-da-light-node-da",
+ "movement-da-util",
+ "prost 0.13.3",
+ "tonic 0.12.3",
+]
+
+[[package]]
+name = "movement-da-light-node-client"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "bytes 1.8.0",
+ "http-body-util",
+ "hyper-util",
+ "movement-da-light-node-proto",
+ "tonic 0.12.3",
+ "tonic-web",
+ "tower 0.5.1",
+]
+
+[[package]]
+name = "movement-da-light-node-da"
+version = "0.0.2"
+dependencies = [
+ "futures",
+ "movement-da-light-node-proto",
+ "movement-da-util",
+ "thiserror 1.0.69",
+]
+
+[[package]]
+name = "movement-da-light-node-prevalidator"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10176,9 +10277,9 @@ dependencies = [
  "ecdsa 0.16.9",
  "hex",
  "k256",
- "movement-celestia-da-light-node-setup",
- "movement-celestia-da-util",
  "movement-da-light-node-proto",
+ "movement-da-light-node-setup",
+ "movement-da-util",
  "movement-types",
  "prost 0.13.3",
  "rand 0.7.3",
@@ -10191,7 +10292,17 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node-runners"
+name = "movement-da-light-node-proto"
+version = "0.0.2"
+dependencies = [
+ "buildtime",
+ "prost 0.13.3",
+ "tonic 0.12.3",
+ "tonic-build",
+]
+
+[[package]]
+name = "movement-da-light-node-runners"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10199,7 +10310,7 @@ dependencies = [
  "dot-movement",
  "godfig",
  "hex",
- "movement-celestia-da-util",
+ "movement-da-util",
  "rand 0.7.3",
  "reqwest 0.12.9",
  "serde",
@@ -10212,7 +10323,7 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node-setup"
+name = "movement-da-light-node-setup"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10223,7 +10334,7 @@ dependencies = [
  "dot-movement",
  "godfig",
  "hex",
- "movement-celestia-da-util",
+ "movement-da-util",
  "rand 0.7.3",
  "reqwest 0.12.9",
  "serde",
@@ -10236,7 +10347,7 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node-tests"
+name = "movement-da-light-node-tests"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10250,7 +10361,7 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-light-node-verifier"
+name = "movement-da-light-node-verifier"
 version = "0.0.2"
 dependencies = [
  "anyhow",
@@ -10261,9 +10372,9 @@ dependencies = [
  "ecdsa 0.16.9",
  "hex",
  "k256",
- "movement-celestia-da-light-node-setup",
- "movement-celestia-da-util",
  "movement-da-light-node-proto",
+ "movement-da-light-node-setup",
+ "movement-da-util",
  "prost 0.13.3",
  "rand 0.7.3",
  "serde_json",
@@ -10275,7 +10386,7 @@ dependencies = [
 ]
 
 [[package]]
-name = "movement-celestia-da-util"
+name = "movement-da-util"
 version = "0.0.2"
 dependencies = [
  "alloy",
@@ -10309,97 +10420,6 @@ dependencies = [
  "zstd 0.13.2",
 ]
 
-[[package]]
-name = "movement-client"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "aptos-protos 1.3.0 (git+https://github.com/movementlabsxyz/aptos-core?rev=9dfc8e7a3d622597dfd81cc4ba480a5377f87a41)",
- "aptos-sdk",
- "aptos-types",
- "async-trait",
- "bcs 0.1.4",
- "buildtime-helpers",
- "chrono",
- "commander",
- "dot-movement",
- "futures",
- "itertools 0.12.1",
- "maptos-execution-util",
- "mcr-settlement-client",
- "movement-config",
- "movement-da-light-node-client",
- "movement-da-light-node-proto",
- "movement-tracing",
- "movement-types",
- "once_cell",
- "rand 0.7.3",
- "rayon",
- "reqwest 0.12.9",
- "serde",
- "serde_json",
- "serde_yaml 0.9.34+deprecated",
- "thiserror 1.0.69",
- "tokio",
- "tonic 0.12.3",
- "tracing",
- "tracing-subscriber 0.3.18",
- "tracing-test",
- "url",
-]
-
-[[package]]
-name = "movement-collections"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "uuid",
-]
-
-[[package]]
-name = "movement-config"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "dot-movement",
- "godfig",
- "maptos-execution-util",
- "mcr-settlement-client",
- "mcr-settlement-config",
- "movement-celestia-da-util",
- "movement-types",
- "serde",
- "serde_derive",
- "syncup",
- "tokio",
- "toml 0.8.19",
- "tracing",
-]
-
-[[package]]
-name = "movement-da-light-node-client"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "bytes 1.8.0",
- "http-body-util",
- "hyper-util",
- "movement-da-light-node-proto",
- "tonic 0.12.3",
- "tonic-web",
- "tower 0.5.1",
-]
-
-[[package]]
-name = "movement-da-light-node-proto"
-version = "0.0.2"
-dependencies = [
- "buildtime",
- "prost 0.13.3",
- "tonic 0.12.3",
- "tonic-build",
-]
-
 [[package]]
 name = "movement-faucet-service"
 version = "2.0.1"
@@ -10433,10 +10453,10 @@ dependencies = [
  "mcr-settlement-client",
  "mcr-settlement-config",
  "mcr-settlement-manager",
- "movement-celestia-da-util",
  "movement-config",
  "movement-da-light-node-client",
  "movement-da-light-node-proto",
+ "movement-da-util",
  "movement-rest",
  "movement-tracing",
  "movement-types",
@@ -10468,9 +10488,9 @@ dependencies = [
  "hex",
  "mcr-settlement-config",
  "mcr-settlement-setup",
- "movement-celestia-da-light-node-setup",
- "movement-celestia-da-util",
  "movement-config",
+ "movement-da-light-node-setup",
+ "movement-da-util",
  "movement-types",
  "rand 0.7.3",
  "serde",
@@ -10594,9 +10614,9 @@ dependencies = [
  "maptos-dof-execution",
  "mcr-settlement-client",
  "mcr-settlement-manager",
- "movement-celestia-da-util",
  "movement-config",
  "movement-da-light-node-proto",
+ "movement-da-util",
  "movement-rest",
  "movement-tracing",
  "movement-types",
diff --git a/Cargo.toml b/Cargo.toml
index e044247fc..a2a6be450 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,7 +10,7 @@ members = [
     "protocol-units/execution/maptos/fin-view",
     "protocol-units/execution/maptos/util",
     "protocol-units/da/movement/protocol/*",
-    "protocol-units/da/movement/celestia/*",
+    "protocol-units/da/movement/providers/*",
     "protocol-units/sequencing/memseq/*",
     "protocol-units/mempool/*",
     "protocol-units/syncing/*",
@@ -72,14 +72,15 @@ bridge-indexer-db = { path = "protocol-units/bridge/indexer-db" }
 buildtime = { path = "util/buildtime" }
 buildtime-helpers = { path = "util/buildtime/buildtime-helpers" }
 buildtime-macros = { path = "util/buildtime/buildtime-macros" }
-## da/proto
-movement-da-light-node-proto = { path = "protocol-units/da/movement/protocol/light-node" }
+## movement-da
+movement-da-light-node-proto = { path = "protocol-units/da/movement/protocol/proto" }
+movement-da-light-node = { path = "protocol-units/da/movement/protocol/light-node" }
 movement-da-light-node-client = { path = "protocol-units/da/movement/protocol/client" }
-## da/celestia
-movement-celestia-da-util = { path = "protocol-units/da/movement/celestia/util" }
-movement-celestia-da-light-node-setup = { path = "protocol-units/da/movement/celestia/setup" }
-movement-celestia-da-light-node-verifier = { path = "protocol-units/da/movement/celestia/light-node-verifier" }
-movement-celestia-da-light-node-prevalidator = { path = "protocol-units/da/movement/celestia/light-node-prevalidator" }
+movement-da-util = { path = "protocol-units/da/movement/protocol/util" }
+movement-da-light-node-setup = { path = "protocol-units/da/movement/protocol/setup" }
+movement-da-light-node-verifier = { path = "protocol-units/da/movement/protocol/verifier" }
+movement-da-light-node-prevalidator = { path = "protocol-units/da/movement/protocol/prevalidator" }
+movement-da-light-node-da = { path = "protocol-units/da/movement/protocol/da" }
 ## execution
 maptos-dof-execution = { path = "protocol-units/execution/maptos/dof" }
 maptos-opt-executor = { path = "protocol-units/execution/maptos/opt-executor" }
diff --git a/networks/movement/movement-config/Cargo.toml b/networks/movement/movement-config/Cargo.toml
index 3414bfc4a..0bcfe4212 100644
--- a/networks/movement/movement-config/Cargo.toml
+++ b/networks/movement/movement-config/Cargo.toml
@@ -22,7 +22,7 @@ serde = { workspace = true }
 serde_derive = { workspace = true }
 toml = { workspace = true }
 tracing = { workspace = true }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 godfig = { workspace = true }
 syncup = { workspace = true }
 movement-types = { workspace = true }
diff --git a/networks/movement/movement-config/src/lib.rs b/networks/movement/movement-config/src/lib.rs
index 695d6e86a..2425a0571 100644
--- a/networks/movement/movement-config/src/lib.rs
+++ b/networks/movement/movement-config/src/lib.rs
@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
 
 use maptos_execution_util::config::MaptosConfig;
 use mcr_settlement_config::Config as McrConfig;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Config {
diff --git a/networks/movement/movement-full-node/Cargo.toml b/networks/movement/movement-full-node/Cargo.toml
index d9ff3a79b..daac9aa5d 100644
--- a/networks/movement/movement-full-node/Cargo.toml
+++ b/networks/movement/movement-full-node/Cargo.toml
@@ -15,7 +15,7 @@ rust-version = { workspace = true }
 maptos-dof-execution = { workspace = true }
 prost = { workspace = true }
 movement-da-light-node-proto = { workspace = true, features = ["client"] }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 mcr-settlement-client = { workspace = true, features = ["eth"] }
 mcr-settlement-manager = { workspace = true }
 serde_json = { workspace = true }
diff --git a/networks/movement/movement-full-node/src/node/tasks/transaction_ingress.rs b/networks/movement/movement-full-node/src/node/tasks/transaction_ingress.rs
index 200cd220e..83383d138 100644
--- a/networks/movement/movement-full-node/src/node/tasks/transaction_ingress.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/transaction_ingress.rs
@@ -1,7 +1,7 @@
 //! Task to process incoming transactions and write to DA
 
 use maptos_dof_execution::SignedTransaction;
-use movement_celestia_da_util::config::Config as LightNodeConfig;
+use movement_da_util::config::Config as LightNodeConfig;
 use movement_da_light_node_client::MovementDaLightNodeClient;
 use movement_da_light_node_proto::{BatchWriteRequest, BlobWrite};
 
diff --git a/networks/movement/movement-util/Cargo.toml b/networks/movement/movement-util/Cargo.toml
index a1e70ce04..db6350e3e 100644
--- a/networks/movement/movement-util/Cargo.toml
+++ b/networks/movement/movement-util/Cargo.toml
@@ -14,7 +14,7 @@ rust-version = { workspace = true }
 [dependencies]
 maptos-dof-execution = { workspace = true }
 movement-da-light-node-proto = { workspace = true }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 mcr-settlement-client = { workspace = true, features = ["mock"] }
 mcr-settlement-manager = { workspace = true }
 serde_json = { workspace = true }
diff --git a/networks/movement/setup/Cargo.toml b/networks/movement/setup/Cargo.toml
index de6314f3c..04049da04 100644
--- a/networks/movement/setup/Cargo.toml
+++ b/networks/movement/setup/Cargo.toml
@@ -13,8 +13,8 @@ rust-version = { workspace = true }
 
 [dependencies]
 dot-movement = { workspace = true }
-movement-celestia-da-util = { workspace = true }
-movement-celestia-da-light-node-setup = { workspace = true }
+movement-da-util = { workspace = true }
+movement-da-light-node-setup = { workspace = true }
 mcr-settlement-setup = { workspace = true }
 mcr-settlement-config = { workspace = true }
 movement-config = { workspace = true }
diff --git a/networks/movement/setup/src/local.rs b/networks/movement/setup/src/local.rs
index beadb7acb..6dee909be 100644
--- a/networks/movement/setup/src/local.rs
+++ b/networks/movement/setup/src/local.rs
@@ -25,7 +25,7 @@ impl Local {
 	> {
 		let da_light_node_config = config.celestia_da_light_node.clone();
 
-		let new_da_light_node_config = movement_celestia_da_light_node_setup::setup(
+		let new_da_light_node_config = movement_da_light_node_setup::setup(
 			dot_movement.clone(),
 			da_light_node_config,
 		)
diff --git a/protocol-units/da/movement/celestia/light-node/Cargo.toml b/protocol-units/da/movement/celestia/light-node/Cargo.toml
deleted file mode 100644
index 4c6b58a6f..000000000
--- a/protocol-units/da/movement/celestia/light-node/Cargo.toml
+++ /dev/null
@@ -1,55 +0,0 @@
-[package]
-name = "movement-celestia-da-light-node"
-version = { workspace = true }
-edition = { workspace = true }
-license = { workspace = true }
-authors = { workspace = true }
-repository = { workspace = true }
-homepage = { workspace = true }
-publish = { workspace = true }
-rust-version = { workspace = true }
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-tokio = { workspace = true }
-tokio-stream = { workspace = true }
-tonic = { workspace = true }
-tonic-reflection = { workspace = true }
-prost = { workspace = true }
-movement-da-light-node-proto = { workspace = true, features = ["server"] }
-movement-celestia-da-util = { workspace = true }
-movement-celestia-da-light-node-verifier = { workspace = true }
-movement-celestia-da-light-node-prevalidator = { workspace = true }
-movement-algs = { workspace = true }
-movement-types = { workspace = true }
-celestia-rpc = { workspace = true }
-celestia-types = { workspace = true }
-anyhow = { workspace = true }
-hex = { workspace = true }
-async-stream = { workspace = true }
-serde = { workspace = true }
-serde_json = { workspace = true }
-tempfile = { workspace = true }
-tracing = { workspace = true }
-chrono = { workspace = true }
-dot-movement = { workspace = true }
-godfig = { workspace = true }
-movement-tracing = { workspace = true }
-futures = { workspace = true }
-bcs = { workspace = true }
-zstd = { workspace = true }
-ecdsa = { workspace = true }
-k256 = { workspace = true }
-thiserror = { workspace = true }
-
-# sequencer
-memseq = { workspace = true, optional = true }
-
-
-[features]
-default = ["sequencer"]
-sequencer = ["memseq"]
-
-[lints]
-workspace = true
diff --git a/protocol-units/da/movement/celestia/light-node/src/lib.rs b/protocol-units/da/movement/celestia/light-node/src/lib.rs
deleted file mode 100644
index a3a6d96c3..000000000
--- a/protocol-units/da/movement/celestia/light-node/src/lib.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod v1;
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs b/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
deleted file mode 100644
index e80f9a166..000000000
--- a/protocol-units/da/movement/celestia/light-node/src/v1/mod.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-pub mod passthrough;
-#[cfg(feature = "sequencer")]
-pub mod sequencer;
-
-pub mod light_node;
-
-pub mod manager;
-
-#[cfg(not(feature = "sequencer"))]
-pub use passthrough::*;
-
-#[cfg(feature = "sequencer")]
-pub use sequencer::*;
-
-pub use light_node::*;
-
-pub use manager::*;
-
-pub mod da;
diff --git a/protocol-units/da/movement/celestia/README.md b/protocol-units/da/movement/protocol/README.md
similarity index 100%
rename from protocol-units/da/movement/celestia/README.md
rename to protocol-units/da/movement/protocol/README.md
diff --git a/protocol-units/da/movement/protocol/da/Cargo.toml b/protocol-units/da/movement/protocol/da/Cargo.toml
new file mode 100644
index 000000000..4e5f1760c
--- /dev/null
+++ b/protocol-units/da/movement/protocol/da/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "movement-da-light-node-da"
+version = { workspace = true }
+edition = { workspace = true }
+license = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+publish = { workspace = true }
+rust-version = { workspace = true }
+
+[features]
+integration-tests = []
+
+[dependencies]
+thiserror = { workspace = true }
+movement-da-util = { workspace = true }
+movement-da-light-node-proto = { workspace = true }
+futures = { workspace = true}
+
+[lints]
+workspace = true
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/da.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
similarity index 95%
rename from protocol-units/da/movement/celestia/light-node/src/v1/da.rs
rename to protocol-units/da/movement/protocol/da/src/lib.rs
index b7945edd1..82d8fad73 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/da.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -1,4 +1,4 @@
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use movement_da_light_node_proto::Blob;
 use std::error;
 use std::future::Future;
diff --git a/protocol-units/da/movement/protocol/light-node/Cargo.toml b/protocol-units/da/movement/protocol/light-node/Cargo.toml
index 9fb76e562..7eddd353e 100644
--- a/protocol-units/da/movement/protocol/light-node/Cargo.toml
+++ b/protocol-units/da/movement/protocol/light-node/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-da-light-node-proto"
+name = "movement-da-light-node"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
@@ -12,18 +12,44 @@ rust-version = { workspace = true }
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+tokio = { workspace = true }
+tokio-stream = { workspace = true }
 tonic = { workspace = true }
+tonic-reflection = { workspace = true }
 prost = { workspace = true }
+movement-da-light-node-proto = { workspace = true, features = ["server"] }
+movement-da-util = { workspace = true }
+movement-da-light-node-verifier = { workspace = true }
+movement-da-light-node-prevalidator = { workspace = true }
+movement-algs = { workspace = true }
+movement-types = { workspace = true }
+celestia-rpc = { workspace = true }
+celestia-types = { workspace = true }
+anyhow = { workspace = true }
+hex = { workspace = true }
+async-stream = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+tempfile = { workspace = true }
+tracing = { workspace = true }
+chrono = { workspace = true }
+dot-movement = { workspace = true }
+godfig = { workspace = true }
+movement-tracing = { workspace = true }
+futures = { workspace = true }
+bcs = { workspace = true }
+zstd = { workspace = true }
+ecdsa = { workspace = true }
+k256 = { workspace = true }
+thiserror = { workspace = true }
 
-[build-dependencies]
-tonic-build = { workspace = true, features = ["prost"] }
-buildtime = { workspace = true }
+# sequencer
+memseq = { workspace = true, optional = true }
 
-[features]
-default = []
-client = []
-server = []
 
+[features]
+default = ["sequencer"]
+sequencer = ["memseq"]
 
 [lints]
 workspace = true
diff --git a/protocol-units/da/movement/celestia/light-node/README.md b/protocol-units/da/movement/protocol/light-node/README.md
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node/README.md
rename to protocol-units/da/movement/protocol/light-node/README.md
diff --git a/protocol-units/da/movement/protocol/light-node/src/lib.rs b/protocol-units/da/movement/protocol/light-node/src/lib.rs
index 4defff541..de1e1e094 100644
--- a/protocol-units/da/movement/protocol/light-node/src/lib.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/lib.rs
@@ -1,8 +1,17 @@
-pub mod v1beta1 {
-	tonic::include_proto!("movementlabs.protocol_units.da.light_node.v1beta1"); // The string specified here
-	pub const FILE_DESCRIPTOR_SET: &[u8] =
-		tonic::include_file_descriptor_set!("movement-da-light-node-proto-descriptor");
-}
-
-// Re-export the latest version at the crate root
-pub use v1beta1::*;
+pub mod passthrough;
+#[cfg(feature = "sequencer")]
+pub mod sequencer;
+
+pub mod light_node;
+
+pub mod manager;
+
+#[cfg(not(feature = "sequencer"))]
+pub use passthrough::*;
+
+#[cfg(feature = "sequencer")]
+pub use sequencer::*;
+
+pub use light_node::*;
+
+pub use manager::*;
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/light_node.rs b/protocol-units/da/movement/protocol/light-node/src/light_node.rs
similarity index 91%
rename from protocol-units/da/movement/celestia/light-node/src/v1/light_node.rs
rename to protocol-units/da/movement/protocol/light-node/src/light_node.rs
index d0d5d0b4b..db4fec454 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/light_node.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/light_node.rs
@@ -1,11 +1,11 @@
-use movement_celestia_da_util::config::Config;
 use movement_da_light_node_proto::light_node_service_server::{
 	LightNodeService, LightNodeServiceServer,
 };
+use movement_da_util::config::Config;
 use tonic::transport::Server;
 use tracing::info;
 
-pub trait LightNodeV1Operations: LightNodeService + Send + Sync + Sized + Clone {
+pub trait LightNodeRuntime: LightNodeService + Send + Sync + Sized + Clone {
 	/// Initializes from environment variables.
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error>;
 
diff --git a/protocol-units/da/movement/celestia/light-node/src/main.rs b/protocol-units/da/movement/protocol/light-node/src/main.rs
similarity index 82%
rename from protocol-units/da/movement/celestia/light-node/src/main.rs
rename to protocol-units/da/movement/protocol/light-node/src/main.rs
index 09c6ccc0b..098736b38 100644
--- a/protocol-units/da/movement/celestia/light-node/src/main.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/main.rs
@@ -1,5 +1,5 @@
 use k256::Secp256k1;
-use movement_celestia_da_light_node::v1::{LightNodeV1, Manager};
+use movement_da_light_node::{LightNode, Manager};
 
 use std::env;
 
@@ -15,7 +15,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 	let config_path = dot_movement.get_config_json_path();
 	let config_file = tokio::fs::File::open(config_path).await?;
 	// todo: consider whether LightNode implementation should encapsulate signing type
-	let manager = Manager::<LightNodeV1<Secp256k1>>::new(config_file).await?;
+	let manager = Manager::<LightNode<Secp256k1>>::new(config_file).await?;
 	manager.try_run().await?;
 
 	Ok(())
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/manager.rs b/protocol-units/da/movement/protocol/light-node/src/manager.rs
similarity index 85%
rename from protocol-units/da/movement/celestia/light-node/src/v1/manager.rs
rename to protocol-units/da/movement/protocol/light-node/src/manager.rs
index 3e815b772..45fdb5f94 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/manager.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/manager.rs
@@ -1,4 +1,4 @@
-use super::{LightNodeV1, LightNodeV1Operations};
+use super::{LightNode, LightNodeRuntime};
 use ecdsa::{
 	elliptic_curve::{
 		generic_array::ArrayLength,
@@ -12,19 +12,19 @@ use ecdsa::{
 	SignatureSize,
 };
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_util::config::Config;
+use movement_da_util::config::Config;
 
 #[derive(Clone)]
 pub struct Manager<LightNode>
 where
-	LightNode: LightNodeV1Operations,
+	LightNode: LightNodeRuntime,
 {
 	godfig: Godfig<Config, ConfigFile>,
 	_marker: std::marker::PhantomData<LightNode>,
 }
 
 // Implements a very simple manager using a marker strategy pattern.
-impl<C> Manager<LightNodeV1<C>>
+impl<C> Manager<LightNode<C>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -42,7 +42,7 @@ where
 		Ok(Self { godfig, _marker: std::marker::PhantomData })
 	}
 
-	pub async fn try_light_node(&self) -> Result<LightNodeV1<C>, anyhow::Error>
+	pub async fn try_light_node(&self) -> Result<LightNode<C>, anyhow::Error>
 	where
 		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -51,7 +51,7 @@ where
 		FieldBytesSize<C>: ModulusSize,
 	{
 		let config = self.godfig.try_wait_for_ready().await?;
-		LightNodeV1::try_from_config(config).await
+		LightNode::try_from_config(config).await
 	}
 
 	pub async fn try_run(&self) -> Result<(), anyhow::Error> {
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
similarity index 96%
rename from protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs
rename to protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 176e9cff3..64e0ab7c7 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -1,4 +1,4 @@
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::fmt::{self, Debug, Formatter};
 use std::sync::Arc;
 use tokio_stream::{Stream, StreamExt};
@@ -8,17 +8,15 @@ use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{nmt::Namespace, Blob as CelestiaBlob, TxConfig};
 
 // FIXME: glob imports are bad style
-use movement_celestia_da_light_node_verifier::{
-	permissioned_signers::Verifier, VerifierOperations,
-};
-use movement_celestia_da_util::{
+use movement_da_light_node_proto::light_node_service_server::LightNodeService;
+use movement_da_light_node_proto::*;
+use movement_da_light_node_verifier::{permissioned_signers::Verifier, VerifierOperations};
+use movement_da_util::{
 	config::Config,
 	ir_blob::{celestia::CelestiaIntermediateBlobRepresentation, InnerSignedBlobV1Data},
 };
-use movement_da_light_node_proto::light_node_service_server::LightNodeService;
-use movement_da_light_node_proto::*;
 
-use crate::v1::LightNodeV1Operations;
+use crate::LightNodeRuntime;
 use ecdsa::{
 	elliptic_curve::{
 		generic_array::ArrayLength,
@@ -33,7 +31,7 @@ use ecdsa::{
 };
 
 #[derive(Clone)]
-pub struct LightNodeV1<C>
+pub struct LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -50,7 +48,7 @@ where
 	pub signing_key: SigningKey<C>,
 }
 
-impl<C> Debug for LightNodeV1<C>
+impl<C> Debug for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -59,13 +57,13 @@ where
 	FieldBytesSize<C>: ModulusSize,
 {
 	fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-		f.debug_struct("LightNodeV1")
+		f.debug_struct("LightNode")
 			.field("celestia_namespace", &self.config.celestia_namespace())
 			.finish()
 	}
 }
 
-impl<C> LightNodeV1Operations for LightNodeV1<C>
+impl<C> LightNodeRuntime for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -73,7 +71,7 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 {
-	/// Tries to create a new LightNodeV1 instance from the toml config file.
+	/// Tries to create a new LightNode instance from the toml config file.
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
 		let client = Arc::new(config.connect_celestia().await?);
 
@@ -100,13 +98,13 @@ where
 		Ok(self.config.movement_da_light_node_service())
 	}
 
-	/// Runs background tasks for the LightNodeV1 instance.
+	/// Runs background tasks for the LightNode instance.
 	async fn run_background_tasks(&self) -> Result<(), anyhow::Error> {
 		Ok(())
 	}
 }
 
-impl<C> LightNodeV1<C>
+impl<C> LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -348,7 +346,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> LightNodeService for LightNodeV1<C>
+impl<C> LightNodeService for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
diff --git a/protocol-units/da/movement/celestia/light-node/src/v1/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
similarity index 94%
rename from protocol-units/da/movement/celestia/light-node/src/v1/sequencer.rs
rename to protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index f61ad17b1..b17f83fd4 100644
--- a/protocol-units/da/movement/celestia/light-node/src/v1/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -11,9 +11,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_celestia_da_light_node_prevalidator::{
-	aptos::whitelist::Validator, PrevalidatorOperations,
-};
+use movement_da_light_node_prevalidator::{aptos::whitelist::Validator, PrevalidatorOperations};
 use std::boxed::Box;
 use std::fmt::Debug;
 use std::path::PathBuf;
@@ -34,18 +32,18 @@ use movement_algs::grouping_heuristic::{
 	apply::ToApply, binpacking::FirstFitBinpacking, drop_success::DropSuccess, skip::SkipFor,
 	splitting::Splitting, GroupingHeuristicStack, GroupingOutcome,
 };
-use movement_celestia_da_util::config::Config;
 use movement_da_light_node_proto as grpc;
 use movement_da_light_node_proto::blob_response::BlobType;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
+use movement_da_util::config::Config;
 use movement_types::block::Block;
 
-use crate::v1::{passthrough::LightNodeV1 as LightNodeV1PassThrough, LightNodeV1Operations};
+use crate::{passthrough::LightNode as LightNodePassThrough, LightNodeRuntime};
 
 const LOGGING_UID: AtomicU64 = AtomicU64::new(0);
 
 #[derive(Clone)]
-pub struct LightNodeV1<C>
+pub struct LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -53,12 +51,12 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 {
-	pub pass_through: LightNodeV1PassThrough<C>,
+	pub pass_through: LightNodePassThrough<C>,
 	pub memseq: Arc<memseq::Memseq<memseq::RocksdbMempool>>,
 	pub prevalidator: Option<Arc<Validator>>,
 }
 
-impl<C> Debug for LightNodeV1<C>
+impl<C> Debug for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -67,11 +65,11 @@ where
 	FieldBytesSize<C>: ModulusSize,
 {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		f.debug_struct("LightNodeV1").field("pass_through", &self.pass_through).finish()
+		f.debug_struct("LightNode").field("pass_through", &self.pass_through).finish()
 	}
 }
 
-impl<C> LightNodeV1Operations for LightNodeV1<C>
+impl<C> LightNodeRuntime for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -80,10 +78,10 @@ where
 	FieldBytesSize<C>: ModulusSize,
 {
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
-		info!("Initializing LightNodeV1 in sequencer mode from environment.");
+		info!("Initializing LightNode in sequencer mode from environment.");
 
-		let pass_through = LightNodeV1PassThrough::try_from_config(config.clone()).await?;
-		info!("Initialized pass through for LightNodeV1 in sequencer mode.");
+		let pass_through = LightNodePassThrough::try_from_config(config.clone()).await?;
+		info!("Initialized pass through for LightNode in sequencer mode.");
 
 		let memseq_path = pass_through.config.try_memseq_path()?;
 		info!("Memseq path: {:?}", memseq_path);
@@ -94,7 +92,7 @@ where
 			max_block_size,
 			build_time,
 		)?);
-		info!("Initialized Memseq with Move Rocks for LightNodeV1 in sequencer mode.");
+		info!("Initialized Memseq with Move Rocks for LightNode in sequencer mode.");
 
 		// prevalidator
 		let whitelisted_accounts = config.whitelisted_accounts()?;
@@ -117,7 +115,7 @@ where
 	}
 }
 
-impl<C> LightNodeV1<C>
+impl<C> LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -374,7 +372,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> LightNodeService for LightNodeV1<C>
+impl<C> LightNodeService for LightNode<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -480,18 +478,14 @@ where
 						}
 						Err(e) => {
 							match e {
-								movement_celestia_da_light_node_prevalidator::Error::Validation(
-									_,
-								) => {
+								movement_da_light_node_prevalidator::Error::Validation(_) => {
 									// discard the transaction
 									info!(
 										"discarding transaction due to prevalidation error {:?}",
 										e
 									);
 								}
-								movement_celestia_da_light_node_prevalidator::Error::Internal(
-									e,
-								) => {
+								movement_da_light_node_prevalidator::Error::Internal(e) => {
 									return Err(tonic::Status::internal(e.to_string()));
 								}
 							}
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/Cargo.toml b/protocol-units/da/movement/protocol/prevalidator/Cargo.toml
similarity index 86%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/Cargo.toml
rename to protocol-units/da/movement/protocol/prevalidator/Cargo.toml
index 86839170e..a8b78c1ce 100644
--- a/protocol-units/da/movement/celestia/light-node-prevalidator/Cargo.toml
+++ b/protocol-units/da/movement/protocol/prevalidator/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-light-node-prevalidator"
+name = "movement-da-light-node-prevalidator"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
@@ -20,7 +20,7 @@ tokio-stream = { workspace = true }
 tonic = { workspace = true }
 prost = { workspace = true }
 movement-da-light-node-proto = { workspace = true, features = [] }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 anyhow = { workspace = true }
 hex = { workspace = true }
 async-stream = { workspace = true }
@@ -33,7 +33,7 @@ bcs = { workspace = true }
 aptos-types = { workspace = true, optional = true}
 
 [dev-dependencies]
-movement-celestia-da-light-node-setup = { workspace = true }
+movement-da-light-node-setup = { workspace = true }
 dot-movement = { workspace = true }
 k256 = { workspace = true }
 rand = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/mod.rs b/protocol-units/da/movement/protocol/prevalidator/src/aptos/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/mod.rs
rename to protocol-units/da/movement/protocol/prevalidator/src/aptos/mod.rs
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/transaction.rs b/protocol-units/da/movement/protocol/prevalidator/src/aptos/transaction.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/transaction.rs
rename to protocol-units/da/movement/protocol/prevalidator/src/aptos/transaction.rs
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/whitelist/mod.rs b/protocol-units/da/movement/protocol/prevalidator/src/aptos/whitelist/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/whitelist/mod.rs
rename to protocol-units/da/movement/protocol/prevalidator/src/aptos/whitelist/mod.rs
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/whitelist/whitelist.rs b/protocol-units/da/movement/protocol/prevalidator/src/aptos/whitelist/whitelist.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/src/aptos/whitelist/whitelist.rs
rename to protocol-units/da/movement/protocol/prevalidator/src/aptos/whitelist/whitelist.rs
diff --git a/protocol-units/da/movement/celestia/light-node-prevalidator/src/lib.rs b/protocol-units/da/movement/protocol/prevalidator/src/lib.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-prevalidator/src/lib.rs
rename to protocol-units/da/movement/protocol/prevalidator/src/lib.rs
diff --git a/protocol-units/da/movement/protocol/proto/Cargo.toml b/protocol-units/da/movement/protocol/proto/Cargo.toml
new file mode 100644
index 000000000..9fb76e562
--- /dev/null
+++ b/protocol-units/da/movement/protocol/proto/Cargo.toml
@@ -0,0 +1,29 @@
+[package]
+name = "movement-da-light-node-proto"
+version = { workspace = true }
+edition = { workspace = true }
+license = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+publish = { workspace = true }
+rust-version = { workspace = true }
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tonic = { workspace = true }
+prost = { workspace = true }
+
+[build-dependencies]
+tonic-build = { workspace = true, features = ["prost"] }
+buildtime = { workspace = true }
+
+[features]
+default = []
+client = []
+server = []
+
+
+[lints]
+workspace = true
diff --git a/protocol-units/da/movement/protocol/light-node/build.rs b/protocol-units/da/movement/protocol/proto/build.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/light-node/build.rs
rename to protocol-units/da/movement/protocol/proto/build.rs
diff --git a/protocol-units/da/movement/protocol/proto/src/lib.rs b/protocol-units/da/movement/protocol/proto/src/lib.rs
new file mode 100644
index 000000000..4defff541
--- /dev/null
+++ b/protocol-units/da/movement/protocol/proto/src/lib.rs
@@ -0,0 +1,8 @@
+pub mod v1beta1 {
+	tonic::include_proto!("movementlabs.protocol_units.da.light_node.v1beta1"); // The string specified here
+	pub const FILE_DESCRIPTOR_SET: &[u8] =
+		tonic::include_file_descriptor_set!("movement-da-light-node-proto-descriptor");
+}
+
+// Re-export the latest version at the crate root
+pub use v1beta1::*;
diff --git a/protocol-units/da/movement/celestia/runners/Cargo.toml b/protocol-units/da/movement/protocol/runners/Cargo.toml
similarity index 91%
rename from protocol-units/da/movement/celestia/runners/Cargo.toml
rename to protocol-units/da/movement/protocol/runners/Cargo.toml
index 4e95cea78..452a6beae 100644
--- a/protocol-units/da/movement/celestia/runners/Cargo.toml
+++ b/protocol-units/da/movement/protocol/runners/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-light-node-runners"
+name = "movement-da-light-node-runners"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
@@ -31,7 +31,7 @@ serde_json = { workspace = true }
 serde = { workspace = true }
 commander = { workspace = true }
 tracing = { workspace = true }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 dot-movement = { workspace = true }
 rand = { workspace = true }
 hex = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/runners/src/bin/celestia-appd.rs b/protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs
similarity index 84%
rename from protocol-units/da/movement/celestia/runners/src/bin/celestia-appd.rs
rename to protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs
index 1c8def8cf..143b7a5e3 100644
--- a/protocol-units/da/movement/celestia/runners/src/bin/celestia-appd.rs
+++ b/protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs
@@ -1,6 +1,6 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_light_node_runners::{celestia_appd::CelestiaAppd, Runner};
-use movement_celestia_da_util::CelestiaDaLightNodeConfig;
+use movement_da_light_node_runners::{celestia_appd::CelestiaAppd, Runner};
+use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/protocol-units/da/movement/celestia/runners/src/bin/celestia-bridge.rs b/protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs
similarity index 83%
rename from protocol-units/da/movement/celestia/runners/src/bin/celestia-bridge.rs
rename to protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs
index b68a08468..d2471ee6d 100644
--- a/protocol-units/da/movement/celestia/runners/src/bin/celestia-bridge.rs
+++ b/protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs
@@ -1,6 +1,6 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_light_node_runners::{celestia_bridge::CelestiaBridge, Runner};
-use movement_celestia_da_util::CelestiaDaLightNodeConfig;
+use movement_da_light_node_runners::{celestia_bridge::CelestiaBridge, Runner};
+use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/protocol-units/da/movement/celestia/runners/src/bin/celestia-light.rs b/protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs
similarity index 83%
rename from protocol-units/da/movement/celestia/runners/src/bin/celestia-light.rs
rename to protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs
index 6d031427c..3e9fe2390 100644
--- a/protocol-units/da/movement/celestia/runners/src/bin/celestia-light.rs
+++ b/protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs
@@ -1,6 +1,6 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_light_node_runners::{celestia_light::CelestiaLight, Runner};
-use movement_celestia_da_util::CelestiaDaLightNodeConfig;
+use movement_da_light_node_runners::{celestia_light::CelestiaLight, Runner};
+use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_appd/local.rs b/protocol-units/da/movement/protocol/runners/src/celestia_appd/local.rs
similarity index 96%
rename from protocol-units/da/movement/celestia/runners/src/celestia_appd/local.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_appd/local.rs
index 1a03daf03..5422d2719 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_appd/local.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_appd/local.rs
@@ -11,7 +11,7 @@ impl Local {
 	pub async fn run(
 		&self,
 		dot_movement: dot_movement::DotMovement,
-		config: movement_celestia_da_util::config::local::Config,
+		config: movement_da_util::config::local::Config,
 	) -> Result<(), anyhow::Error> {
 		// celestia-appd start --grpc.enable --home $CELESTIA_APP_PATH --log_level $LOG_LEVEL
 
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_appd/mod.rs b/protocol-units/da/movement/protocol/runners/src/celestia_appd/mod.rs
similarity index 66%
rename from protocol-units/da/movement/celestia/runners/src/celestia_appd/mod.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_appd/mod.rs
index 076ad752a..a76530496 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_appd/mod.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_appd/mod.rs
@@ -1,6 +1,6 @@
 pub mod local;
 use crate::Runner;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 #[derive(Debug, Clone)]
 pub struct CelestiaAppd {}
@@ -12,15 +12,15 @@ impl Runner for CelestiaAppd {
 		config: CelestiaDaLightNodeConfig,
 	) -> Result<(), anyhow::Error> {
 		match config.celestia_da_light_node_config {
-			movement_celestia_da_util::config::Config::Local(config) => {
+			movement_da_util::config::Config::Local(config) => {
 				let local = local::Local::new();
 				local.run(dot_movement, config).await?;
 				Ok(())
 			}
-			movement_celestia_da_util::config::Config::Arabica(config) => {
+			movement_da_util::config::Config::Arabica(config) => {
 				Err(anyhow::anyhow!("Arabica not implemented"))
 			}
-			movement_celestia_da_util::config::Config::Mocha(config) => {
+			movement_da_util::config::Config::Mocha(config) => {
 				Err(anyhow::anyhow!("Mocha not implemented"))
 			}
 		}
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_bridge/local.rs b/protocol-units/da/movement/protocol/runners/src/celestia_bridge/local.rs
similarity index 96%
rename from protocol-units/da/movement/celestia/runners/src/celestia_bridge/local.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_bridge/local.rs
index a5897bf42..f846b0840 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_bridge/local.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_bridge/local.rs
@@ -15,7 +15,7 @@ impl Local {
 
 	async fn get_genesis_block(
 		&self,
-		config: &movement_celestia_da_util::config::local::Config,
+		config: &movement_da_util::config::local::Config,
 	) -> Result<String> {
 		let client = Client::new();
 		let mut genesis = String::new();
@@ -59,7 +59,7 @@ impl Local {
 	pub async fn run(
 		&self,
 		_dot_movement: dot_movement::DotMovement,
-		config: movement_celestia_da_util::config::local::Config,
+		config: movement_da_util::config::local::Config,
 	) -> Result<()> {
 		let genesis = self.get_genesis_block(&config).await?;
 
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_bridge/mod.rs b/protocol-units/da/movement/protocol/runners/src/celestia_bridge/mod.rs
similarity index 66%
rename from protocol-units/da/movement/celestia/runners/src/celestia_bridge/mod.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_bridge/mod.rs
index d7c176cc1..b3c2fcd4a 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_bridge/mod.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_bridge/mod.rs
@@ -1,5 +1,5 @@
 pub mod local;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 use crate::Runner;
 
@@ -13,14 +13,14 @@ impl Runner for CelestiaBridge {
 		config: CelestiaDaLightNodeConfig,
 	) -> Result<(), anyhow::Error> {
 		match config.celestia_da_light_node_config {
-			movement_celestia_da_util::config::Config::Local(config) => {
+			movement_da_util::config::Config::Local(config) => {
 				let local = local::Local::new();
 				local.run(dot_movement, config).await?;
 			}
-			movement_celestia_da_util::config::Config::Arabica(_config) => {
+			movement_da_util::config::Config::Arabica(_config) => {
 				Err(anyhow::anyhow!("Arabica not implemented"))?;
 			}
-			movement_celestia_da_util::config::Config::Mocha(_config) => {
+			movement_da_util::config::Config::Mocha(_config) => {
 				Err(anyhow::anyhow!("Mocha not implemented"))?;
 			}
 		}
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_light/arabica.rs b/protocol-units/da/movement/protocol/runners/src/celestia_light/arabica.rs
similarity index 89%
rename from protocol-units/da/movement/celestia/runners/src/celestia_light/arabica.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_light/arabica.rs
index c5b60b047..b6c897486 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_light/arabica.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_light/arabica.rs
@@ -9,7 +9,7 @@ impl Arabica {
 	pub async fn run(
 		&self,
 		_dot_movement: dot_movement::DotMovement,
-		_config: movement_celestia_da_util::config::local::Config,
+		_config: movement_da_util::config::local::Config,
 	) -> Result<(), anyhow::Error> {
 		// celestia light start --core.ip validator-1.celestia-arabica-11.com --p2p.network arabica
 		commander::run_command(
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_light/mocha.rs b/protocol-units/da/movement/protocol/runners/src/celestia_light/mocha.rs
similarity index 89%
rename from protocol-units/da/movement/celestia/runners/src/celestia_light/mocha.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_light/mocha.rs
index 98150d3a0..531320d8d 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_light/mocha.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_light/mocha.rs
@@ -9,7 +9,7 @@ impl Mocha {
 	pub async fn run(
 		&self,
 		_dot_movement: dot_movement::DotMovement,
-		_config: movement_celestia_da_util::config::local::Config,
+		_config: movement_da_util::config::local::Config,
 	) -> Result<(), anyhow::Error> {
 		// celestia light start --core.ip validator-1.celestia-mocha-11.com --p2p.network mocha
 		commander::run_command(
diff --git a/protocol-units/da/movement/celestia/runners/src/celestia_light/mod.rs b/protocol-units/da/movement/protocol/runners/src/celestia_light/mod.rs
similarity index 68%
rename from protocol-units/da/movement/celestia/runners/src/celestia_light/mod.rs
rename to protocol-units/da/movement/protocol/runners/src/celestia_light/mod.rs
index b87722197..c9bb97ce4 100644
--- a/protocol-units/da/movement/celestia/runners/src/celestia_light/mod.rs
+++ b/protocol-units/da/movement/protocol/runners/src/celestia_light/mod.rs
@@ -1,6 +1,6 @@
 pub mod arabica;
 pub mod mocha;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 use crate::Runner;
 
@@ -14,14 +14,14 @@ impl Runner for CelestiaLight {
 		config: CelestiaDaLightNodeConfig,
 	) -> Result<(), anyhow::Error> {
 		match config.celestia_da_light_node_config {
-			movement_celestia_da_util::config::Config::Local(_config) => {
+			movement_da_util::config::Config::Local(_config) => {
 				Err(anyhow::anyhow!("Local not implemented"))?;
 			}
-			movement_celestia_da_util::config::Config::Arabica(config) => {
+			movement_da_util::config::Config::Arabica(config) => {
 				let arabica = arabica::Arabica::new();
 				arabica.run(dot_movement, config).await?;
 			}
-			movement_celestia_da_util::config::Config::Mocha(config) => {
+			movement_da_util::config::Config::Mocha(config) => {
 				let mocha = mocha::Mocha::new();
 				mocha.run(dot_movement, config).await?;
 			}
diff --git a/protocol-units/da/movement/celestia/runners/src/lib.rs b/protocol-units/da/movement/protocol/runners/src/lib.rs
similarity index 77%
rename from protocol-units/da/movement/celestia/runners/src/lib.rs
rename to protocol-units/da/movement/protocol/runners/src/lib.rs
index b4099ba96..c27e1b365 100644
--- a/protocol-units/da/movement/celestia/runners/src/lib.rs
+++ b/protocol-units/da/movement/protocol/runners/src/lib.rs
@@ -1,7 +1,7 @@
 pub mod celestia_appd;
 pub mod celestia_bridge;
 pub mod celestia_light;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 pub trait Runner {
 	async fn run(
diff --git a/protocol-units/da/movement/celestia/setup/Cargo.toml b/protocol-units/da/movement/protocol/setup/Cargo.toml
similarity index 87%
rename from protocol-units/da/movement/celestia/setup/Cargo.toml
rename to protocol-units/da/movement/protocol/setup/Cargo.toml
index 48ef5b8cd..53b3a5c3c 100644
--- a/protocol-units/da/movement/celestia/setup/Cargo.toml
+++ b/protocol-units/da/movement/protocol/setup/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-light-node-setup"
+name = "movement-da-light-node-setup"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
@@ -10,7 +10,7 @@ publish = { workspace = true }
 rust-version = { workspace = true }
 
 [[bin]]
-name = "movement-celestia-da-light-node-setup"
+name = "movement-da-light-node-setup"
 path = "src/bin/setup.rs"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -23,7 +23,7 @@ serde_json = { workspace = true }
 serde = { workspace = true }
 commander = { workspace = true }
 tracing = { workspace = true }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 dot-movement = { workspace = true }
 rand = { workspace = true }
 hex = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/setup/src/arabica.rs b/protocol-units/da/movement/protocol/setup/src/arabica.rs
similarity index 99%
rename from protocol-units/da/movement/celestia/setup/src/arabica.rs
rename to protocol-units/da/movement/protocol/setup/src/arabica.rs
index 167271aad..56cf410e0 100644
--- a/protocol-units/da/movement/celestia/setup/src/arabica.rs
+++ b/protocol-units/da/movement/protocol/setup/src/arabica.rs
@@ -2,7 +2,7 @@ use crate::common;
 use anyhow::Context;
 use commander::run_command;
 use dot_movement::DotMovement;
-use movement_celestia_da_util::config::local::Config;
+use movement_da_util::config::local::Config;
 use tracing::info;
 
 #[derive(Debug, Clone)]
diff --git a/protocol-units/da/movement/celestia/setup/src/bin/setup.rs b/protocol-units/da/movement/protocol/setup/src/bin/setup.rs
similarity index 90%
rename from protocol-units/da/movement/celestia/setup/src/bin/setup.rs
rename to protocol-units/da/movement/protocol/setup/src/bin/setup.rs
index 691b7bf89..5bf9381a5 100644
--- a/protocol-units/da/movement/celestia/setup/src/bin/setup.rs
+++ b/protocol-units/da/movement/protocol/setup/src/bin/setup.rs
@@ -1,6 +1,6 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_light_node_setup::setup;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_light_node_setup::setup;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
 async fn main() -> Result<(), anyhow::Error> {
diff --git a/protocol-units/da/movement/celestia/setup/src/common/celestia.rs b/protocol-units/da/movement/protocol/setup/src/common/celestia.rs
similarity index 98%
rename from protocol-units/da/movement/celestia/setup/src/common/celestia.rs
rename to protocol-units/da/movement/protocol/setup/src/common/celestia.rs
index c3edeb04d..47dada518 100644
--- a/protocol-units/da/movement/celestia/setup/src/common/celestia.rs
+++ b/protocol-units/da/movement/protocol/setup/src/common/celestia.rs
@@ -2,7 +2,7 @@ use crate::common;
 use anyhow::Context;
 use celestia_types::nmt::Namespace;
 use dot_movement::DotMovement;
-use movement_celestia_da_util::config::local::Config;
+use movement_da_util::config::local::Config;
 use rand::Rng;
 use tracing::info;
 
diff --git a/protocol-units/da/movement/celestia/setup/src/common/file.rs b/protocol-units/da/movement/protocol/setup/src/common/file.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/setup/src/common/file.rs
rename to protocol-units/da/movement/protocol/setup/src/common/file.rs
diff --git a/protocol-units/da/movement/celestia/setup/src/common/memseq.rs b/protocol-units/da/movement/protocol/setup/src/common/memseq.rs
similarity index 93%
rename from protocol-units/da/movement/celestia/setup/src/common/memseq.rs
rename to protocol-units/da/movement/protocol/setup/src/common/memseq.rs
index f4a10bc9a..1016eacab 100644
--- a/protocol-units/da/movement/celestia/setup/src/common/memseq.rs
+++ b/protocol-units/da/movement/protocol/setup/src/common/memseq.rs
@@ -1,5 +1,5 @@
 use dot_movement::DotMovement;
-use movement_celestia_da_util::config::local::Config;
+use movement_da_util::config::local::Config;
 
 pub fn initialize_memseq_config(
 	dot_movement: DotMovement,
diff --git a/protocol-units/da/movement/celestia/setup/src/common/mod.rs b/protocol-units/da/movement/protocol/setup/src/common/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/setup/src/common/mod.rs
rename to protocol-units/da/movement/protocol/setup/src/common/mod.rs
diff --git a/protocol-units/da/movement/celestia/setup/src/lib.rs b/protocol-units/da/movement/protocol/setup/src/lib.rs
similarity index 59%
rename from protocol-units/da/movement/celestia/setup/src/lib.rs
rename to protocol-units/da/movement/protocol/setup/src/lib.rs
index ea00cbe26..637c6e4d8 100644
--- a/protocol-units/da/movement/celestia/setup/src/lib.rs
+++ b/protocol-units/da/movement/protocol/setup/src/lib.rs
@@ -2,27 +2,27 @@ pub mod arabica;
 pub mod common;
 pub mod local;
 pub mod mocha;
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 
 pub async fn setup(
 	dot_movement: dot_movement::DotMovement,
 	mut config: CelestiaDaLightNodeConfig,
 ) -> Result<CelestiaDaLightNodeConfig, anyhow::Error> {
 	let inner_config = match config.celestia_da_light_node_config {
-		movement_celestia_da_util::config::Config::Local(config) => {
+		movement_da_util::config::Config::Local(config) => {
 			let local = local::Local::new();
 			let local_config = local.setup(dot_movement, config).await?;
-			movement_celestia_da_util::config::Config::Local(local_config)
+			movement_da_util::config::Config::Local(local_config)
 		}
-		movement_celestia_da_util::config::Config::Arabica(config) => {
+		movement_da_util::config::Config::Arabica(config) => {
 			let arabica = arabica::Arabica::new();
 			let arabica_config = arabica.setup(dot_movement, config).await?;
-			movement_celestia_da_util::config::Config::Arabica(arabica_config)
+			movement_da_util::config::Config::Arabica(arabica_config)
 		}
-		movement_celestia_da_util::config::Config::Mocha(config) => {
+		movement_da_util::config::Config::Mocha(config) => {
 			let mocha = mocha::Mocha::new();
 			let mocha_config = mocha.setup(dot_movement, config).await?;
-			movement_celestia_da_util::config::Config::Mocha(mocha_config)
+			movement_da_util::config::Config::Mocha(mocha_config)
 		}
 	};
 	config.celestia_da_light_node_config = inner_config;
diff --git a/protocol-units/da/movement/celestia/setup/src/local.rs b/protocol-units/da/movement/protocol/setup/src/local.rs
similarity index 99%
rename from protocol-units/da/movement/celestia/setup/src/local.rs
rename to protocol-units/da/movement/protocol/setup/src/local.rs
index 5b632c2b5..9e6310641 100644
--- a/protocol-units/da/movement/celestia/setup/src/local.rs
+++ b/protocol-units/da/movement/protocol/setup/src/local.rs
@@ -2,7 +2,7 @@ use crate::common;
 use anyhow::Context;
 use commander::run_command;
 use dot_movement::DotMovement;
-use movement_celestia_da_util::config::local::Config;
+use movement_da_util::config::local::Config;
 use tokio::fs;
 use tracing::info;
 
diff --git a/protocol-units/da/movement/celestia/setup/src/mocha.rs b/protocol-units/da/movement/protocol/setup/src/mocha.rs
similarity index 98%
rename from protocol-units/da/movement/celestia/setup/src/mocha.rs
rename to protocol-units/da/movement/protocol/setup/src/mocha.rs
index 9dd5e5074..3e5e86b60 100644
--- a/protocol-units/da/movement/celestia/setup/src/mocha.rs
+++ b/protocol-units/da/movement/protocol/setup/src/mocha.rs
@@ -2,7 +2,7 @@ use crate::common;
 use anyhow::Context;
 use commander::run_command;
 use dot_movement::DotMovement;
-use movement_celestia_da_util::config::local::Config;
+use movement_da_util::config::local::Config;
 use tracing::info;
 
 #[derive(Debug, Clone)]
diff --git a/protocol-units/da/movement/celestia/light-node-tests/Cargo.toml b/protocol-units/da/movement/protocol/tests/Cargo.toml
similarity index 94%
rename from protocol-units/da/movement/celestia/light-node-tests/Cargo.toml
rename to protocol-units/da/movement/protocol/tests/Cargo.toml
index 3dbd318b0..f42243fe2 100644
--- a/protocol-units/da/movement/celestia/light-node-tests/Cargo.toml
+++ b/protocol-units/da/movement/protocol/tests/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-light-node-tests"
+name = "movement-da-light-node-tests"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/lib.rs b/protocol-units/da/movement/protocol/tests/src/lib.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/lib.rs
rename to protocol-units/da/movement/protocol/tests/src/lib.rs
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/mod.rs b/protocol-units/da/movement/protocol/tests/src/test/e2e/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/mod.rs
rename to protocol-units/da/movement/protocol/tests/src/test/e2e/mod.rs
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/mod.rs b/protocol-units/da/movement/protocol/tests/src/test/e2e/raw/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/mod.rs
rename to protocol-units/da/movement/protocol/tests/src/test/e2e/raw/mod.rs
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/passthrough.rs b/protocol-units/da/movement/protocol/tests/src/test/e2e/raw/passthrough.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/passthrough.rs
rename to protocol-units/da/movement/protocol/tests/src/test/e2e/raw/passthrough.rs
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/sequencer.rs b/protocol-units/da/movement/protocol/tests/src/test/e2e/raw/sequencer.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/test/e2e/raw/sequencer.rs
rename to protocol-units/da/movement/protocol/tests/src/test/e2e/raw/sequencer.rs
diff --git a/protocol-units/da/movement/celestia/light-node-tests/src/test/mod.rs b/protocol-units/da/movement/protocol/tests/src/test/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-tests/src/test/mod.rs
rename to protocol-units/da/movement/protocol/tests/src/test/mod.rs
diff --git a/protocol-units/da/movement/celestia/util/Cargo.toml b/protocol-units/da/movement/protocol/util/Cargo.toml
similarity index 97%
rename from protocol-units/da/movement/celestia/util/Cargo.toml
rename to protocol-units/da/movement/protocol/util/Cargo.toml
index e8f1f92c6..2ba85aa27 100644
--- a/protocol-units/da/movement/celestia/util/Cargo.toml
+++ b/protocol-units/da/movement/protocol/util/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-util"
+name = "movement-da-util"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/util/src/bin/wait_for_light_node.rs b/protocol-units/da/movement/protocol/util/src/bin/wait_for_light_node.rs
similarity index 94%
rename from protocol-units/da/movement/celestia/util/src/bin/wait_for_light_node.rs
rename to protocol-units/da/movement/protocol/util/src/bin/wait_for_light_node.rs
index 0e397f63f..42ff79cd2 100644
--- a/protocol-units/da/movement/celestia/util/src/bin/wait_for_light_node.rs
+++ b/protocol-units/da/movement/protocol/util/src/bin/wait_for_light_node.rs
@@ -1,6 +1,6 @@
 use celestia_rpc::HeaderClient;
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_celestia_da_util::config::CelestiaDaLightNodeConfig;
+use movement_da_util::config::CelestiaDaLightNodeConfig;
 use tracing::info;
 
 #[tokio::main]
diff --git a/protocol-units/da/movement/celestia/util/src/config/common.rs b/protocol-units/da/movement/protocol/util/src/config/common.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/common.rs
rename to protocol-units/da/movement/protocol/util/src/config/common.rs
diff --git a/protocol-units/da/movement/celestia/util/src/config/local/appd.rs b/protocol-units/da/movement/protocol/util/src/config/local/appd.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/local/appd.rs
rename to protocol-units/da/movement/protocol/util/src/config/local/appd.rs
diff --git a/protocol-units/da/movement/celestia/util/src/config/local/bridge.rs b/protocol-units/da/movement/protocol/util/src/config/local/bridge.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/local/bridge.rs
rename to protocol-units/da/movement/protocol/util/src/config/local/bridge.rs
diff --git a/protocol-units/da/movement/celestia/util/src/config/local/da_light_node.rs b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/local/da_light_node.rs
rename to protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
diff --git a/protocol-units/da/movement/celestia/util/src/config/local/mod.rs b/protocol-units/da/movement/protocol/util/src/config/local/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/local/mod.rs
rename to protocol-units/da/movement/protocol/util/src/config/local/mod.rs
diff --git a/protocol-units/da/movement/celestia/util/src/config/mod.rs b/protocol-units/da/movement/protocol/util/src/config/mod.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/config/mod.rs
rename to protocol-units/da/movement/protocol/util/src/config/mod.rs
diff --git a/protocol-units/da/movement/celestia/util/src/ir_blob.rs b/protocol-units/da/movement/protocol/util/src/ir_blob.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/ir_blob.rs
rename to protocol-units/da/movement/protocol/util/src/ir_blob.rs
diff --git a/protocol-units/da/movement/celestia/util/src/lib.rs b/protocol-units/da/movement/protocol/util/src/lib.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/util/src/lib.rs
rename to protocol-units/da/movement/protocol/util/src/lib.rs
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/Cargo.toml b/protocol-units/da/movement/protocol/verifier/Cargo.toml
similarity index 86%
rename from protocol-units/da/movement/celestia/light-node-verifier/Cargo.toml
rename to protocol-units/da/movement/protocol/verifier/Cargo.toml
index cd88ba8ae..3b425aa1f 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/Cargo.toml
+++ b/protocol-units/da/movement/protocol/verifier/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-celestia-da-light-node-verifier"
+name = "movement-da-light-node-verifier"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
@@ -18,7 +18,7 @@ tokio-stream = { workspace = true }
 tonic = { workspace = true }
 prost = { workspace = true }
 movement-da-light-node-proto = { workspace = true, features = [] }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 celestia-rpc = { workspace = true }
 celestia-types = { workspace = true }
 anyhow = { workspace = true }
@@ -30,7 +30,7 @@ ecdsa = { workspace = true, features = ["signing", "verifying", "der"] }
 tracing = { workspace = true }
 
 [dev-dependencies]
-movement-celestia-da-light-node-setup = { workspace = true }
+movement-da-light-node-setup = { workspace = true }
 dot-movement = { workspace = true }
 k256 = { workspace = true }
 rand = { workspace = true }
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/celestia/mod.rs b/protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs
similarity index 91%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/celestia/mod.rs
rename to protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs
index d0d01e162..5834d1b90 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/src/celestia/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs
@@ -1,7 +1,7 @@
 use crate::{Error, Verified, VerifierOperations};
 use celestia_rpc::Client;
 use celestia_types::{nmt::Namespace, Blob};
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 #[derive(Clone)]
@@ -45,7 +45,7 @@ mod tests {
 	pub async fn test_valid_verifies() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 
 		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
 		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
@@ -63,7 +63,7 @@ mod tests {
 	pub async fn test_absent_does_not_verify() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 		let client = Arc::new(config.connect_celestia().await?);
 		let celestia_namespace = config.celestia_namespace();
 
@@ -96,7 +96,7 @@ mod tests {
 	pub async fn test_wrong_height_does_not_verify() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 		let client = Arc::new(config.connect_celestia().await?);
 		let celestia_namespace = config.celestia_namespace();
 
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/celestia/pessimistic.rs b/protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs
similarity index 96%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/celestia/pessimistic.rs
rename to protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs
index f4d14d6ee..92d480c73 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/src/celestia/pessimistic.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs
@@ -1,7 +1,7 @@
 use crate::{Error, Verified, VerifierOperations};
 use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob};
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 #[derive(Clone)]
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/lib.rs b/protocol-units/da/movement/protocol/verifier/src/lib.rs
similarity index 100%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/lib.rs
rename to protocol-units/da/movement/protocol/verifier/src/lib.rs
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/permissioned_signers/mod.rs b/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
similarity index 97%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/permissioned_signers/mod.rs
rename to protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
index 7cd21376f..2b0e3cf81 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/src/permissioned_signers/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
@@ -17,7 +17,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 /// A verifier of Celestia blobs for permissioned signers
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/signed/mod.rs b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
similarity index 98%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/signed/mod.rs
rename to protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
index 320a0c80c..51b41156c 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/src/signed/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
@@ -11,7 +11,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_celestia_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use std::collections::HashSet;
 use tracing::info;
 
diff --git a/protocol-units/da/movement/celestia/light-node-verifier/src/v1.rs b/protocol-units/da/movement/protocol/verifier/src/v1.rs
similarity index 94%
rename from protocol-units/da/movement/celestia/light-node-verifier/src/v1.rs
rename to protocol-units/da/movement/protocol/verifier/src/v1.rs
index ff1aa97d7..959de99dd 100644
--- a/protocol-units/da/movement/celestia/light-node-verifier/src/v1.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/v1.rs
@@ -87,7 +87,7 @@ mod tests {
 	pub async fn test_valid_verifies() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 
 		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
 		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
@@ -105,7 +105,7 @@ mod tests {
 	pub async fn test_absent_does_not_verify() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 		let client = Arc::new(config.connect_celestia().await?);
 		let celestia_namespace = config.celestia_namespace();
 
@@ -138,7 +138,7 @@ mod tests {
 	pub async fn test_wrong_height_does_not_verify() -> Result<(), anyhow::Error> {
 		let dot_movement = dot_movement::DotMovement::try_from_env()?;
 		let config = dot_movement
-			.try_get_config_from_json::<movement_celestia_da_util::CelestiaDaLightNodeConfig>()?;
+			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
 		let client = Arc::new(config.connect_celestia().await?);
 		let celestia_namespace = config.celestia_namespace();
 
diff --git a/protocol-units/da/movement/providers/celestia/Cargo.toml b/protocol-units/da/movement/providers/celestia/Cargo.toml
new file mode 100644
index 000000000..8e645f3c1
--- /dev/null
+++ b/protocol-units/da/movement/providers/celestia/Cargo.toml
@@ -0,0 +1,29 @@
+[package]
+name = "movement-da-light-node-celestia"
+version = { workspace = true }
+edition = { workspace = true }
+license = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+publish = { workspace = true }
+rust-version = { workspace = true }
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+tonic = { workspace = true }
+prost = { workspace = true }
+
+[build-dependencies]
+movement-da-light-node-da = { workspace = true }
+movement-da-util = { workspace = true }
+
+[features]
+default = []
+client = []
+server = []
+
+
+[lints]
+workspace = true
diff --git a/protocol-units/da/movement/providers/celestia/src/blob/mod.rs b/protocol-units/da/movement/providers/celestia/src/blob/mod.rs
new file mode 100644
index 000000000..e69de29bb
diff --git a/protocol-units/da/movement/providers/celestia/src/da/mod.rs b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
new file mode 100644
index 000000000..4a4f6d781
--- /dev/null
+++ b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
@@ -0,0 +1 @@
+pub struct Da {}
diff --git a/protocol-units/da/movement/providers/celestia/src/lib.rs b/protocol-units/da/movement/providers/celestia/src/lib.rs
new file mode 100644
index 000000000..87a16cbec
--- /dev/null
+++ b/protocol-units/da/movement/providers/celestia/src/lib.rs
@@ -0,0 +1,2 @@
+pub mod blob;
+pub mod da;
diff --git a/protocol-units/execution/maptos/util/Cargo.toml b/protocol-units/execution/maptos/util/Cargo.toml
index 1d26bb5f8..d6b01bddc 100644
--- a/protocol-units/execution/maptos/util/Cargo.toml
+++ b/protocol-units/execution/maptos/util/Cargo.toml
@@ -15,7 +15,7 @@ path = "src/lib.rs"
 [dependencies]
 aptos-types = { workspace = true }
 aptos-account-whitelist = { workspace = true }
-movement-celestia-da-util = { workspace = true }
+movement-da-util = { workspace = true }
 anyhow = { workspace = true }
 aptos-crypto = { workspace = true, features = ["cloneable-private-keys"] }
 rand = { workspace = true }

From 4f8f2a37aa5944b757d2ce33c96f17ccedec428a Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 15:02:41 -0800
Subject: [PATCH 05/43] fix: refactor stream.

---
 Cargo.lock                                    |   3 +
 .../da/movement/protocol/da/src/lib.rs        |  46 +++-
 .../protocol/util/src/blob/ir/blob.rs         | 129 +++++++++
 .../protocol/util/src/blob/ir/data.rs         |  73 +++++
 .../movement/protocol/util/src/blob/ir/id.rs  |  25 ++
 .../movement/protocol/util/src/blob/ir/mod.rs |   3 +
 .../da/movement/protocol/util/src/blob/mod.rs |   1 +
 .../da/movement/protocol/util/src/ir_blob.rs  | 253 ------------------
 .../da/movement/protocol/util/src/lib.rs      |   2 +-
 .../da/movement/protocol/verifier/src/lib.rs  |   1 -
 .../verifier/src/permissioned_signers/mod.rs  |   8 +-
 .../protocol/verifier/src/signed/mod.rs       |   2 +-
 .../da/movement/protocol/verifier/src/v1.rs   | 168 ------------
 .../da/movement/providers/celestia/Cargo.toml |   3 +
 .../da/movement/providers/celestia/src/lib.rs |   1 +
 .../celestia/src/verifier}/mod.rs             |   4 +-
 .../celestia/src/verifier}/pessimistic.rs     |   4 +-
 17 files changed, 291 insertions(+), 435 deletions(-)
 create mode 100644 protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
 create mode 100644 protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
 create mode 100644 protocol-units/da/movement/protocol/util/src/blob/ir/id.rs
 create mode 100644 protocol-units/da/movement/protocol/util/src/blob/ir/mod.rs
 create mode 100644 protocol-units/da/movement/protocol/util/src/blob/mod.rs
 delete mode 100644 protocol-units/da/movement/protocol/util/src/ir_blob.rs
 delete mode 100644 protocol-units/da/movement/protocol/verifier/src/v1.rs
 rename protocol-units/da/movement/{protocol/verifier/src/celestia => providers/celestia/src/verifier}/mod.rs (96%)
 rename protocol-units/da/movement/{protocol/verifier/src/celestia => providers/celestia/src/verifier}/pessimistic.rs (93%)

diff --git a/Cargo.lock b/Cargo.lock
index b4b99a0a1..ea823782f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10235,7 +10235,10 @@ dependencies = [
 name = "movement-da-light-node-celestia"
 version = "0.0.2"
 dependencies = [
+ "celestia-rpc",
+ "celestia-types",
  "movement-da-light-node-da",
+ "movement-da-light-node-verifier",
  "movement-da-util",
  "prost 0.13.3",
  "tonic 0.12.3",
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 82d8fad73..add0d1d85 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -1,8 +1,29 @@
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
 use movement_da_light_node_proto::Blob;
+use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::error;
 use std::future::Future;
 
+/// A blob meant for the DA.
+#[derive(Debug, Clone)]
+pub struct DaBlob(Vec<u8>);
+
+impl DaBlob {
+	/// Creates a new [DaBlob] from a vector of bytes.
+	pub fn new(data: Vec<u8>) -> Self {
+		Self(data)
+	}
+
+	/// Returns a reference to the inner vector of bytes.
+	pub fn as_ref(&self) -> &[u8] {
+		self.0.as_slice()
+	}
+
+	/// Consumes the [DaBlob] and returns the inner vector of bytes.
+	pub fn into_inner(self) -> Vec<u8> {
+		self.0
+	}
+}
+
 /// A certificate from consensus indicating a height.
 #[derive(Debug, Clone)]
 pub enum Certificate {
@@ -17,10 +38,10 @@ pub enum Certificate {
 pub enum DaError {
 	#[error("blob submission error: {0}")]
 	BlobSubmission(#[source] Box<dyn error::Error + Send + Sync>),
-	#[error("blobs at height fatal error: {0}")]
-	BlobsAtHeightFatal(#[source] Box<dyn error::Error + Send + Sync>),
 	#[error("blobs at height error: {0}")]
 	BlobsAtHeight(#[source] Box<dyn error::Error + Send + Sync>),
+	#[error("blobs at height fatal error: {0}")]
+	BlobsAtHeightNonFatal(#[source] Box<dyn error::Error + Send + Sync>),
 	#[error("internal error: {0}")]
 	Internal(String),
 }
@@ -28,8 +49,8 @@ pub enum DaError {
 pub trait DaOperations {
 	/// Submits a blob to the DA.
 	///
-	/// A DA must allow for submission of raw blobs.
-	fn submit_blob(&self, data: Vec<u8>) -> impl Future<Output = Result<Blob, DaError>>;
+	/// A DA must allow for submission of raw [DaBlob]s and return a [Blob].
+	fn submit_blob(&self, data: DaBlob) -> impl Future<Output = Result<Blob, DaError>>;
 
 	/// Gets the blobs at a given height.
 	///
@@ -39,6 +60,21 @@ pub trait DaOperations {
 		height: u64,
 	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>>;
 
+	/// Gets the IR blobs at a given height as would be used by the stream.
+	fn get_ir_blobs_at_height_for_stream(
+		&self,
+		height: u64,
+	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> {
+		async move {
+			// get the blobs at a given height, if the error is NonFatal, return an empty vec
+			match self.get_ir_blobs_at_height(height).await {
+				Ok(blobs) => Ok(blobs),
+				Err(DaError::BlobsAtHeightNonFatal(_)) => Ok(vec![]),
+				Err(e) => Err(e),
+			}
+		}
+	}
+
 	/// Streams certificates from the DA.
 	///
 	/// A DA must allow for streaming of [Certificate]s. This is used to inform [Blob] polling.
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
new file mode 100644
index 000000000..924832152
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -0,0 +1,129 @@
+use crate::blob::ir::data::InnerSignedBlobV1Data;
+use crate::blob::ir::id::Id;
+use ecdsa::{
+	elliptic_curve::{
+		generic_array::ArrayLength,
+		ops::Invert,
+		point::PointCompression,
+		sec1::{FromEncodedPoint, ModulusSize, ToEncodedPoint},
+		subtle::CtOption,
+		AffinePoint, CurveArithmetic, FieldBytesSize, PrimeCurve, Scalar,
+	},
+	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
+	signature::{digest::Digest, DigestVerifier},
+	SignatureSize, VerifyingKey,
+};
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct InnerSignedBlobV1 {
+	pub data: InnerSignedBlobV1Data,
+	pub signature: Vec<u8>,
+	pub signer: Vec<u8>,
+	pub id: Id,
+}
+
+impl InnerSignedBlobV1 {
+	pub fn try_verify<C>(&self) -> Result<(), anyhow::Error>
+	where
+		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
+		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
+		SignatureSize<C>: ArrayLength<u8>,
+		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
+		FieldBytesSize<C>: ModulusSize,
+	{
+		let mut hasher = C::Digest::new();
+		hasher.update(self.data.blob.as_slice());
+		hasher.update(&self.data.timestamp.to_be_bytes());
+		hasher.update(self.id.as_slice());
+
+		let verifying_key = VerifyingKey::<C>::from_sec1_bytes(self.signer.as_slice())?;
+		let signature = ecdsa::Signature::from_bytes(self.signature.as_slice().into())?;
+
+		match verifying_key.verify_digest(hasher, &signature) {
+			Ok(_) => Ok(()),
+			Err(_) => Err(anyhow::anyhow!("Failed to verify signature")),
+		}
+	}
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum IntermediateBlobRepresentation {
+	SignedV1(InnerSignedBlobV1),
+}
+
+impl From<InnerSignedBlobV1> for IntermediateBlobRepresentation {
+	fn from(inner: InnerSignedBlobV1) -> Self {
+		IntermediateBlobRepresentation::SignedV1(inner)
+	}
+}
+
+impl IntermediateBlobRepresentation {
+	pub fn blob(&self) -> &[u8] {
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.blob.as_slice(),
+		}
+	}
+
+	pub fn signature(&self) -> &[u8] {
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.signature.as_slice(),
+		}
+	}
+
+	pub fn timestamp(&self) -> u64 {
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.timestamp,
+		}
+	}
+
+	pub fn signer(&self) -> &[u8] {
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.signer.as_slice(),
+		}
+	}
+
+	pub fn signer_hex(&self) -> String {
+		hex::encode(self.signer())
+	}
+
+	pub fn id(&self) -> &[u8] {
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.id.as_slice(),
+		}
+	}
+
+	pub fn verify_signature<C>(&self) -> Result<(), anyhow::Error>
+	where
+		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
+		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
+		SignatureSize<C>: ArrayLength<u8>,
+		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
+		FieldBytesSize<C>: ModulusSize,
+	{
+		match self {
+			IntermediateBlobRepresentation::SignedV1(inner) => inner.try_verify::<C>(),
+		}
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+
+	use super::*;
+	use ecdsa::SigningKey;
+
+	#[test]
+	fn test_cannot_change_id_and_verify() -> Result<(), anyhow::Error> {
+		let blob = InnerSignedBlobV1Data::new(vec![1, 2, 3], 123);
+		let signing_key = SigningKey::<k256::Secp256k1>::random(&mut rand::thread_rng());
+		let signed_blob = blob.try_to_sign(&signing_key)?;
+
+		let mut changed_blob = signed_blob.clone();
+		changed_blob.id = Id::new(vec![1, 2, 3, 4]);
+
+		assert!(changed_blob.try_verify::<k256::Secp256k1>().is_err());
+
+		Ok(())
+	}
+}
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
new file mode 100644
index 000000000..ea8929caa
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
@@ -0,0 +1,73 @@
+use crate::blob::ir::blob::InnerSignedBlobV1;
+use crate::blob::ir::id::Id;
+use ecdsa::{
+	elliptic_curve::{
+		generic_array::ArrayLength,
+		ops::Invert,
+		point::PointCompression,
+		sec1::{FromEncodedPoint, ModulusSize, ToEncodedPoint},
+		subtle::CtOption,
+		AffinePoint, CurveArithmetic, FieldBytesSize, PrimeCurve, Scalar,
+	},
+	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
+	signature::digest::Digest,
+	SignatureSize, SigningKey,
+};
+use serde::{Deserialize, Serialize};
+
+/// The data that should be signed before submission.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct InnerSignedBlobV1Data {
+	pub blob: Vec<u8>,
+	pub timestamp: u64,
+}
+
+impl InnerSignedBlobV1Data {
+	pub fn new(blob: Vec<u8>, timestamp: u64) -> Self {
+		Self { blob, timestamp }
+	}
+
+	/// Computes the id of InnerSignedBlobV1Data
+	pub fn compute_id<C>(&self) -> Id
+	where
+		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
+		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
+		SignatureSize<C>: ArrayLength<u8>,
+		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
+		FieldBytesSize<C>: ModulusSize,
+	{
+		let mut id_hasher = C::Digest::new();
+		id_hasher.update(self.blob.as_slice());
+		id_hasher.update(&self.timestamp.to_be_bytes());
+		Id::new(id_hasher.finalize().to_vec())
+	}
+
+	pub fn try_to_sign<C>(
+		self,
+		signing_key: &SigningKey<C>,
+	) -> Result<InnerSignedBlobV1, anyhow::Error>
+	where
+		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
+		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
+		SignatureSize<C>: ArrayLength<u8>,
+		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
+		FieldBytesSize<C>: ModulusSize,
+	{
+		let id = self.compute_id::<C>();
+		let mut hasher = C::Digest::new();
+		hasher.update(self.blob.as_slice());
+		hasher.update(&self.timestamp.to_be_bytes());
+		hasher.update(id.as_slice());
+		let prehash = hasher.finalize();
+		let prehash_bytes = prehash.as_slice();
+
+		let (signature, _recovery_id) = signing_key.sign_prehash_recoverable(prehash_bytes)?;
+
+		Ok(InnerSignedBlobV1 {
+			data: self,
+			signature: signature.to_vec(),
+			signer: signing_key.verifying_key().to_sec1_bytes().to_vec(),
+			id,
+		})
+	}
+}
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/id.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/id.rs
new file mode 100644
index 000000000..ab1208bde
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/id.rs
@@ -0,0 +1,25 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Id(Vec<u8>);
+
+/// The id for an Ir Blob
+impl Id {
+	pub fn new(id: Vec<u8>) -> Self {
+		Id(id)
+	}
+
+	pub fn as_slice(&self) -> &[u8] {
+		self.0.as_slice()
+	}
+
+	pub fn into_vec(self) -> Vec<u8> {
+		self.0
+	}
+}
+
+impl From<Vec<u8>> for Id {
+	fn from(id: Vec<u8>) -> Self {
+		Id(id)
+	}
+}
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/mod.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/mod.rs
new file mode 100644
index 000000000..9aa758c9e
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/mod.rs
@@ -0,0 +1,3 @@
+pub mod blob;
+pub mod data;
+pub mod id;
diff --git a/protocol-units/da/movement/protocol/util/src/blob/mod.rs b/protocol-units/da/movement/protocol/util/src/blob/mod.rs
new file mode 100644
index 000000000..19d2d13f0
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/blob/mod.rs
@@ -0,0 +1 @@
+pub mod ir;
diff --git a/protocol-units/da/movement/protocol/util/src/ir_blob.rs b/protocol-units/da/movement/protocol/util/src/ir_blob.rs
deleted file mode 100644
index a853d0f9e..000000000
--- a/protocol-units/da/movement/protocol/util/src/ir_blob.rs
+++ /dev/null
@@ -1,253 +0,0 @@
-use ecdsa::{
-	elliptic_curve::{
-		generic_array::ArrayLength,
-		ops::Invert,
-		point::PointCompression,
-		sec1::{FromEncodedPoint, ModulusSize, ToEncodedPoint},
-		subtle::CtOption,
-		AffinePoint, CurveArithmetic, FieldBytesSize, PrimeCurve, Scalar,
-	},
-	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
-	signature::{digest::Digest, DigestVerifier},
-	SignatureSize, SigningKey, VerifyingKey,
-};
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct InnerSignedBlobV1Data {
-	pub blob: Vec<u8>,
-	pub timestamp: u64,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct Id(Vec<u8>);
-
-/// The id for an Ir Blob
-impl Id {
-	pub fn as_slice(&self) -> &[u8] {
-		self.0.as_slice()
-	}
-
-	pub fn into_vec(self) -> Vec<u8> {
-		self.0
-	}
-}
-
-impl From<Vec<u8>> for Id {
-	fn from(id: Vec<u8>) -> Self {
-		Id(id)
-	}
-}
-
-impl InnerSignedBlobV1Data {
-	pub fn new(blob: Vec<u8>, timestamp: u64) -> Self {
-		Self { blob, timestamp }
-	}
-
-	/// Computes the id of InnerSignedBlobV1Data
-	pub fn compute_id<C>(&self) -> Id
-	where
-		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-		SignatureSize<C>: ArrayLength<u8>,
-		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-		FieldBytesSize<C>: ModulusSize,
-	{
-		let mut id_hasher = C::Digest::new();
-		id_hasher.update(self.blob.as_slice());
-		id_hasher.update(&self.timestamp.to_be_bytes());
-		Id(id_hasher.finalize().to_vec())
-	}
-
-	pub fn try_to_sign<C>(
-		self,
-		signing_key: &SigningKey<C>,
-	) -> Result<InnerSignedBlobV1, anyhow::Error>
-	where
-		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-		SignatureSize<C>: ArrayLength<u8>,
-		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-		FieldBytesSize<C>: ModulusSize,
-	{
-		let id = self.compute_id::<C>();
-		let mut hasher = C::Digest::new();
-		hasher.update(self.blob.as_slice());
-		hasher.update(&self.timestamp.to_be_bytes());
-		hasher.update(id.as_slice());
-		let prehash = hasher.finalize();
-		let prehash_bytes = prehash.as_slice();
-
-		let (signature, _recovery_id) = signing_key.sign_prehash_recoverable(prehash_bytes)?;
-
-		Ok(InnerSignedBlobV1 {
-			data: self,
-			signature: signature.to_vec(),
-			signer: signing_key.verifying_key().to_sec1_bytes().to_vec(),
-			id,
-		})
-	}
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct InnerSignedBlobV1 {
-	pub data: InnerSignedBlobV1Data,
-	pub signature: Vec<u8>,
-	pub signer: Vec<u8>,
-	pub id: Id,
-}
-
-impl InnerSignedBlobV1 {
-	pub fn try_verify<C>(&self) -> Result<(), anyhow::Error>
-	where
-		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-		SignatureSize<C>: ArrayLength<u8>,
-		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-		FieldBytesSize<C>: ModulusSize,
-	{
-		let mut hasher = C::Digest::new();
-		hasher.update(self.data.blob.as_slice());
-		hasher.update(&self.data.timestamp.to_be_bytes());
-		hasher.update(self.id.as_slice());
-
-		let verifying_key = VerifyingKey::<C>::from_sec1_bytes(self.signer.as_slice())?;
-		let signature = ecdsa::Signature::from_bytes(self.signature.as_slice().into())?;
-
-		match verifying_key.verify_digest(hasher, &signature) {
-			Ok(_) => Ok(()),
-			Err(_) => Err(anyhow::anyhow!("Failed to verify signature")),
-		}
-	}
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum IntermediateBlobRepresentation {
-	SignedV1(InnerSignedBlobV1),
-}
-
-impl From<InnerSignedBlobV1> for IntermediateBlobRepresentation {
-	fn from(inner: InnerSignedBlobV1) -> Self {
-		IntermediateBlobRepresentation::SignedV1(inner)
-	}
-}
-
-impl IntermediateBlobRepresentation {
-	pub fn blob(&self) -> &[u8] {
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.blob.as_slice(),
-		}
-	}
-
-	pub fn signature(&self) -> &[u8] {
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.signature.as_slice(),
-		}
-	}
-
-	pub fn timestamp(&self) -> u64 {
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.timestamp,
-		}
-	}
-
-	pub fn signer(&self) -> &[u8] {
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.signer.as_slice(),
-		}
-	}
-
-	pub fn signer_hex(&self) -> String {
-		hex::encode(self.signer())
-	}
-
-	pub fn id(&self) -> &[u8] {
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.id.as_slice(),
-		}
-	}
-
-	pub fn verify_signature<C>(&self) -> Result<(), anyhow::Error>
-	where
-		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-		SignatureSize<C>: ArrayLength<u8>,
-		AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-		FieldBytesSize<C>: ModulusSize,
-	{
-		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.try_verify::<C>(),
-		}
-	}
-}
-
-#[cfg(test)]
-pub mod test {
-
-	use super::*;
-
-	#[test]
-	fn test_cannot_change_id_and_verify() -> Result<(), anyhow::Error> {
-		let blob = InnerSignedBlobV1Data::new(vec![1, 2, 3], 123);
-		let signing_key = SigningKey::<k256::Secp256k1>::random(&mut rand::thread_rng());
-		let signed_blob = blob.try_to_sign(&signing_key)?;
-
-		let mut changed_blob = signed_blob.clone();
-		changed_blob.id = Id(vec![1, 2, 3, 4]);
-
-		assert!(changed_blob.try_verify::<k256::Secp256k1>().is_err());
-
-		Ok(())
-	}
-}
-
-pub mod celestia {
-
-	use super::IntermediateBlobRepresentation;
-	use anyhow::Context;
-	use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob as CelestiaBlob};
-
-	impl TryFrom<CelestiaBlob> for IntermediateBlobRepresentation {
-		type Error = anyhow::Error;
-
-		// todo: it would be nice to have this be self describing over the compression and serialization format
-		fn try_from(blob: CelestiaBlob) -> Result<Self, Self::Error> {
-			// decompress blob.data with zstd
-			let decompressed =
-				zstd::decode_all(blob.data.as_slice()).context("failed to decompress blob")?;
-
-			// deserialize the decompressed data with bcs
-			let blob =
-				bcs::from_bytes(decompressed.as_slice()).context("failed to deserialize blob")?;
-
-			Ok(blob)
-		}
-	}
-
-	pub struct CelestiaIntermediateBlobRepresentation(
-		pub IntermediateBlobRepresentation,
-		pub Namespace,
-	);
-
-	/// Tries to form a CelestiaBlob from a CelestiaIntermediateBlobRepresentation
-	impl TryFrom<CelestiaIntermediateBlobRepresentation> for CelestiaBlob {
-		type Error = anyhow::Error;
-
-		fn try_from(ir_blob: CelestiaIntermediateBlobRepresentation) -> Result<Self, Self::Error> {
-			// Extract the inner blob and namespace
-			let CelestiaIntermediateBlobRepresentation(ir_blob, namespace) = ir_blob;
-
-			// Serialize the inner blob with bcs
-			let serialized_blob = bcs::to_bytes(&ir_blob).context("failed to serialize blob")?;
-
-			// Compress the serialized data with zstd
-			let compressed_blob = zstd::encode_all(serialized_blob.as_slice(), 0)
-				.context("failed to compress blob")?;
-
-			// Construct the final CelestiaBlob by assigning the compressed data
-			// and associating it with the provided namespace
-			Ok(CelestiaBlob::new(namespace, compressed_blob, AppVersion::V2)
-				.map_err(|e| anyhow::anyhow!(e))?)
-		}
-	}
-}
diff --git a/protocol-units/da/movement/protocol/util/src/lib.rs b/protocol-units/da/movement/protocol/util/src/lib.rs
index 32171cd10..323cdc3f9 100644
--- a/protocol-units/da/movement/protocol/util/src/lib.rs
+++ b/protocol-units/da/movement/protocol/util/src/lib.rs
@@ -1,3 +1,3 @@
 pub mod config;
 pub use config::*;
-pub mod ir_blob;
+pub mod blob;
diff --git a/protocol-units/da/movement/protocol/verifier/src/lib.rs b/protocol-units/da/movement/protocol/verifier/src/lib.rs
index 24b8aa015..d03ea21b0 100644
--- a/protocol-units/da/movement/protocol/verifier/src/lib.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/lib.rs
@@ -1,4 +1,3 @@
-pub mod celestia;
 pub mod permissioned_signers;
 pub mod signed;
 
diff --git a/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs b/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
index 2b0e3cf81..3cb396ccc 100644
--- a/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
@@ -17,7 +17,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 /// A verifier of Celestia blobs for permissioned signers
@@ -69,7 +69,11 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 {
-	async fn verify(&self, blob: CelestiaBlob, height: u64) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
+	async fn verify(
+		&self,
+		blob: CelestiaBlob,
+		height: u64,
+	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
 		let verified_blob = self.celestia.verify(blob, height).await?;
 		self.known_signers.verify(verified_blob.into_inner(), height).await
 	}
diff --git a/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
index 51b41156c..71cf677ff 100644
--- a/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
@@ -11,7 +11,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::collections::HashSet;
 use tracing::info;
 
diff --git a/protocol-units/da/movement/protocol/verifier/src/v1.rs b/protocol-units/da/movement/protocol/verifier/src/v1.rs
deleted file mode 100644
index 959de99dd..000000000
--- a/protocol-units/da/movement/protocol/verifier/src/v1.rs
+++ /dev/null
@@ -1,168 +0,0 @@
-use crate::Verifier;
-use celestia_rpc::{BlobClient, Client, HeaderClient};
-use celestia_types::{nmt::Namespace, Blob};
-use movement_da_light_node_proto::VerificationMode;
-use std::sync::Arc;
-
-#[derive(Clone)]
-pub struct V1Verifier {
-	pub client: Arc<Client>,
-	pub namespace: Namespace,
-}
-
-#[tonic::async_trait]
-impl Verifier for V1Verifier {
-	/// All verification is the same for now
-	async fn verify(
-		&self,
-		_verification_mode: VerificationMode,
-		blob: &[u8],
-		height: u64,
-	) -> Result<bool, anyhow::Error> {
-		let celestia_blob = Blob::new(self.namespace.clone(), blob.to_vec())?;
-
-		celestia_blob.validate()?;
-
-		// wait for the header to be at the correct height
-		self.client.header_wait_for_height(height).await?;
-
-		// get the root
-		let dah = self.client.header_get_by_height(height).await?.dah;
-		let root_hash = dah.row_root(0).ok_or(anyhow::anyhow!("No root hash found"))?;
-
-		// get the proof
-		let proofs = self
-			.client
-			.blob_get_proof(height, self.namespace.clone(), celestia_blob.commitment)
-			.await?;
-
-		// get the leaves
-		let leaves = celestia_blob.to_shares()?;
-
-		// check if included
-		for proof in proofs.iter() {
-			proof
-				.verify_complete_namespace(&root_hash, &leaves, self.namespace.into())
-				.map_err(|e| anyhow::anyhow!("Failed to verify proof: {:?}", e))?;
-		}
-
-		Ok(true)
-	}
-
-	async fn verify_cowboy(
-		&self,
-		_verification_mode: VerificationMode,
-		_blob: &[u8],
-		_height: u64,
-	) -> Result<bool, anyhow::Error> {
-		unimplemented!()
-	}
-
-	async fn verify_m_of_n(
-		&self,
-		_verification_mode: VerificationMode,
-		_blob: &[u8],
-		_height: u64,
-	) -> Result<bool, anyhow::Error> {
-		unimplemented!()
-	}
-
-	async fn verifiy_validator_in(
-		&self,
-		_verification_mode: VerificationMode,
-		_blob: &[u8],
-		_height: u64,
-	) -> Result<bool, anyhow::Error> {
-		unimplemented!()
-	}
-}
-
-#[cfg(all(test, feature = "integration-tests"))]
-mod tests {
-	use super::*;
-	use celestia_types::blob::GasPrice;
-
-	/// todo: Investigate why this test sporadically fails.
-	#[tokio::test]
-	pub async fn test_valid_verifies() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		Ok(())
-	}
-
-	#[tokio::test]
-	pub async fn test_absent_does_not_verify() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-		let client = Arc::new(config.connect_celestia().await?);
-		let celestia_namespace = config.celestia_namespace();
-
-		let verifier = V1Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		let absent_data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 7];
-
-		let absent_included = verifier.verify(VerificationMode::Cowboy, &absent_data, height).await;
-
-		match absent_included {
-			Ok(_) => {
-				assert!(false, "Should not have verified")
-			}
-			Err(_) => {}
-		}
-
-		Ok(())
-	}
-
-	#[tokio::test]
-	pub async fn test_wrong_height_does_not_verify() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-		let client = Arc::new(config.connect_celestia().await?);
-		let celestia_namespace = config.celestia_namespace();
-
-		let verifier = V1Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		let wrong_height_included =
-			verifier.verify(VerificationMode::Cowboy, &data, height + 1).await;
-
-		match wrong_height_included {
-			Ok(_) => {
-				assert!(false, "Should not have verified")
-			}
-			Err(_) => {}
-		}
-
-		Ok(())
-	}
-}
diff --git a/protocol-units/da/movement/providers/celestia/Cargo.toml b/protocol-units/da/movement/providers/celestia/Cargo.toml
index 8e645f3c1..a1e2ed451 100644
--- a/protocol-units/da/movement/providers/celestia/Cargo.toml
+++ b/protocol-units/da/movement/providers/celestia/Cargo.toml
@@ -18,6 +18,9 @@ prost = { workspace = true }
 [build-dependencies]
 movement-da-light-node-da = { workspace = true }
 movement-da-util = { workspace = true }
+movement-da-light-node-verifier = { workspace = true }
+celestia-rpc = { workspace = true }
+celestia-types = { workspace = true }
 
 [features]
 default = []
diff --git a/protocol-units/da/movement/providers/celestia/src/lib.rs b/protocol-units/da/movement/providers/celestia/src/lib.rs
index 87a16cbec..42e3357ce 100644
--- a/protocol-units/da/movement/providers/celestia/src/lib.rs
+++ b/protocol-units/da/movement/providers/celestia/src/lib.rs
@@ -1,2 +1,3 @@
 pub mod blob;
 pub mod da;
+pub mod verifier;
diff --git a/protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs b/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
similarity index 96%
rename from protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs
rename to protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
index 5834d1b90..079a12b0d 100644
--- a/protocol-units/da/movement/protocol/verifier/src/celestia/mod.rs
+++ b/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
@@ -1,7 +1,7 @@
-use crate::{Error, Verified, VerifierOperations};
 use celestia_rpc::Client;
 use celestia_types::{nmt::Namespace, Blob};
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
+use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 #[derive(Clone)]
diff --git a/protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs b/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
similarity index 93%
rename from protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs
rename to protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
index 92d480c73..8c6e33f1b 100644
--- a/protocol-units/da/movement/protocol/verifier/src/celestia/pessimistic.rs
+++ b/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
@@ -1,7 +1,7 @@
-use crate::{Error, Verified, VerifierOperations};
 use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob};
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
+use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::sync::Arc;
 
 #[derive(Clone)]

From ff9136af962f047cae9fc143ad429d439d83c83c Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 15:35:10 -0800
Subject: [PATCH 06/43] fix: reworking stream api.

---
 Cargo.lock                                    |   2 +
 .../da/movement/protocol/da/Cargo.toml        |   4 +-
 .../da/movement/protocol/da/src/fifo/mod.rs   |   2 +
 .../da/movement/protocol/da/src/lib.rs        | 113 ++++++++++++++++--
 4 files changed, 113 insertions(+), 8 deletions(-)
 create mode 100644 protocol-units/da/movement/protocol/da/src/fifo/mod.rs

diff --git a/Cargo.lock b/Cargo.lock
index ea823782f..27d0cd2bb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10262,10 +10262,12 @@ dependencies = [
 name = "movement-da-light-node-da"
 version = "0.0.2"
 dependencies = [
+ "async-stream",
  "futures",
  "movement-da-light-node-proto",
  "movement-da-util",
  "thiserror 1.0.69",
+ "tokio-stream",
 ]
 
 [[package]]
diff --git a/protocol-units/da/movement/protocol/da/Cargo.toml b/protocol-units/da/movement/protocol/da/Cargo.toml
index 4e5f1760c..6dd62ee9c 100644
--- a/protocol-units/da/movement/protocol/da/Cargo.toml
+++ b/protocol-units/da/movement/protocol/da/Cargo.toml
@@ -16,7 +16,9 @@ integration-tests = []
 thiserror = { workspace = true }
 movement-da-util = { workspace = true }
 movement-da-light-node-proto = { workspace = true }
-futures = { workspace = true}
+futures = { workspace = true }
+tokio-stream = { workspace = true }
+async-stream = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/protocol-units/da/movement/protocol/da/src/fifo/mod.rs b/protocol-units/da/movement/protocol/da/src/fifo/mod.rs
new file mode 100644
index 000000000..bce153c20
--- /dev/null
+++ b/protocol-units/da/movement/protocol/da/src/fifo/mod.rs
@@ -0,0 +1,2 @@
+/// A First-In-First-Out (FIFO) DA implementation.
+pub struct Fifo {}
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index add0d1d85..3ec8c91fd 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -1,7 +1,14 @@
-use movement_da_light_node_proto::Blob;
+pub mod fifo;
+
 use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
 use std::error;
 use std::future::Future;
+use tokio_stream::{Stream, StreamExt};
+
+pub type CertificateStream =
+	std::pin::Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send>>;
+pub type IntermediateBlobRepresentationStream =
+	std::pin::Pin<Box<dyn Stream<Item = Result<IntermediateBlobRepresentation, DaError>> + Send>>;
 
 /// A blob meant for the DA.
 #[derive(Debug, Clone)]
@@ -24,6 +31,22 @@ impl DaBlob {
 	}
 }
 
+/// A height for a blob on the DA.
+#[derive(Debug, Clone)]
+pub struct DaHeight(u64);
+
+impl DaHeight {
+	/// Creates a new [DaHeight] from a u64.
+	pub fn new(height: u64) -> Self {
+		Self(height)
+	}
+
+	/// Returns the inner u64.
+	pub fn as_u64(&self) -> u64 {
+		self.0
+	}
+}
+
 /// A certificate from consensus indicating a height.
 #[derive(Debug, Clone)]
 pub enum Certificate {
@@ -46,11 +69,17 @@ pub enum DaError {
 	Internal(String),
 }
 
-pub trait DaOperations {
+pub trait DaOperations
+where
+	Self: Send + Sync + 'static,
+{
 	/// Submits a blob to the DA.
 	///
-	/// A DA must allow for submission of raw [DaBlob]s and return a [Blob].
-	fn submit_blob(&self, data: DaBlob) -> impl Future<Output = Result<Blob, DaError>>;
+	/// A DA must allow for submission of raw [DaBlob]s and return a [IntermediateBlobRepresentation].
+	fn submit_blob(
+		&self,
+		data: DaBlob,
+	) -> impl Future<Output = Result<IntermediateBlobRepresentation, DaError>>;
 
 	/// Gets the blobs at a given height.
 	///
@@ -58,13 +87,14 @@ pub trait DaOperations {
 	fn get_ir_blobs_at_height(
 		&self,
 		height: u64,
-	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>>;
+	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + Sync + 'static;
 
 	/// Gets the IR blobs at a given height as would be used by the stream.
 	fn get_ir_blobs_at_height_for_stream(
 		&self,
 		height: u64,
-	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> {
+	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + Sync + 'static
+	{
 		async move {
 			// get the blobs at a given height, if the error is NonFatal, return an empty vec
 			match self.get_ir_blobs_at_height(height).await {
@@ -78,5 +108,74 @@ pub trait DaOperations {
 	/// Streams certificates from the DA.
 	///
 	/// A DA must allow for streaming of [Certificate]s. This is used to inform [Blob] polling.
-	fn stream_certificates(&self) -> impl futures::Stream<Item = Result<Certificate, DaError>>;
+	fn stream_certificates(&self) -> impl Future<Output = Result<CertificateStream, DaError>>;
+
+	/// Streams [IntermediateBlobRepresentation]s from the between two heights.
+	///
+	/// A DA implements a standard API for streaming [IntermediateBlobRepresentation]s.
+	fn stream_ir_blobs_between_heights(
+		&self,
+		start_height: u64,
+		end_height: u64,
+	) -> impl Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> {
+		async move {
+			let stream = async_stream::try_stream! {
+
+				for height in start_height..end_height {
+					let blobs = self.get_ir_blobs_at_height_for_stream(height).await?;
+					for blob in blobs {
+						yield blob;
+					}
+				}
+
+			};
+
+			Ok(Box::pin(stream) as IntermediateBlobRepresentationStream)
+		}
+	}
+
+	/// Streams ir blobs from a certain height.
+	///
+	/// A DA implements a standard API for streaming [IntermediateBlobRepresentation]s.
+	fn stream_ir_blobs_from_height(
+		&self,
+		start_height: u64,
+	) -> impl Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> {
+		async move {
+			let stream = async_stream::try_stream! {
+
+				// record the last height
+				let mut last_height = start_height;
+
+				// listen to the certificate stream to find the next height
+				let mut certificate_stream = self.stream_certificates().await?;
+
+				// loop through the certificate stream
+				while let Some(certificate) = certificate_stream.next().await {
+					match certificate {
+						Ok(Certificate::Height(height)) => {
+							// if the certificate height is greater than the last height, stream the blobs between the last height and the certificate height
+							if height > last_height {
+								let blobs = self.stream_ir_blobs_between_heights(last_height, height).await?;
+								for blob in blobs {
+									yield Ok(blob);
+								}
+								last_height = height;
+							}
+
+						}
+						Ok(Certificate::Nolo) => {
+							// do nothing
+						}
+						Err(e) => {
+							yield Err(e);
+						}
+					}
+				}
+
+			};
+
+			Ok(stream)
+		}
+	}
 }

From 480bee705704d2584be32c2810bcde69404b0b92 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Mon, 13 Jan 2025 21:04:50 -0800
Subject: [PATCH 07/43] fix: types.

---
 Cargo.lock                                    |   2 +
 .../da/movement/protocol/da/Cargo.toml        |   2 +
 .../da/movement/protocol/da/src/lib.rs        | 134 +++++++-----------
 3 files changed, 53 insertions(+), 85 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 27d0cd2bb..001cd3e47 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10267,7 +10267,9 @@ dependencies = [
  "movement-da-light-node-proto",
  "movement-da-util",
  "thiserror 1.0.69",
+ "tokio",
  "tokio-stream",
+ "tracing",
 ]
 
 [[package]]
diff --git a/protocol-units/da/movement/protocol/da/Cargo.toml b/protocol-units/da/movement/protocol/da/Cargo.toml
index 6dd62ee9c..7e4c76507 100644
--- a/protocol-units/da/movement/protocol/da/Cargo.toml
+++ b/protocol-units/da/movement/protocol/da/Cargo.toml
@@ -19,6 +19,8 @@ movement-da-light-node-proto = { workspace = true }
 futures = { workspace = true }
 tokio-stream = { workspace = true }
 async-stream = { workspace = true }
+tokio = { workspace = true }
+tracing = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 3ec8c91fd..ba05a7079 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -1,31 +1,30 @@
 pub mod fifo;
 
+use async_stream::try_stream;
 use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
-use std::error;
 use std::future::Future;
+use std::pin::Pin;
 use tokio_stream::{Stream, StreamExt};
+use tracing::warn;
 
-pub type CertificateStream =
-	std::pin::Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send>>;
-pub type IntermediateBlobRepresentationStream =
-	std::pin::Pin<Box<dyn Stream<Item = Result<IntermediateBlobRepresentation, DaError>> + Send>>;
+pub type CertificateStream<'a> =
+	Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send + 'a>>;
+pub type IntermediateBlobRepresentationStream<'a> =
+	Pin<Box<dyn Stream<Item = Result<IntermediateBlobRepresentation, DaError>> + Send + 'a>>;
 
 /// A blob meant for the DA.
 #[derive(Debug, Clone)]
 pub struct DaBlob(Vec<u8>);
 
 impl DaBlob {
-	/// Creates a new [DaBlob] from a vector of bytes.
 	pub fn new(data: Vec<u8>) -> Self {
 		Self(data)
 	}
 
-	/// Returns a reference to the inner vector of bytes.
 	pub fn as_ref(&self) -> &[u8] {
-		self.0.as_slice()
+		&self.0
 	}
 
-	/// Consumes the [DaBlob] and returns the inner vector of bytes.
 	pub fn into_inner(self) -> Vec<u8> {
 		self.0
 	}
@@ -36,12 +35,10 @@ impl DaBlob {
 pub struct DaHeight(u64);
 
 impl DaHeight {
-	/// Creates a new [DaHeight] from a u64.
 	pub fn new(height: u64) -> Self {
 		Self(height)
 	}
 
-	/// Returns the inner u64.
 	pub fn as_u64(&self) -> u64 {
 		self.0
 	}
@@ -50,132 +47,99 @@ impl DaHeight {
 /// A certificate from consensus indicating a height.
 #[derive(Debug, Clone)]
 pub enum Certificate {
-	/// A certificate from consensus indicating a height.
 	Height(u64),
-	/// A certificate that cannot be interpreted for a height.
 	Nolo,
 }
 
-/// Errors thrown by [Da].
+/// Errors thrown by the DA.
 #[derive(Debug, thiserror::Error)]
 pub enum DaError {
 	#[error("blob submission error: {0}")]
-	BlobSubmission(#[source] Box<dyn error::Error + Send + Sync>),
+	BlobSubmission(#[source] Box<dyn std::error::Error + Send + Sync>),
 	#[error("blobs at height error: {0}")]
-	BlobsAtHeight(#[source] Box<dyn error::Error + Send + Sync>),
-	#[error("blobs at height fatal error: {0}")]
-	BlobsAtHeightNonFatal(#[source] Box<dyn error::Error + Send + Sync>),
+	BlobsAtHeight(#[source] Box<dyn std::error::Error + Send + Sync>),
 	#[error("internal error: {0}")]
 	Internal(String),
 }
 
-pub trait DaOperations
-where
-	Self: Send + Sync + 'static,
-{
-	/// Submits a blob to the DA.
-	///
-	/// A DA must allow for submission of raw [DaBlob]s and return a [IntermediateBlobRepresentation].
+/// Trait for DA operations.
+pub trait DaOperations: Send + Sync {
 	fn submit_blob(
 		&self,
 		data: DaBlob,
-	) -> impl Future<Output = Result<IntermediateBlobRepresentation, DaError>>;
+	) -> Pin<Box<dyn Future<Output = Result<IntermediateBlobRepresentation, DaError>> + Send + '_>>;
 
-	/// Gets the blobs at a given height.
-	///
-	/// A DA must allow for retrieval of [IntermediateBlobRepresentation]s at a given height.
 	fn get_ir_blobs_at_height(
 		&self,
 		height: u64,
-	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + Sync + 'static;
+	) -> Pin<
+		Box<dyn Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + '_>,
+	>;
 
-	/// Gets the IR blobs at a given height as would be used by the stream.
-	fn get_ir_blobs_at_height_for_stream(
+	fn stream_certificates(
 		&self,
-		height: u64,
-	) -> impl Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + Sync + 'static
-	{
-		async move {
-			// get the blobs at a given height, if the error is NonFatal, return an empty vec
-			match self.get_ir_blobs_at_height(height).await {
-				Ok(blobs) => Ok(blobs),
-				Err(DaError::BlobsAtHeightNonFatal(_)) => Ok(vec![]),
-				Err(e) => Err(e),
-			}
-		}
-	}
-
-	/// Streams certificates from the DA.
-	///
-	/// A DA must allow for streaming of [Certificate]s. This is used to inform [Blob] polling.
-	fn stream_certificates(&self) -> impl Future<Output = Result<CertificateStream, DaError>>;
+	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>>;
 
-	/// Streams [IntermediateBlobRepresentation]s from the between two heights.
-	///
-	/// A DA implements a standard API for streaming [IntermediateBlobRepresentation]s.
 	fn stream_ir_blobs_between_heights(
 		&self,
 		start_height: u64,
 		end_height: u64,
-	) -> impl Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> {
-		async move {
-			let stream = async_stream::try_stream! {
-
+	) -> Pin<
+		Box<dyn Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> + Send + '_>,
+	> {
+		let fut = async move {
+			let stream = try_stream! {
 				for height in start_height..end_height {
-					let blobs = self.get_ir_blobs_at_height_for_stream(height).await?;
+					let blobs = self.get_ir_blobs_at_height(height).await?;
 					for blob in blobs {
 						yield blob;
 					}
 				}
-
 			};
-
 			Ok(Box::pin(stream) as IntermediateBlobRepresentationStream)
-		}
+		};
+		Box::pin(fut)
 	}
 
-	/// Streams ir blobs from a certain height.
-	///
-	/// A DA implements a standard API for streaming [IntermediateBlobRepresentation]s.
 	fn stream_ir_blobs_from_height(
 		&self,
 		start_height: u64,
-	) -> impl Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> {
-		async move {
-			let stream = async_stream::try_stream! {
-
-				// record the last height
+	) -> Pin<
+		Box<dyn Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> + Send + '_>,
+	> {
+		let fut = async move {
+			let certificate_stream = self.stream_certificates().await?;
+			let stream = try_stream! {
 				let mut last_height = start_height;
+				let mut certificate_stream = certificate_stream;
 
-				// listen to the certificate stream to find the next height
-				let mut certificate_stream = self.stream_certificates().await?;
-
-				// loop through the certificate stream
 				while let Some(certificate) = certificate_stream.next().await {
 					match certificate {
-						Ok(Certificate::Height(height)) => {
-							// if the certificate height is greater than the last height, stream the blobs between the last height and the certificate height
-							if height > last_height {
-								let blobs = self.stream_ir_blobs_between_heights(last_height, height).await?;
-								for blob in blobs {
-									yield Ok(blob);
-								}
-								last_height = height;
+						Ok(Certificate::Height(height)) if height > last_height => {
+							let blob_stream = self
+								.stream_ir_blobs_between_heights(last_height, height)
+								.await?;
+							tokio::pin!(blob_stream);
+
+							while let Some(blob) = blob_stream.next().await {
+								yield blob?;
 							}
 
+							last_height = height;
 						}
 						Ok(Certificate::Nolo) => {
-							// do nothing
+							// Ignore Nolo
 						}
 						Err(e) => {
-							yield Err(e);
+							warn!("failed to process stream");
 						}
+						_ => warn!("back fetch")
 					}
 				}
-
 			};
 
-			Ok(stream)
-		}
+			Ok(Box::pin(stream) as IntermediateBlobRepresentationStream)
+		};
+		Box::pin(fut)
 	}
 }

From 7aa181b800759c5f3e46481dd716c202ce90c57f Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Tue, 14 Jan 2025 13:53:52 -0800
Subject: [PATCH 08/43] fix: refactor celestia provider.

---
 Cargo.lock                                    |   9 +-
 .../da/movement/protocol/da/Cargo.toml        |   1 +
 .../da/movement/protocol/da/src/fifo/mod.rs   |   2 -
 .../da/movement/protocol/da/src/lib.rs        |  79 +++--
 .../da/movement/protocol/da/src/mock/mod.rs   | 218 ++++++++++++
 .../movement/protocol/light-node/Cargo.toml   |   1 +
 .../protocol/light-node/src/passthrough.rs    | 331 ++----------------
 .../protocol/util/src/blob/ir/blob.rs         |  20 +-
 .../verifier/src/permissioned_signers/mod.rs  |  80 -----
 .../protocol/verifier/src/signed/mod.rs       |  14 +-
 .../da/movement/providers/celestia/Cargo.toml |  12 +-
 .../providers/celestia/src/blob/ir.rs         |  39 +++
 .../providers/celestia/src/blob/mod.rs        |   1 +
 .../movement/providers/celestia/src/da/mod.rs | 123 ++++++-
 .../da/movement/providers/celestia/src/lib.rs |   2 +-
 .../providers/celestia/src/verifier/mod.rs    |  12 +-
 .../celestia/src/verifier/pessimistic.rs      |  10 +-
 17 files changed, 488 insertions(+), 466 deletions(-)
 delete mode 100644 protocol-units/da/movement/protocol/da/src/fifo/mod.rs
 create mode 100644 protocol-units/da/movement/protocol/da/src/mock/mod.rs
 delete mode 100644 protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
 create mode 100644 protocol-units/da/movement/providers/celestia/src/blob/ir.rs

diff --git a/Cargo.lock b/Cargo.lock
index 001cd3e47..1cd2bffc8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10212,6 +10212,7 @@ dependencies = [
  "k256",
  "memseq",
  "movement-algs",
+ "movement-da-light-node-da",
  "movement-da-light-node-prevalidator",
  "movement-da-light-node-proto",
  "movement-da-light-node-verifier",
@@ -10235,13 +10236,16 @@ dependencies = [
 name = "movement-da-light-node-celestia"
 version = "0.0.2"
 dependencies = [
+ "anyhow",
+ "async-stream",
+ "bcs 0.1.4",
  "celestia-rpc",
  "celestia-types",
  "movement-da-light-node-da",
- "movement-da-light-node-verifier",
  "movement-da-util",
- "prost 0.13.3",
  "tonic 0.12.3",
+ "tracing",
+ "zstd 0.13.2",
 ]
 
 [[package]]
@@ -10262,6 +10266,7 @@ dependencies = [
 name = "movement-da-light-node-da"
 version = "0.0.2"
 dependencies = [
+ "anyhow",
  "async-stream",
  "futures",
  "movement-da-light-node-proto",
diff --git a/protocol-units/da/movement/protocol/da/Cargo.toml b/protocol-units/da/movement/protocol/da/Cargo.toml
index 7e4c76507..52942798c 100644
--- a/protocol-units/da/movement/protocol/da/Cargo.toml
+++ b/protocol-units/da/movement/protocol/da/Cargo.toml
@@ -21,6 +21,7 @@ tokio-stream = { workspace = true }
 async-stream = { workspace = true }
 tokio = { workspace = true }
 tracing = { workspace = true }
+anyhow = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/protocol-units/da/movement/protocol/da/src/fifo/mod.rs b/protocol-units/da/movement/protocol/da/src/fifo/mod.rs
deleted file mode 100644
index bce153c20..000000000
--- a/protocol-units/da/movement/protocol/da/src/fifo/mod.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-/// A First-In-First-Out (FIFO) DA implementation.
-pub struct Fifo {}
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index ba05a7079..73d9ab84d 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -1,7 +1,7 @@
-pub mod fifo;
+pub mod mock;
 
 use async_stream::try_stream;
-use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::DaBlob;
 use std::future::Future;
 use std::pin::Pin;
 use tokio_stream::{Stream, StreamExt};
@@ -9,26 +9,7 @@ use tracing::warn;
 
 pub type CertificateStream<'a> =
 	Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send + 'a>>;
-pub type IntermediateBlobRepresentationStream<'a> =
-	Pin<Box<dyn Stream<Item = Result<IntermediateBlobRepresentation, DaError>> + Send + 'a>>;
-
-/// A blob meant for the DA.
-#[derive(Debug, Clone)]
-pub struct DaBlob(Vec<u8>);
-
-impl DaBlob {
-	pub fn new(data: Vec<u8>) -> Self {
-		Self(data)
-	}
-
-	pub fn as_ref(&self) -> &[u8] {
-		&self.0
-	}
-
-	pub fn into_inner(self) -> Vec<u8> {
-		self.0
-	}
-}
+pub type DaBlobStream<'a> = Pin<Box<dyn Stream<Item = Result<DaBlob, DaError>> + Send + 'a>>;
 
 /// A height for a blob on the DA.
 #[derive(Debug, Clone)]
@@ -58,6 +39,12 @@ pub enum DaError {
 	BlobSubmission(#[source] Box<dyn std::error::Error + Send + Sync>),
 	#[error("blobs at height error: {0}")]
 	BlobsAtHeight(#[source] Box<dyn std::error::Error + Send + Sync>),
+	#[error("non-fatal blobs at height error: {0}")]
+	NonFatalBlobsAtHeight(#[source] Box<dyn std::error::Error + Send + Sync>),
+	#[error("certificate error: {0}")]
+	Certificate(#[source] Box<dyn std::error::Error + Send + Sync>),
+	#[error("non-fatal certificate error: {0}")]
+	NonFatalCertificate(#[source] Box<dyn std::error::Error + Send + Sync>),
 	#[error("internal error: {0}")]
 	Internal(String),
 }
@@ -67,14 +54,28 @@ pub trait DaOperations: Send + Sync {
 	fn submit_blob(
 		&self,
 		data: DaBlob,
-	) -> Pin<Box<dyn Future<Output = Result<IntermediateBlobRepresentation, DaError>> + Send + '_>>;
+	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>>;
 
 	fn get_ir_blobs_at_height(
 		&self,
 		height: u64,
-	) -> Pin<
-		Box<dyn Future<Output = Result<Vec<IntermediateBlobRepresentation>, DaError>> + Send + '_>,
-	>;
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>>;
+
+	fn get_ir_blobs_at_height_for_stream(
+		&self,
+		height: u64,
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
+		Box::pin(async move {
+			let result = self.get_ir_blobs_at_height(height).await;
+			match result {
+				Ok(blobs) => Ok(blobs),
+				Err(e) => {
+					warn!("failed to get blobs at height: {}", e);
+					Ok(vec![])
+				}
+			}
+		})
+	}
 
 	fn stream_certificates(
 		&self,
@@ -84,19 +85,17 @@ pub trait DaOperations: Send + Sync {
 		&self,
 		start_height: u64,
 		end_height: u64,
-	) -> Pin<
-		Box<dyn Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> + Send + '_>,
-	> {
+	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
 		let fut = async move {
 			let stream = try_stream! {
 				for height in start_height..end_height {
-					let blobs = self.get_ir_blobs_at_height(height).await?;
+					let blobs = self.get_ir_blobs_at_height_for_stream(height).await?;
 					for blob in blobs {
 						yield blob;
 					}
 				}
 			};
-			Ok(Box::pin(stream) as IntermediateBlobRepresentationStream)
+			Ok(Box::pin(stream) as DaBlobStream)
 		};
 		Box::pin(fut)
 	}
@@ -104,9 +103,7 @@ pub trait DaOperations: Send + Sync {
 	fn stream_ir_blobs_from_height(
 		&self,
 		start_height: u64,
-	) -> Pin<
-		Box<dyn Future<Output = Result<IntermediateBlobRepresentationStream, DaError>> + Send + '_>,
-	> {
+	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
 		let fut = async move {
 			let certificate_stream = self.stream_certificates().await?;
 			let stream = try_stream! {
@@ -130,15 +127,23 @@ pub trait DaOperations: Send + Sync {
 						Ok(Certificate::Nolo) => {
 							// Ignore Nolo
 						}
+						// Warn log non-fatal certificate errors
+						Err(DaError::NonFatalCertificate(e)) => {
+							warn!("non-fatal certificate error: {}", e);
+						}
+						// Exit on all other errors
 						Err(e) => {
-							warn!("failed to process stream");
+							yield Err(e)?;
+						}
+						// If height is less than last height, ignore
+						_ => {
+							warn!("ignoring certificate with height less than last height");
 						}
-						_ => warn!("back fetch")
 					}
 				}
 			};
 
-			Ok(Box::pin(stream) as IntermediateBlobRepresentationStream)
+			Ok(Box::pin(stream) as DaBlobStream)
 		};
 		Box::pin(fut)
 	}
diff --git a/protocol-units/da/movement/protocol/da/src/mock/mod.rs b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
new file mode 100644
index 000000000..dc60046ed
--- /dev/null
+++ b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
@@ -0,0 +1,218 @@
+use crate::{Certificate, CertificateStream, DaError, DaOperations};
+use movement_da_util::blob::ir::blob::DaBlob;
+use std::collections::{HashMap, VecDeque};
+use std::future::Future;
+use std::pin::Pin;
+use std::sync::{Arc, Mutex};
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::ReceiverStream;
+
+/// A mock DA implementation, useful for testing.
+pub struct Mock {
+	// A queue for certificates.
+	certificate_queue: Arc<Mutex<VecDeque<Result<Certificate, DaError>>>>,
+
+	// Map for mocking results of `get_ir_blobs_at_height`.
+	height_results: Arc<Mutex<HashMap<u64, Result<Vec<DaBlob>, DaError>>>>,
+
+	// Collection to store submitted blobs.
+	submitted_blobs: Arc<Mutex<Vec<DaBlob>>>,
+}
+
+impl Mock {
+	/// Creates a new `Mock` instance.
+	pub fn new() -> Self {
+		Self {
+			certificate_queue: Arc::new(Mutex::new(VecDeque::new())),
+			height_results: Arc::new(Mutex::new(HashMap::new())),
+			submitted_blobs: Arc::new(Mutex::new(Vec::new())),
+		}
+	}
+
+	/// Adds a certificate to the queue.
+	pub fn add_certificate(
+		&self,
+		certificate: Result<Certificate, DaError>,
+	) -> Result<(), DaError> {
+		let mut queue = self.certificate_queue.lock().map_err(|_| {
+			DaError::Internal("Failed to acquire lock for certificate queue".to_string())
+		})?;
+		queue.push_back(certificate);
+		Ok(())
+	}
+
+	/// Sets the result for a specific height.
+	pub fn set_height_result(
+		&self,
+		height: u64,
+		result: Result<Vec<DaBlob>, DaError>,
+	) -> Result<(), DaError> {
+		let mut height_results = self.height_results.lock().map_err(|_| {
+			DaError::Internal("Failed to acquire lock for height results".to_string())
+		})?;
+		height_results.insert(height, result);
+		Ok(())
+	}
+
+	/// Gets all submitted blobs.
+	pub fn get_submitted_blobs(&self) -> Result<Vec<DaBlob>, DaError> {
+		self.submitted_blobs.lock().map(|blobs| blobs.clone()).map_err(|_| {
+			DaError::Internal("Failed to acquire lock for submitted blobs".to_string())
+		})
+	}
+}
+
+impl DaOperations for Mock {
+	fn submit_blob(
+		&self,
+		data: DaBlob,
+	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>> {
+		let submitted_blobs = self.submitted_blobs.clone();
+		Box::pin(async move {
+			submitted_blobs
+				.lock()
+				.map_err(|_| {
+					DaError::Internal("Failed to acquire lock for submitted blobs".to_string())
+				})?
+				.push(data);
+			Ok(())
+		})
+	}
+
+	fn get_ir_blobs_at_height(
+		&self,
+		height: u64,
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
+		let height_results = self.height_results.clone();
+		Box::pin(async move {
+			height_results
+				.lock()
+				.map_err(|_| {
+					DaError::Internal("Failed to acquire lock for height results".to_string())
+				})?
+				.remove(&height)
+				.ok_or_else(|| DaError::Internal(format!("No result set for height {}", height)))?
+		})
+	}
+
+	fn stream_certificates(
+		&self,
+	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>> {
+		let certificate_queue = self.certificate_queue.clone();
+
+		Box::pin(async move {
+			// Create an mpsc channel for streaming certificates.
+			let (sender, receiver) = mpsc::channel(10);
+
+			// Move certificates from the queue into the channel in a background task.
+			let queue_worker = async move {
+				loop {
+					// Lock the queue and pop the next certificate.
+					let certificate = {
+						let mut queue = certificate_queue.lock().unwrap();
+						queue.pop_front()
+					};
+
+					match certificate {
+						Some(cert) => {
+							if sender.send(cert).await.is_err() {
+								break; // Stop if the receiver has been dropped.
+							}
+						}
+						None => break, // Exit the loop when the queue is empty.
+					}
+				}
+			};
+
+			tokio::spawn(queue_worker);
+
+			// Wrap the receiver in a `ReceiverStream` and return it.
+			let stream = ReceiverStream::new(receiver);
+			Ok(Box::pin(stream) as CertificateStream)
+		})
+	}
+}
+
+#[cfg(test)]
+pub mod test {
+
+	use super::*;
+	use tokio_stream::StreamExt;
+
+	#[tokio::test]
+	async fn test_stream_stays_open_with_non_fatal_certificate() -> Result<(), anyhow::Error> {
+		// Create a mock DA instance.
+		let mock = Mock::new();
+
+		// Add a mix of valid certificates and a non-fatal error to the queue.
+		mock.add_certificate(Ok(Certificate::Height(1)))?;
+		mock.add_certificate(Err(DaError::NonFatalCertificate(
+			"non-fatal error".to_string().into(),
+		)))?;
+		mock.add_certificate(Ok(Certificate::Height(2)))?;
+
+		// Get the stream of certificates.
+		let certificate_stream = mock.stream_certificates().await?;
+		tokio::pin!(certificate_stream);
+
+		let mut results = Vec::new();
+
+		// Process the stream.
+		while let Some(cert) = certificate_stream.next().await {
+			match cert {
+				Ok(Certificate::Height(height)) => results.push(Ok(height)),
+				Err(e) => results.push(Err(e.to_string())),
+				_ => {}
+			}
+		}
+
+		// Validate the results.
+		assert_eq!(
+			results,
+			vec![
+				Ok(1),                                                           // First certificate
+				Err("non-fatal certificate error: non-fatal error".to_string()), // Non-fatal error
+				Ok(2),                                                           // Second certificate
+			]
+		);
+
+		Ok(())
+	}
+
+	#[tokio::test]
+	async fn test_stream_closes_with_fatal() -> Result<(), anyhow::Error> {
+		// Create a mock DA instance.
+		let mock = Mock::new();
+
+		// Add a mix of valid certificates and a fatal error to the queue.
+		mock.add_certificate(Ok(Certificate::Height(1)))?;
+		mock.add_certificate(Err(DaError::Internal("fatal error".to_string())))?;
+		mock.add_certificate(Ok(Certificate::Height(2)))?;
+
+		// Get the stream of certificates.
+		let certificate_stream = mock.stream_certificates().await?;
+		tokio::pin!(certificate_stream);
+
+		let mut results = Vec::new();
+
+		// Process the stream.
+		while let Some(cert) = certificate_stream.next().await {
+			match cert {
+				Ok(Certificate::Height(height)) => results.push(Ok(height)),
+				Err(e) => results.push(Err(e.to_string())),
+				_ => {}
+			}
+		}
+
+		// Validate the results.
+		assert_eq!(
+			results,
+			vec![
+				Ok(1),                                          // First certificate
+				Err("internal error: fatal error".to_string()), // Fatal error
+			]
+		);
+
+		Ok(())
+	}
+}
diff --git a/protocol-units/da/movement/protocol/light-node/Cargo.toml b/protocol-units/da/movement/protocol/light-node/Cargo.toml
index 7eddd353e..a1df45a9f 100644
--- a/protocol-units/da/movement/protocol/light-node/Cargo.toml
+++ b/protocol-units/da/movement/protocol/light-node/Cargo.toml
@@ -42,6 +42,7 @@ zstd = { workspace = true }
 ecdsa = { workspace = true }
 k256 = { workspace = true }
 thiserror = { workspace = true }
+movement-da-light-node-da = { workspace = true }
 
 # sequencer
 memseq = { workspace = true, optional = true }
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 64e0ab7c7..e2a9a5506 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -1,4 +1,4 @@
-use movement_da_util::ir_blob::IntermediateBlobRepresentation;
+use movement_da_util::ir_blob::DaBlob;
 use std::fmt::{self, Debug, Formatter};
 use std::sync::Arc;
 use tokio_stream::{Stream, StreamExt};
@@ -8,12 +8,13 @@ use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{nmt::Namespace, Blob as CelestiaBlob, TxConfig};
 
 // FIXME: glob imports are bad style
+use movement_da_light_node_da::{DaBlob, DaOperations};
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
 use movement_da_light_node_proto::*;
 use movement_da_light_node_verifier::{permissioned_signers::Verifier, VerifierOperations};
 use movement_da_util::{
 	config::Config,
-	ir_blob::{celestia::CelestiaIntermediateBlobRepresentation, InnerSignedBlobV1Data},
+	ir_blob::{celestia::CelestiaDaBlob, InnerSignedBlobV1Data},
 };
 
 use crate::LightNodeRuntime;
@@ -31,30 +32,31 @@ use ecdsa::{
 };
 
 #[derive(Clone)]
-pub struct LightNode<C>
+pub struct LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	pub config: Config,
-	pub celestia_namespace: Namespace,
-	pub default_client: Arc<Client>,
 	pub verifier: Arc<
-		Box<dyn VerifierOperations<CelestiaBlob, IntermediateBlobRepresentation> + Send + Sync>,
+		Box<dyn VerifierOperations<CelestiaBlob, DaBlob> + Send + Sync>,
 	>,
 	pub signing_key: SigningKey<C>,
+	pub da: Arc<Da>,
 }
 
-impl<C> Debug for LightNode<C>
+impl<C, Da> Debug for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 		f.debug_struct("LightNode")
@@ -63,13 +65,14 @@ where
 	}
 }
 
-impl<C> LightNodeRuntime for LightNode<C>
+impl<C, Da> LightNodeRuntime for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	/// Tries to create a new LightNode instance from the toml config file.
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
@@ -104,230 +107,15 @@ where
 	}
 }
 
-impl<C> LightNode<C>
+impl<C, Da> LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
-	/// Creates a new signed blob instance with the provided data.
-	pub fn create_new_celestia_blob(&self, data: Vec<u8>) -> Result<CelestiaBlob, anyhow::Error> {
-		// mark the timestamp as now in milliseconds
-		let timestamp = chrono::Utc::now().timestamp_micros() as u64;
-
-		// sign the blob data and the timestamp
-		let data = InnerSignedBlobV1Data::new(data, timestamp).try_to_sign(&self.signing_key)?;
-
-		// create the celestia blob
-		CelestiaIntermediateBlobRepresentation(data.into(), self.celestia_namespace.clone())
-			.try_into()
-	}
-
-	/// Submits a CelestiaBlob to the Celestia node.
-	pub async fn submit_celestia_blob(&self, blob: CelestiaBlob) -> Result<u64, anyhow::Error> {
-		let config = TxConfig::default();
-		// config.with_gas(2);
-		let height = self.default_client.blob_submit(&[blob], config).await.map_err(|e| {
-			error!(error = %e, "failed to submit the blob");
-			anyhow::anyhow!("Failed submitting the blob: {}", e)
-		})?;
-
-		Ok(height)
-	}
-
-	/// Submits Celestia blobs to the Celestia node.
-	pub async fn submit_celestia_blobs(
-		&self,
-		blobs: &[CelestiaBlob],
-	) -> Result<u64, anyhow::Error> {
-		let height =
-			self.default_client.blob_submit(blobs, TxConfig::default()).await.map_err(|e| {
-				error!(error = %e, "failed to submit the blobs");
-				anyhow::anyhow!("Failed submitting the blob: {}", e)
-			})?;
-
-		Ok(height)
-	}
-
-	/// Submits a blob to the Celestia node.
-	pub async fn submit_blob(&self, data: Vec<u8>) -> Result<Blob, anyhow::Error> {
-		let celestia_blob = self.create_new_celestia_blob(data)?;
-		let height = self.submit_celestia_blob(celestia_blob.clone()).await?;
-		Ok(Self::celestia_blob_to_blob(celestia_blob, height)?)
-	}
-
-	/// Gets the blobs at a given height.
-	pub async fn get_ir_blobs_at_height(
-		&self,
-		height: u64,
-	) -> Result<Vec<IntermediateBlobRepresentation>, anyhow::Error> {
-		let height = if height == 0 { 1 } else { height };
-		match self.default_client.blob_get_all(height, &[self.celestia_namespace]).await {
-			Err(e) => {
-				error!(error = %e, "failed to get blobs at height {height}");
-				anyhow::bail!(e);
-			}
-			Ok(blobs) => {
-				let blobs = blobs.unwrap_or_default();
-
-				let mut verified_blobs = Vec::new();
-				for blob in blobs {
-					match self.verifier.verify(blob, height).await {
-						Ok(verified_blob) => {
-							let blob = verified_blob.into_inner();
-							info!("verified blob at height {}: {}", height, hex::encode(blob.id()));
-							verified_blobs.push(blob);
-						}
-						Err(e) => {
-							error!(error = %e, "failed to verify blob");
-						}
-					}
-				}
-
-				Ok(verified_blobs)
-			}
-		}
-	}
-
-	#[tracing::instrument(target = "movement_timing", level = "info", skip(self))]
-	async fn get_blobs_at_height(&self, height: u64) -> Result<Vec<Blob>, anyhow::Error> {
-		let ir_blobs = self.get_ir_blobs_at_height(height).await?;
-		let mut blobs = Vec::new();
-		for ir_blob in ir_blobs {
-			let blob = Self::ir_blob_to_blob(ir_blob, height)?;
-			// todo: update logging here
-			blobs.push(blob);
-		}
-		Ok(blobs)
-	}
-
-	/// Streams blobs until it can't get another one in the loop
-	pub async fn stream_blobs_in_range(
-		&self,
-		start_height: u64,
-		end_height: Option<u64>,
-	) -> Result<
-		std::pin::Pin<Box<dyn Stream<Item = Result<Blob, anyhow::Error>> + Send>>,
-		anyhow::Error,
-	> {
-		let mut height = start_height;
-		let end_height = end_height.unwrap_or_else(|| u64::MAX);
-		let me = Arc::new(self.clone());
-
-		let stream = async_stream::try_stream! {
-			loop {
-				if height > end_height {
-					break;
-				}
-
-				// to avoid stopping the stream when get blobs at height fails, simply warn!
-				match me.get_blobs_at_height(height).await {
-					Ok(blobs) => {
-						for blob in blobs {
-							yield blob;
-						}
-					}
-					Err(e) => {
-						warn!(error = %e, "failed to get blobs at height");
-					}
-				}
-
-				let blobs = me.get_blobs_at_height(height).await?;
-				for blob in blobs {
-					yield blob;
-				}
-				height += 1;
-			}
-		};
-
-		Ok(Box::pin(stream)
-			as std::pin::Pin<Box<dyn Stream<Item = Result<Blob, anyhow::Error>> + Send>>)
-	}
-
-	/// Streams the latest blobs that can subscribed to.
-	async fn stream_blobs_from_height_on(
-		&self,
-		start_height: Option<u64>,
-	) -> Result<
-		std::pin::Pin<Box<dyn Stream<Item = Result<Blob, anyhow::Error>> + Send>>,
-		anyhow::Error,
-	> {
-		let start_height = start_height.unwrap_or_else(|| u64::MAX);
-		let me = Arc::new(self.clone());
-		let mut subscription = me.default_client.header_subscribe().await?;
-
-		let stream = async_stream::try_stream! {
-			let mut first_flag = true;
-			while let Some(header_res) = subscription.next().await {
-
-				let header = header_res?;
-				let height = header.height().into();
-
-				info!("Stream got header: {:?}", header.height());
-
-				// back fetch the blobs
-				if first_flag && (height > start_height) {
-
-					let mut blob_stream = me.stream_blobs_in_range(start_height, Some(height)).await?;
-
-					while let Some(blob) = blob_stream.next().await {
-
-						debug!("Stream got blob: {:?}", blob);
-
-						yield blob?;
-					}
-
-				}
-				first_flag = false;
-
-				// to avoid stopping the stream when get blobs at height fails, simply warn!
-				match me.get_blobs_at_height(height).await {
-					Ok(blobs) => {
-						for blob in blobs {
-							yield blob;
-						}
-					}
-					Err(e) => {
-						warn!(error = %e, "failed to get blobs at height");
-					}
-				}
-
-			}
-		};
-
-		Ok(Box::pin(stream)
-			as std::pin::Pin<Box<dyn Stream<Item = Result<Blob, anyhow::Error>> + Send>>)
-	}
-
-	pub fn ir_blob_to_blob(
-		ir_blob: IntermediateBlobRepresentation,
-		height: u64,
-	) -> Result<Blob, anyhow::Error> {
-		Ok(Blob {
-			data: ir_blob.blob().to_vec(),
-			signature: ir_blob.signature().to_vec(),
-			timestamp: ir_blob.timestamp(),
-			signer: ir_blob.signer().to_vec(),
-			blob_id: ir_blob.id().to_vec(),
-			height,
-		})
-	}
-
-	pub fn celestia_blob_to_blob(blob: CelestiaBlob, height: u64) -> Result<Blob, anyhow::Error> {
-		let ir_blob: IntermediateBlobRepresentation = blob.try_into()?;
-
-		Ok(Blob {
-			data: ir_blob.blob().to_vec(),
-			signature: ir_blob.signature().to_vec(),
-			timestamp: ir_blob.timestamp(),
-			signer: ir_blob.signer().to_vec(),
-			blob_id: ir_blob.id().to_vec(),
-			height,
-		})
-	}
-
 	pub fn blob_to_blob_write_response(blob: Blob) -> Result<BlobResponse, anyhow::Error> {
 		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::PassedThroughBlob(blob)) })
 	}
@@ -346,13 +134,14 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> LightNodeService for LightNode<C>
+impl<C, Da> LightNodeService for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	/// Server streaming response type for the StreamReadFromHeight method.
 	type StreamReadFromHeightStream = std::pin::Pin<
@@ -373,7 +162,7 @@ where
 
 		let output = async_stream::try_stream! {
 
-			let mut blob_stream = me.stream_blobs_from_height_on(Some(height)).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+			let mut blob_stream = me.da.stream_ir_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
 			while let Some(blob) = blob_stream.next().await {
 				let blob = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
@@ -400,22 +189,7 @@ where
 		&self,
 		_request: tonic::Request<StreamReadLatestRequest>,
 	) -> std::result::Result<tonic::Response<Self::StreamReadLatestStream>, tonic::Status> {
-		let me = Arc::new(self.clone());
-
-		let output = async_stream::try_stream! {
-
-			let mut blob_stream = me.stream_blobs_from_height_on(None).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
-			while let Some(blob) = blob_stream.next().await {
-				let blob = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let response = StreamReadLatestResponse {
-					blob : Some(Self::blob_to_blob_read_response(blob).map_err(|e| tonic::Status::internal(e.to_string()))?)
-				};
-				yield response;
-			}
-
-		};
-
-		Ok(tonic::Response::new(Box::pin(output) as Self::StreamReadLatestStream))
+		unimplemented!()
 	}
 	/// Server streaming response type for the StreamWriteCelestiaBlob method.
 	type StreamWriteBlobStream = std::pin::Pin<
@@ -426,85 +200,21 @@ where
 		&self,
 		request: tonic::Request<tonic::Streaming<StreamWriteBlobRequest>>,
 	) -> std::result::Result<tonic::Response<Self::StreamWriteBlobStream>, tonic::Status> {
-		let mut stream = request.into_inner();
-		let me = Arc::new(self.clone());
-
-		let output = async_stream::try_stream! {
-
-			while let Some(request) = stream.next().await {
-				let request = request?;
-				let blob_data = request.blob.ok_or(tonic::Status::invalid_argument("No blob in request"))?.data;
-
-				let blob = me.submit_blob(blob_data).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
-
-				let write_response = StreamWriteBlobResponse {
-					blob : Some(Self::blob_to_blob_read_response(blob).map_err(|e| tonic::Status::internal(e.to_string()))?)
-				};
-
-				yield write_response;
-
-			}
-		};
-
-		Ok(tonic::Response::new(Box::pin(output) as Self::StreamWriteBlobStream))
+		unimplemented!()
 	}
 	/// Read blobs at a specified height.
 	async fn read_at_height(
 		&self,
 		request: tonic::Request<ReadAtHeightRequest>,
 	) -> std::result::Result<tonic::Response<ReadAtHeightResponse>, tonic::Status> {
-		let height = request.into_inner().height;
-		let blobs = self
-			.get_blobs_at_height(height)
-			.await
-			.map_err(|e| tonic::Status::internal(e.to_string()))?;
-
-		if blobs.is_empty() {
-			return Err(tonic::Status::not_found("No blobs found at the specified height"));
-		}
-
-		let mut blob_responses = Vec::new();
-		for blob in blobs {
-			blob_responses.push(
-				Self::blob_to_blob_read_response(blob)
-					.map_err(|e| tonic::Status::internal(e.to_string()))?,
-			);
-		}
-
-		Ok(tonic::Response::new(ReadAtHeightResponse {
-			// map blobs to the response type
-			blobs: blob_responses,
-		}))
+		unimplemented!()
 	}
 	/// Batch read and write operations for efficiency.
 	async fn batch_read(
 		&self,
 		request: tonic::Request<BatchReadRequest>,
 	) -> std::result::Result<tonic::Response<BatchReadResponse>, tonic::Status> {
-		let heights = request.into_inner().heights;
-		let mut responses = Vec::with_capacity(heights.len());
-		for height in heights {
-			let blobs = self
-				.get_blobs_at_height(height)
-				.await
-				.map_err(|e| tonic::Status::internal(e.to_string()))?;
-
-			if blobs.is_empty() {
-				return Err(tonic::Status::not_found("No blobs found at the specified height"));
-			}
-
-			let mut blob_responses = Vec::new();
-			for blob in blobs {
-				blob_responses.push(
-					Self::blob_to_blob_read_response(blob)
-						.map_err(|e| tonic::Status::internal(e.to_string()))?,
-				);
-			}
-
-			responses.push(ReadAtHeightResponse { blobs: blob_responses })
-		}
-
-		Ok(tonic::Response::new(BatchReadResponse { responses }))
+		unimplemented!()
 	}
 
 	/// Batch write blobs.
@@ -516,6 +226,7 @@ where
 		let mut responses = Vec::with_capacity(blobs.len());
 		for data in blobs {
 			let blob = self
+				.da
 				.submit_blob(data.data)
 				.await
 				.map_err(|e| tonic::Status::internal(e.to_string()))?;
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
index 924832152..d805cc4f4 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -48,38 +48,38 @@ impl InnerSignedBlobV1 {
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum IntermediateBlobRepresentation {
+pub enum DaBlob {
 	SignedV1(InnerSignedBlobV1),
 }
 
-impl From<InnerSignedBlobV1> for IntermediateBlobRepresentation {
+impl From<InnerSignedBlobV1> for DaBlob {
 	fn from(inner: InnerSignedBlobV1) -> Self {
-		IntermediateBlobRepresentation::SignedV1(inner)
+		DaBlob::SignedV1(inner)
 	}
 }
 
-impl IntermediateBlobRepresentation {
+impl DaBlob {
 	pub fn blob(&self) -> &[u8] {
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.blob.as_slice(),
+			DaBlob::SignedV1(inner) => inner.data.blob.as_slice(),
 		}
 	}
 
 	pub fn signature(&self) -> &[u8] {
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.signature.as_slice(),
+			DaBlob::SignedV1(inner) => inner.signature.as_slice(),
 		}
 	}
 
 	pub fn timestamp(&self) -> u64 {
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.data.timestamp,
+			DaBlob::SignedV1(inner) => inner.data.timestamp,
 		}
 	}
 
 	pub fn signer(&self) -> &[u8] {
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.signer.as_slice(),
+			DaBlob::SignedV1(inner) => inner.signer.as_slice(),
 		}
 	}
 
@@ -89,7 +89,7 @@ impl IntermediateBlobRepresentation {
 
 	pub fn id(&self) -> &[u8] {
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.id.as_slice(),
+			DaBlob::SignedV1(inner) => inner.id.as_slice(),
 		}
 	}
 
@@ -102,7 +102,7 @@ impl IntermediateBlobRepresentation {
 		FieldBytesSize<C>: ModulusSize,
 	{
 		match self {
-			IntermediateBlobRepresentation::SignedV1(inner) => inner.try_verify::<C>(),
+			DaBlob::SignedV1(inner) => inner.try_verify::<C>(),
 		}
 	}
 }
diff --git a/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs b/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
deleted file mode 100644
index 3cb396ccc..000000000
--- a/protocol-units/da/movement/protocol/verifier/src/permissioned_signers/mod.rs
+++ /dev/null
@@ -1,80 +0,0 @@
-use crate::{
-	celestia::Verifier as CelestiaVerifier, signed::InKnownSignersVerifier, Error, Verified,
-	VerifierOperations,
-};
-use celestia_rpc::Client;
-use celestia_types::nmt::Namespace;
-use celestia_types::Blob as CelestiaBlob;
-use ecdsa::{
-	elliptic_curve::{
-		generic_array::ArrayLength,
-		ops::Invert,
-		point::PointCompression,
-		sec1::{FromEncodedPoint, ModulusSize, ToEncodedPoint},
-		subtle::CtOption,
-		AffinePoint, CurveArithmetic, FieldBytesSize, PrimeCurve, Scalar,
-	},
-	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
-	SignatureSize,
-};
-use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
-use std::sync::Arc;
-
-/// A verifier of Celestia blobs for permissioned signers
-#[derive(Clone)]
-pub struct Verifier<C>
-where
-	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-	SignatureSize<C>: ArrayLength<u8>,
-	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-	FieldBytesSize<C>: ModulusSize,
-{
-	/// The Celestia veifier
-	pub celestia: CelestiaVerifier,
-	/// The verifier for known signers
-	pub known_signers: InKnownSignersVerifier<C>,
-}
-
-impl<C> Verifier<C>
-where
-	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-	SignatureSize<C>: ArrayLength<u8>,
-	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-	FieldBytesSize<C>: ModulusSize,
-{
-	pub fn new<T>(
-		celestia_client: Arc<Client>,
-		celestia_namespace: Namespace,
-		known_signers_sec1_bytes: T,
-	) -> Self
-	where
-		T: IntoIterator,
-		T::Item: Into<String>,
-	{
-		Self {
-			celestia: CelestiaVerifier::new(celestia_client, celestia_namespace),
-			known_signers: InKnownSignersVerifier::new(known_signers_sec1_bytes),
-		}
-	}
-}
-
-#[tonic::async_trait]
-impl<C> VerifierOperations<CelestiaBlob, IntermediateBlobRepresentation> for Verifier<C>
-where
-	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-	SignatureSize<C>: ArrayLength<u8>,
-	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-	FieldBytesSize<C>: ModulusSize,
-{
-	async fn verify(
-		&self,
-		blob: CelestiaBlob,
-		height: u64,
-	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
-		let verified_blob = self.celestia.verify(blob, height).await?;
-		self.known_signers.verify(verified_blob.into_inner(), height).await
-	}
-}
diff --git a/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
index 71cf677ff..52d1529e1 100644
--- a/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/signed/mod.rs
@@ -11,7 +11,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
-use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::DaBlob;
 use std::collections::HashSet;
 use tracing::info;
 
@@ -42,7 +42,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> VerifierOperations<IntermediateBlobRepresentation, IntermediateBlobRepresentation>
+impl<C> VerifierOperations<DaBlob, DaBlob>
 	for Verifier<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
@@ -53,9 +53,9 @@ where
 {
 	async fn verify(
 		&self,
-		blob: IntermediateBlobRepresentation,
+		blob: DaBlob,
 		_height: u64,
-	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
+	) -> Result<Verified<DaBlob>, Error> {
 		blob.verify_signature::<C>().map_err(|e| Error::Validation(e.to_string()))?;
 
 		Ok(Verified::new(blob))
@@ -102,7 +102,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> VerifierOperations<IntermediateBlobRepresentation, IntermediateBlobRepresentation>
+impl<C> VerifierOperations<DaBlob, DaBlob>
 	for InKnownSignersVerifier<C>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
@@ -113,9 +113,9 @@ where
 {
 	async fn verify(
 		&self,
-		blob: IntermediateBlobRepresentation,
+		blob: DaBlob,
 		height: u64,
-	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
+	) -> Result<Verified<DaBlob>, Error> {
 		let ir_blob = self.inner_verifier.verify(blob, height).await?;
 		info!("Verified inner blob");
 		let signer = ir_blob.inner().signer_hex();
diff --git a/protocol-units/da/movement/providers/celestia/Cargo.toml b/protocol-units/da/movement/providers/celestia/Cargo.toml
index a1e2ed451..36149f37f 100644
--- a/protocol-units/da/movement/providers/celestia/Cargo.toml
+++ b/protocol-units/da/movement/providers/celestia/Cargo.toml
@@ -12,15 +12,17 @@ rust-version = { workspace = true }
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-tonic = { workspace = true }
-prost = { workspace = true }
-
-[build-dependencies]
 movement-da-light-node-da = { workspace = true }
 movement-da-util = { workspace = true }
-movement-da-light-node-verifier = { workspace = true }
+# movement-da-light-node-verifier = { workspace = true }
 celestia-rpc = { workspace = true }
 celestia-types = { workspace = true }
+zstd = { workspace = true }
+bcs = { workspace = true }
+async-stream = { workspace = true }
+anyhow = { workspace = true }
+tracing = { workspace = true }
+tonic = { workspace = true }
 
 [features]
 default = []
diff --git a/protocol-units/da/movement/providers/celestia/src/blob/ir.rs b/protocol-units/da/movement/providers/celestia/src/blob/ir.rs
new file mode 100644
index 000000000..14e1e451e
--- /dev/null
+++ b/protocol-units/da/movement/providers/celestia/src/blob/ir.rs
@@ -0,0 +1,39 @@
+use anyhow::Context;
+use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob as CelestiaBlob};
+use movement_da_util::blob::ir::blob::DaBlob;
+
+/// Converts a [CelestiaBlob] into a [DaBlob].
+pub fn into_da_blob(blob: CelestiaBlob) -> Result<DaBlob, anyhow::Error> {
+	// decompress blob.data with zstd
+	let decompressed =
+		zstd::decode_all(blob.data.as_slice()).context("failed to decompress blob")?;
+
+	// deserialize the decompressed data with bcs
+	let blob = bcs::from_bytes(decompressed.as_slice()).context("failed to deserialize blob")?;
+
+	Ok(blob)
+}
+
+pub struct CelestiaDaBlob(pub DaBlob, pub Namespace);
+
+/// Tries to form a CelestiaBlob from a CelestiaDaBlob
+impl TryFrom<CelestiaDaBlob> for CelestiaBlob {
+	type Error = anyhow::Error;
+
+	fn try_from(ir_blob: CelestiaDaBlob) -> Result<Self, Self::Error> {
+		// Extract the inner blob and namespace
+		let CelestiaDaBlob(ir_blob, namespace) = ir_blob;
+
+		// Serialize the inner blob with bcs
+		let serialized_blob = bcs::to_bytes(&ir_blob).context("failed to serialize blob")?;
+
+		// Compress the serialized data with zstd
+		let compressed_blob =
+			zstd::encode_all(serialized_blob.as_slice(), 0).context("failed to compress blob")?;
+
+		// Construct the final CelestiaBlob by assigning the compressed data
+		// and associating it with the provided namespace
+		Ok(CelestiaBlob::new(namespace, compressed_blob, AppVersion::V2)
+			.map_err(|e| anyhow::anyhow!(e))?)
+	}
+}
diff --git a/protocol-units/da/movement/providers/celestia/src/blob/mod.rs b/protocol-units/da/movement/providers/celestia/src/blob/mod.rs
index e69de29bb..19d2d13f0 100644
--- a/protocol-units/da/movement/providers/celestia/src/blob/mod.rs
+++ b/protocol-units/da/movement/providers/celestia/src/blob/mod.rs
@@ -0,0 +1 @@
+pub mod ir;
diff --git a/protocol-units/da/movement/providers/celestia/src/da/mod.rs b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
index 4a4f6d781..892ab1c3f 100644
--- a/protocol-units/da/movement/providers/celestia/src/da/mod.rs
+++ b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
@@ -1 +1,122 @@
-pub struct Da {}
+use crate::blob::ir::{into_da_blob, CelestiaDaBlob};
+use celestia_rpc::{BlobClient, Client, HeaderClient};
+use celestia_types::{nmt::Namespace, Blob as CelestiaBlob, TxConfig};
+use movement_da_light_node_da::{Certificate, CertificateStream, DaError, DaOperations};
+use movement_da_util::blob::ir::blob::DaBlob;
+use std::future::Future;
+use std::pin::Pin;
+use std::sync::Arc;
+use tracing::error;
+
+#[derive(Clone)]
+pub struct Da {
+	/// The namespace on Celestia which the Da will use.
+	celestia_namespace: Namespace,
+	/// The Celestia RPC client
+	default_client: Arc<Client>,
+}
+
+impl Da {
+	/// Creates a new Da instance with the provided Celestia namespace and RPC client.
+	pub fn new(celestia_namespace: Namespace, default_client: Arc<Client>) -> Self {
+		Self { celestia_namespace, default_client }
+	}
+
+	/// Creates a new Celestia blob from the provided DaBlob data.
+	pub fn create_new_celestia_blob(&self, data: DaBlob) -> Result<CelestiaBlob, anyhow::Error> {
+		// create the celestia blob
+		CelestiaDaBlob(data.into(), self.celestia_namespace.clone()).try_into()
+	}
+
+	/// Submits a CelestiaBlob to the Celestia node.
+	pub async fn submit_celestia_blob(&self, blob: CelestiaBlob) -> Result<u64, anyhow::Error> {
+		let config = TxConfig::default();
+		// config.with_gas(2);
+		let height = self.default_client.blob_submit(&[blob], config).await.map_err(|e| {
+			error!(error = %e, "failed to submit the blob");
+			anyhow::anyhow!("Failed submitting the blob: {}", e)
+		})?;
+
+		Ok(height)
+	}
+}
+
+impl DaOperations for Da {
+	fn submit_blob(
+		&self,
+		data: DaBlob,
+	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>> {
+		Box::pin(async move {
+			// create the blob
+			let celestia_blob = self
+				.create_new_celestia_blob(data)
+				.map_err(|e| DaError::Internal("failed to create celestia blob".to_string()))?;
+
+			// submit the blob to the celestia node
+			self.submit_celestia_blob(celestia_blob)
+				.await
+				.map_err(|e| DaError::Internal("failed to submit celestia blob".to_string()))?;
+
+			Ok(())
+		})
+	}
+
+	fn get_ir_blobs_at_height(
+		&self,
+		height: u64,
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
+		Box::pin(async move {
+			let height = if height == 0 { 1 } else { height };
+
+			match self.default_client.blob_get_all(height, &[self.celestia_namespace]).await {
+				// todo: lots more pattern matching here
+				Err(e) => {
+					error!(error = %e, "failed to get blobs at height {height}");
+					Err(DaError::NonFatalBlobsAtHeight(
+						format!("failed to get blobs at height {height}").into(),
+					))
+				}
+				Ok(blobs) => {
+					let blobs = blobs.unwrap_or_default();
+					let mut da_blobs = Vec::new();
+
+					for blob in blobs {
+						let da_blob = into_da_blob(blob).map_err(|e| {
+							DaError::NonFatalBlobsAtHeight(
+								format!("failed to convert blob: {e}").into(),
+							)
+						})?;
+						da_blobs.push(da_blob);
+					}
+
+					Ok(da_blobs)
+				}
+			}
+		})
+	}
+
+	fn stream_certificates(
+		&self,
+	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>> {
+		let me = self.clone();
+		Box::pin(async move {
+			let mut subscription = me.default_client.header_subscribe().await.map_err(|e| {
+				DaError::Certificate("failed to subscribe to headers".to_string().into())
+			})?;
+			let stream = async_stream::try_stream! {
+
+				while let Some(header_res) = subscription.next().await {
+
+					let header = header_res.map_err(|e| {
+						DaError::NonFatalCertificate(e.into())
+					})?;
+					let height = header.height().into();
+
+					yield Certificate::Height(height);
+
+				}
+			};
+			Ok(Box::pin(stream) as CertificateStream)
+		})
+	}
+}
diff --git a/protocol-units/da/movement/providers/celestia/src/lib.rs b/protocol-units/da/movement/providers/celestia/src/lib.rs
index 42e3357ce..2305b23f3 100644
--- a/protocol-units/da/movement/providers/celestia/src/lib.rs
+++ b/protocol-units/da/movement/providers/celestia/src/lib.rs
@@ -1,3 +1,3 @@
 pub mod blob;
 pub mod da;
-pub mod verifier;
+// pub mod verifier;
diff --git a/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs b/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
index 079a12b0d..e4cf30239 100644
--- a/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
+++ b/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
@@ -1,7 +1,7 @@
 use celestia_rpc::Client;
 use celestia_types::{nmt::Namespace, Blob};
 use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
-use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::DaBlob;
 use std::sync::Arc;
 
 #[derive(Clone)]
@@ -19,15 +19,15 @@ impl Verifier {
 }
 
 #[tonic::async_trait]
-impl VerifierOperations<Blob, IntermediateBlobRepresentation> for Verifier {
-	/// Verifies a Celestia Blob as a Valid IntermediateBlobRepresentation
+impl VerifierOperations<Blob, DaBlob> for Verifier {
+	/// Verifies a Celestia Blob as a valid DaBlob
 	async fn verify(
 		&self,
 		blob: Blob,
 		_height: u64,
-	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
-		// Only assert that we can indeed get an IntermediateBlobRepresentation from the Blob
-		let ir_blob = IntermediateBlobRepresentation::try_from(blob)
+	) -> Result<Verified<DaBlob>, Error> {
+		// Only assert that we can indeed get a DaBlob from the Blob
+		let ir_blob = DaBlob::try_from(blob)
 			.map_err(|e| Error::Internal(e.to_string()))?;
 
 		Ok(Verified::new(ir_blob))
diff --git a/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs b/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
index 8c6e33f1b..99ee743ed 100644
--- a/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
+++ b/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
@@ -1,7 +1,7 @@
 use celestia_rpc::{BlobClient, Client, HeaderClient};
 use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob};
 use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
-use movement_da_util::blob::ir::blob::IntermediateBlobRepresentation;
+use movement_da_util::blob::ir::blob::DaBlob;
 use std::sync::Arc;
 
 #[derive(Clone)]
@@ -19,13 +19,13 @@ impl Verifier {
 }
 
 #[tonic::async_trait]
-impl VerifierOperations<Blob, IntermediateBlobRepresentation> for Verifier {
-	/// Verifies a Celestia Blob as a Valid IntermediateBlobRepresentation
+impl VerifierOperations<Blob, DaBlob> for Verifier {
+	/// Verifies a Celestia Blob as a valid DaBlob
 	async fn verify(
 		&self,
 		blob: Blob,
 		height: u64,
-	) -> Result<Verified<IntermediateBlobRepresentation>, Error> {
+	) -> Result<Verified<DaBlob>, Error> {
 		//@l-monninger: the light node itself does most of the work of verify blobs. The verification under the feature flag below is useful in zero-trust environments.
 
 		blob.validate(AppVersion::V2).map_err(|e| Error::Validation(e.to_string()))?;
@@ -64,7 +64,7 @@ impl VerifierOperations<Blob, IntermediateBlobRepresentation> for Verifier {
 				})?;
 		}
 
-		let ir_blob = IntermediateBlobRepresentation::try_from(blob)
+		let ir_blob = DaBlob::try_from(blob)
 			.map_err(|e| Error::Internal(e.to_string()))?;
 
 		Ok(Verified::new(ir_blob))

From b4dc70662fbc1040b1247e547f4a8b2a5f8e40bd Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Tue, 14 Jan 2025 13:54:16 -0800
Subject: [PATCH 09/43] fix: celestia provider.

---
 protocol-units/da/movement/protocol/verifier/src/lib.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/protocol-units/da/movement/protocol/verifier/src/lib.rs b/protocol-units/da/movement/protocol/verifier/src/lib.rs
index d03ea21b0..ca5bfe950 100644
--- a/protocol-units/da/movement/protocol/verifier/src/lib.rs
+++ b/protocol-units/da/movement/protocol/verifier/src/lib.rs
@@ -1,4 +1,3 @@
-pub mod permissioned_signers;
 pub mod signed;
 
 pub use movement_da_light_node_proto::*;

From 40293f965a4c742b9702d753c1db398b60d7ed93 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Tue, 14 Jan 2025 15:16:34 -0800
Subject: [PATCH 10/43] fix: refactor passthrough.

---
 .../da/movement/protocol/da/src/lib.rs        |  5 +-
 .../protocol/light-node/src/passthrough.rs    | 62 +++++++----------
 .../protocol/light-node/src/sequencer.rs      | 32 +++++----
 .../protocol/util/src/blob/ir/blob.rs         | 68 +++++++++++++++++++
 .../protocol/util/src/blob/ir/data.rs         | 11 +++
 5 files changed, 123 insertions(+), 55 deletions(-)

diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 73d9ab84d..2d841a11a 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -9,7 +9,8 @@ use tracing::warn;
 
 pub type CertificateStream<'a> =
 	Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send + 'a>>;
-pub type DaBlobStream<'a> = Pin<Box<dyn Stream<Item = Result<DaBlob, DaError>> + Send + 'a>>;
+pub type DaBlobStream<'a> =
+	Pin<Box<dyn Stream<Item = Result<(DaHeight, DaBlob), DaError>> + Send + 'a>>;
 
 /// A height for a blob on the DA.
 #[derive(Debug, Clone)]
@@ -91,7 +92,7 @@ pub trait DaOperations: Send + Sync {
 				for height in start_height..end_height {
 					let blobs = self.get_ir_blobs_at_height_for_stream(height).await?;
 					for blob in blobs {
-						yield blob;
+						yield (DaHeight(height), blob);
 					}
 				}
 			};
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index e2a9a5506..5633ab63e 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -1,21 +1,13 @@
-use movement_da_util::ir_blob::DaBlob;
 use std::fmt::{self, Debug, Formatter};
 use std::sync::Arc;
 use tokio_stream::{Stream, StreamExt};
-use tracing::{debug, error, info, warn};
-
-use celestia_rpc::{BlobClient, Client, HeaderClient};
-use celestia_types::{nmt::Namespace, Blob as CelestiaBlob, TxConfig};
+use tracing::info;
 
 // FIXME: glob imports are bad style
-use movement_da_light_node_da::{DaBlob, DaOperations};
+use movement_da_light_node_da::DaOperations;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
 use movement_da_light_node_proto::*;
-use movement_da_light_node_verifier::{permissioned_signers::Verifier, VerifierOperations};
-use movement_da_util::{
-	config::Config,
-	ir_blob::{celestia::CelestiaDaBlob, InnerSignedBlobV1Data},
-};
+use movement_da_util::{blob::ir::data::InnerSignedBlobV1Data, config::Config};
 
 use crate::LightNodeRuntime;
 use ecdsa::{
@@ -42,9 +34,7 @@ where
 	Da: DaOperations,
 {
 	pub config: Config,
-	pub verifier: Arc<
-		Box<dyn VerifierOperations<CelestiaBlob, DaBlob> + Send + Sync>,
-	>,
+	// pub verifier: Arc<Box<dyn VerifierOperations<CelestiaBlob, DaBlob> + Send + Sync>>,
 	pub signing_key: SigningKey<C>,
 	pub da: Arc<Da>,
 }
@@ -75,9 +65,7 @@ where
 	Da: DaOperations,
 {
 	/// Tries to create a new LightNode instance from the toml config file.
-	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
-		let client = Arc::new(config.connect_celestia().await?);
-
+	async fn try_from_config(config: Config, da: Da) -> Result<Self, anyhow::Error> {
 		let signing_key_str = config.da_signing_key();
 		let hex_bytes = hex::decode(signing_key_str)?;
 
@@ -86,13 +74,12 @@ where
 
 		Ok(Self {
 			config: config.clone(),
-			celestia_namespace: config.celestia_namespace(),
-			default_client: client.clone(),
-			verifier: Arc::new(Box::new(Verifier::<C>::new(
+			/*verifier: Arc::new(Box::new(Verifier::<C>::new(
 				client,
 				config.celestia_namespace(),
 				config.da_signers_sec1_keys(),
-			))),
+			))),*/
+			da: Arc::new(da),
 			signing_key,
 		})
 	}
@@ -141,7 +128,7 @@ where
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
-	Da: DaOperations,
+	Da: DaOperations + Send + Sync + 'static,
 {
 	/// Server streaming response type for the StreamReadFromHeight method.
 	type StreamReadFromHeightStream = std::pin::Pin<
@@ -165,9 +152,10 @@ where
 			let mut blob_stream = me.da.stream_ir_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
 			while let Some(blob) = blob_stream.next().await {
-				let blob = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
+				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
+				let blob = da_blob.to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?;
 				let response = StreamReadFromHeightResponse {
-					blob : Some(Self::blob_to_blob_read_response(blob).map_err(|e| tonic::Status::internal(e.to_string()))?)
+					blob: Some(blob)
 				};
 				yield response;
 			}
@@ -198,21 +186,21 @@ where
 	/// Stream blobs out, either individually or in batches.
 	async fn stream_write_blob(
 		&self,
-		request: tonic::Request<tonic::Streaming<StreamWriteBlobRequest>>,
+		_request: tonic::Request<tonic::Streaming<StreamWriteBlobRequest>>,
 	) -> std::result::Result<tonic::Response<Self::StreamWriteBlobStream>, tonic::Status> {
 		unimplemented!()
 	}
 	/// Read blobs at a specified height.
 	async fn read_at_height(
 		&self,
-		request: tonic::Request<ReadAtHeightRequest>,
+		_request: tonic::Request<ReadAtHeightRequest>,
 	) -> std::result::Result<tonic::Response<ReadAtHeightResponse>, tonic::Status> {
 		unimplemented!()
 	}
 	/// Batch read and write operations for efficiency.
 	async fn batch_read(
 		&self,
-		request: tonic::Request<BatchReadRequest>,
+		_request: tonic::Request<BatchReadRequest>,
 	) -> std::result::Result<tonic::Response<BatchReadResponse>, tonic::Status> {
 		unimplemented!()
 	}
@@ -225,22 +213,18 @@ where
 		let blobs = request.into_inner().blobs;
 		let mut responses = Vec::with_capacity(blobs.len());
 		for data in blobs {
-			let blob = self
+			let blob = InnerSignedBlobV1Data::now(data.data)
+				.try_to_sign(&self.signing_key)
+				.map_err(|e| tonic::Status::internal(format!("Failed to sign blob: {}", e)))?;
+			let blob_response = self
 				.da
-				.submit_blob(data.data)
+				.submit_blob(blob.into())
 				.await
 				.map_err(|e| tonic::Status::internal(e.to_string()))?;
-			responses.push(blob);
-		}
-
-		let mut blob_responses = Vec::new();
-		for blob in responses {
-			blob_responses.push(
-				Self::blob_to_blob_write_response(blob)
-					.map_err(|e| tonic::Status::internal(e.to_string()))?,
-			);
+			responses.push(blob_response);
 		}
 
-		Ok(tonic::Response::new(BatchWriteResponse { blobs: blob_responses }))
+		// * We are currently not returning any blobs in the response.
+		Ok(tonic::Response::new(BatchWriteResponse { blobs: vec![] }))
 	}
 }
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index b17f83fd4..9f8ad4533 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -11,6 +11,7 @@ use ecdsa::{
 	hazmat::{DigestPrimitive, SignPrimitive, VerifyPrimitive},
 	SignatureSize,
 };
+use movement_da_light_node_da::DaOperations;
 use movement_da_light_node_prevalidator::{aptos::whitelist::Validator, PrevalidatorOperations};
 use std::boxed::Box;
 use std::fmt::Debug;
@@ -19,14 +20,6 @@ use std::pin::Pin;
 use std::sync::{atomic::AtomicU64, Arc};
 use std::time::Duration;
 
-use tokio::{
-	sync::mpsc::{Receiver, Sender},
-	time::timeout,
-};
-use tokio_stream::Stream;
-use tracing::{debug, info};
-
-use celestia_rpc::HeaderClient;
 use memseq::{Sequencer, Transaction};
 use movement_algs::grouping_heuristic::{
 	apply::ToApply, binpacking::FirstFitBinpacking, drop_success::DropSuccess, skip::SkipFor,
@@ -37,45 +30,54 @@ use movement_da_light_node_proto::blob_response::BlobType;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
 use movement_da_util::config::Config;
 use movement_types::block::Block;
+use tokio::{
+	sync::mpsc::{Receiver, Sender},
+	time::timeout,
+};
+use tokio_stream::Stream;
+use tracing::{debug, info};
 
 use crate::{passthrough::LightNode as LightNodePassThrough, LightNodeRuntime};
 
 const LOGGING_UID: AtomicU64 = AtomicU64::new(0);
 
 #[derive(Clone)]
-pub struct LightNode<C>
+pub struct LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
-	pub pass_through: LightNodePassThrough<C>,
+	pub pass_through: LightNodePassThrough<C, Da>,
 	pub memseq: Arc<memseq::Memseq<memseq::RocksdbMempool>>,
 	pub prevalidator: Option<Arc<Validator>>,
 }
 
-impl<C> Debug for LightNode<C>
+impl<C, Da> Debug for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		f.debug_struct("LightNode").field("pass_through", &self.pass_through).finish()
 	}
 }
 
-impl<C> LightNodeRuntime for LightNode<C>
+impl<C, Da> LightNodeRuntime for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
 		info!("Initializing LightNode in sequencer mode from environment.");
@@ -115,13 +117,14 @@ where
 	}
 }
 
-impl<C> LightNode<C>
+impl<C, Da> LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations,
 {
 	async fn tick_build_blocks(&self, sender: Sender<Block>) -> Result<(), anyhow::Error> {
 		let memseq = self.memseq.clone();
@@ -372,13 +375,14 @@ where
 }
 
 #[tonic::async_trait]
-impl<C> LightNodeService for LightNode<C>
+impl<C, Da> LightNodeService for LightNode<C, Da>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
+	Da: DaOperations + Send + Sync + 'static,
 {
 	/// Server streaming response type for the StreamReadFromHeight method.
 	type StreamReadFromHeightStream = Pin<
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
index d805cc4f4..68537805e 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -13,6 +13,7 @@ use ecdsa::{
 	signature::{digest::Digest, DigestVerifier},
 	SignatureSize, VerifyingKey,
 };
+use movement_da_light_node_proto::*;
 use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -105,6 +106,51 @@ impl DaBlob {
 			DaBlob::SignedV1(inner) => inner.try_verify::<C>(),
 		}
 	}
+
+	pub fn to_blob(self, height: u64) -> Result<Blob, anyhow::Error> {
+		Ok(Blob {
+			data: self.blob().to_vec(),
+			signature: self.signature().to_vec(),
+			timestamp: self.timestamp(),
+			signer: self.signer().to_vec(),
+			blob_id: self.id().to_vec(),
+			height,
+		})
+	}
+
+	pub fn blob_to_blob_write_response(blob: Blob) -> Result<BlobResponse, anyhow::Error> {
+		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::PassedThroughBlob(blob)) })
+	}
+
+	/// Converts a [Blob] into a [BlobResponse] with the blob passed through.
+	pub fn blob_to_blob_passed_through_read_response(
+		blob: Blob,
+	) -> Result<BlobResponse, anyhow::Error> {
+		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::PassedThroughBlob(blob)) })
+	}
+
+	/// Converts a [Blob] into a [BlobResponse] with the blob sequenced.
+	pub fn blob_to_blob_sequenced_read_response(blob: Blob) -> Result<BlobResponse, anyhow::Error> {
+		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::SequencedBlobBlock(blob)) })
+	}
+
+	/// Converts a [DaBlob] into a [BlobResponse] with the blob passed through.
+	pub fn to_blob_passed_through_read_response(
+		self,
+		height: u64,
+	) -> Result<BlobResponse, anyhow::Error> {
+		let blob = self.to_blob(height)?;
+		Self::blob_to_blob_passed_through_read_response(blob)
+	}
+
+	/// Converts a [DaBlob] into a [BlobResponse] with the blob sequenced.
+	pub fn to_blob_sequenced_read_response(
+		self,
+		height: u64,
+	) -> Result<BlobResponse, anyhow::Error> {
+		let blob = self.to_blob(height)?;
+		Self::blob_to_blob_sequenced_read_response(blob)
+	}
 }
 
 #[cfg(test)]
@@ -127,3 +173,25 @@ pub mod test {
 		Ok(())
 	}
 }
+
+pub mod stream_read_response {
+
+	use movement_da_light_node_proto::*;
+
+	/// Converts a passed through [BlobResponse] into a sequenced [BlobResponse].
+	pub fn passed_through_to_sequenced(blob_response: BlobResponse) -> BlobResponse {
+		match blob_response.blob_type {
+			Some(blob_response::BlobType::PassedThroughBlob(blob)) => {
+				BlobResponse { blob_type: Some(blob_response::BlobType::SequencedBlobBlock(blob)) }
+			}
+			_ => blob_response,
+		}
+	}
+
+	/// Converts a passed through [StreamReadFromHeightResponse] into a sequenced [StreamReadFromHeightResponse].
+	pub fn passed_through_to_sequenced_response(
+		response: StreamReadFromHeightResponse,
+	) -> StreamReadFromHeightResponse {
+		StreamReadFromHeightResponse { blob: response.blob.map(passed_through_to_sequenced) }
+	}
+}
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
index ea8929caa..cee89e28e 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
@@ -27,6 +27,17 @@ impl InnerSignedBlobV1Data {
 		Self { blob, timestamp }
 	}
 
+	pub fn now(blob: Vec<u8>) -> Self {
+		Self {
+			blob,
+			// timestamp in microseconds
+			timestamp: std::time::SystemTime::now()
+				.duration_since(std::time::UNIX_EPOCH)
+				.unwrap()
+				.as_micros() as u64,
+		}
+	}
+
 	/// Computes the id of InnerSignedBlobV1Data
 	pub fn compute_id<C>(&self) -> Id
 	where

From 3d4ea57a232be7bdfc82a8ec5292bd7b8ee1a855 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 01:37:34 -0800
Subject: [PATCH 11/43] feat: merge in digest da.

---
 Cargo.lock                                    |  16 ++
 .../da/movement/protocol/da/src/lib.rs        |   8 +-
 .../da/movement/protocol/da/src/mock/mod.rs   |   4 +-
 .../protocol/light-node/src/passthrough.rs    |  37 +--
 .../protocol/light-node/src/sequencer.rs      | 232 +-----------------
 .../da/movement/protocol/util/Cargo.toml      |   1 +
 .../protocol/util/src/blob/ir/blob.rs         |   7 +
 .../protocol/util/src/blob/ir/data.rs         |  33 +++
 .../movement/providers/celestia/src/da/mod.rs |   2 +-
 .../providers/celestia/src/verifier/mod.rs    | 126 ----------
 .../celestia/src/verifier/pessimistic.rs      |  72 ------
 .../providers/digest-store/Cargo.toml         |  33 +++
 .../providers/digest-store/src/blob/mod.rs    |   0
 .../providers/digest-store/src/da/db.rs       |  71 ++++++
 .../providers/digest-store/src/da/mod.rs      |  89 +++++++
 .../providers/digest-store/src/lib.rs         |   3 +
 16 files changed, 271 insertions(+), 463 deletions(-)
 delete mode 100644 protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
 delete mode 100644 protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
 create mode 100644 protocol-units/da/movement/providers/digest-store/Cargo.toml
 create mode 100644 protocol-units/da/movement/providers/digest-store/src/blob/mod.rs
 create mode 100644 protocol-units/da/movement/providers/digest-store/src/da/db.rs
 create mode 100644 protocol-units/da/movement/providers/digest-store/src/da/mod.rs
 create mode 100644 protocol-units/da/movement/providers/digest-store/src/lib.rs

diff --git a/Cargo.lock b/Cargo.lock
index 1cd2bffc8..793279821 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10277,6 +10277,21 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "movement-da-light-node-digest-store"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "async-stream",
+ "bcs 0.1.4",
+ "movement-da-light-node-da",
+ "movement-da-util",
+ "rocksdb",
+ "tokio",
+ "tonic 0.12.3",
+ "tracing",
+]
+
 [[package]]
 name = "movement-da-light-node-prevalidator"
 version = "0.0.2"
@@ -10417,6 +10432,7 @@ dependencies = [
  "k256",
  "memseq-util",
  "movement-da-light-node-proto",
+ "movement-types",
  "prost 0.13.3",
  "rand 0.8.5",
  "serde",
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 2d841a11a..027f3cd68 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -57,17 +57,17 @@ pub trait DaOperations: Send + Sync {
 		data: DaBlob,
 	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>>;
 
-	fn get_ir_blobs_at_height(
+	fn get_da_blobs_at_height(
 		&self,
 		height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>>;
 
-	fn get_ir_blobs_at_height_for_stream(
+	fn get_da_blobs_at_height_for_stream(
 		&self,
 		height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
 		Box::pin(async move {
-			let result = self.get_ir_blobs_at_height(height).await;
+			let result = self.get_da_blobs_at_height(height).await;
 			match result {
 				Ok(blobs) => Ok(blobs),
 				Err(e) => {
@@ -90,7 +90,7 @@ pub trait DaOperations: Send + Sync {
 		let fut = async move {
 			let stream = try_stream! {
 				for height in start_height..end_height {
-					let blobs = self.get_ir_blobs_at_height_for_stream(height).await?;
+					let blobs = self.get_da_blobs_at_height_for_stream(height).await?;
 					for blob in blobs {
 						yield (DaHeight(height), blob);
 					}
diff --git a/protocol-units/da/movement/protocol/da/src/mock/mod.rs b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
index dc60046ed..e4b83d322 100644
--- a/protocol-units/da/movement/protocol/da/src/mock/mod.rs
+++ b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
@@ -12,7 +12,7 @@ pub struct Mock {
 	// A queue for certificates.
 	certificate_queue: Arc<Mutex<VecDeque<Result<Certificate, DaError>>>>,
 
-	// Map for mocking results of `get_ir_blobs_at_height`.
+	// Map for mocking results of `get_da_blobs_at_height`.
 	height_results: Arc<Mutex<HashMap<u64, Result<Vec<DaBlob>, DaError>>>>,
 
 	// Collection to store submitted blobs.
@@ -79,7 +79,7 @@ impl DaOperations for Mock {
 		})
 	}
 
-	fn get_ir_blobs_at_height(
+	fn get_da_blobs_at_height(
 		&self,
 		height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 5633ab63e..056a62759 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -94,32 +94,6 @@ where
 	}
 }
 
-impl<C, Da> LightNode<C, Da>
-where
-	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
-	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
-	SignatureSize<C>: ArrayLength<u8>,
-	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
-	FieldBytesSize<C>: ModulusSize,
-	Da: DaOperations,
-{
-	pub fn blob_to_blob_write_response(blob: Blob) -> Result<BlobResponse, anyhow::Error> {
-		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::PassedThroughBlob(blob)) })
-	}
-
-	pub fn blob_to_blob_read_response(blob: Blob) -> Result<BlobResponse, anyhow::Error> {
-		#[cfg(feature = "sequencer")]
-		{
-			Ok(BlobResponse { blob_type: Some(blob_response::BlobType::SequencedBlobBlock(blob)) })
-		}
-
-		#[cfg(not(feature = "sequencer"))]
-		{
-			Ok(BlobResponse { blob_type: Some(blob_response::BlobType::PassedThroughBlob(blob)) })
-		}
-	}
-}
-
 #[tonic::async_trait]
 impl<C, Da> LightNodeService for LightNode<C, Da>
 where
@@ -144,12 +118,12 @@ where
 	) -> std::result::Result<tonic::Response<Self::StreamReadFromHeightStream>, tonic::Status> {
 		info!("Stream read from height request: {:?}", request);
 
-		let me = Arc::new(self.clone());
+		let da = self.da.clone();
 		let height = request.into_inner().height;
 
 		let output = async_stream::try_stream! {
 
-			let mut blob_stream = me.da.stream_ir_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+			let mut blob_stream = da.stream_ir_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
 			while let Some(blob) = blob_stream.next().await {
 				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
@@ -211,17 +185,10 @@ where
 		request: tonic::Request<BatchWriteRequest>,
 	) -> std::result::Result<tonic::Response<BatchWriteResponse>, tonic::Status> {
 		let blobs = request.into_inner().blobs;
-		let mut responses = Vec::with_capacity(blobs.len());
 		for data in blobs {
 			let blob = InnerSignedBlobV1Data::now(data.data)
 				.try_to_sign(&self.signing_key)
 				.map_err(|e| tonic::Status::internal(format!("Failed to sign blob: {}", e)))?;
-			let blob_response = self
-				.da
-				.submit_blob(blob.into())
-				.await
-				.map_err(|e| tonic::Status::internal(e.to_string()))?;
-			responses.push(blob_response);
 		}
 
 		// * We are currently not returning any blobs in the response.
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index 9f8ad4533..8777f6977 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -1,4 +1,3 @@
-use block::WrappedBlock;
 use ecdsa::{
 	elliptic_curve::{
 		generic_array::ArrayLength,
@@ -21,14 +20,10 @@ use std::sync::{atomic::AtomicU64, Arc};
 use std::time::Duration;
 
 use memseq::{Sequencer, Transaction};
-use movement_algs::grouping_heuristic::{
-	apply::ToApply, binpacking::FirstFitBinpacking, drop_success::DropSuccess, skip::SkipFor,
-	splitting::Splitting, GroupingHeuristicStack, GroupingOutcome,
-};
 use movement_da_light_node_proto as grpc;
 use movement_da_light_node_proto::blob_response::BlobType;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
-use movement_da_util::config::Config;
+use movement_da_util::{blob::ir::data::InnerSignedBlobV1Data, config::Config};
 use movement_types::block::Block;
 use tokio::{
 	sync::mpsc::{Receiver, Sender},
@@ -148,84 +143,12 @@ where
 		}
 	}
 
-	async fn submit_blocks(&self, blocks: &Vec<block::WrappedBlock>) -> Result<(), anyhow::Error> {
-		for block in blocks {
-			info!(target: "movement_timing", block_id = %block.block.id(), "inner_submitting_block");
-		}
-		// get references to celestia blobs in the wrapped blocks
-		let block_blobs = blocks
-			.iter()
-			.map(|wrapped_block| &wrapped_block.blob)
-			.cloned() // hopefully, the compiler optimizes this out
-			.collect::<Vec<_>>();
-		// use deref on the wrapped block to get the blob
-		self.pass_through.submit_celestia_blobs(&block_blobs).await?;
+	/// Submits blocks to the pass through.
+	async fn submit_blocks(&self, blocks: Vec<Block>) -> Result<(), anyhow::Error> {
 		for block in blocks {
-			info!(target: "movement_timing", block_id = %block.block.id(), "inner_submitted_block");
-		}
-		Ok(())
-	}
-
-	pub async fn submit_with_heuristic(&self, blocks: Vec<Block>) -> Result<(), anyhow::Error> {
-		for block in &blocks {
-			info!(target: "movement_timing", block_id = %block.id(), "submitting_block");
-		}
-
-		// wrap the blocks in a struct that can be split and compressed
-		// spawn blocking because the compression is blocking and could be slow
-		let pass_through = self.pass_through.clone();
-		let blocks = tokio::task::spawn_blocking(move || {
-			let mut wrapped_blocks = Vec::new();
-			for block in blocks {
-				let block_bytes = bcs::to_bytes(&block)?;
-				let celestia_blob = pass_through.create_new_celestia_blob(block_bytes)?;
-				let wrapped_block = block::WrappedBlock::new(block, celestia_blob);
-				wrapped_blocks.push(wrapped_block);
-			}
-			Ok::<Vec<WrappedBlock>, anyhow::Error>(wrapped_blocks)
-		})
-		.await??;
-
-		let mut heuristic: GroupingHeuristicStack<block::WrappedBlock> =
-			GroupingHeuristicStack::new(vec![
-				DropSuccess::boxed(),
-				ToApply::boxed(),
-				SkipFor::boxed(1, Splitting::boxed(2)),
-				FirstFitBinpacking::boxed(1_700_000),
-			]);
-
-		let start_distribution = GroupingOutcome::new_apply_distribution(blocks);
-		let block_group_results = heuristic
-			.run_async_sequential_with_metadata(
-				start_distribution,
-				|index, grouping, mut flag| async move {
-					if index == 0 {
-						flag = false;
-					}
-
-					// if the flag is set then we are going to change this grouping outcome to failures and not run anything
-					if flag {
-						return Ok((grouping.to_failures_prefer_instrumental(), flag));
-					}
-
-					let blocks = grouping.into_original();
-					let outcome = match self.submit_blocks(&blocks).await {
-						Ok(_) => GroupingOutcome::new_all_success(blocks.len()),
-						Err(_) => {
-							flag = true;
-							GroupingOutcome::new_apply(blocks)
-						}
-					};
-
-					Ok((outcome, flag))
-				},
-				false,
-			)
-			.await?;
-
-		info!("block group results: {:?}", block_group_results);
-		for block_group_result in &block_group_results {
-			info!(target: "movement_timing", block_group_result = ?block_group_result, "block_group_result");
+			let data: InnerSignedBlobV1Data = block.try_into()?;
+			let blob = data.try_to_sign(&self.pass_through.signing_key)?;
+			self.pass_through.da.submit_blob(blob.into()).await?;
 		}
 
 		Ok(())
@@ -285,13 +208,13 @@ where
 		if blocks.is_empty() {
 			return Ok(());
 		}
-		let ids = blocks.iter().map(|b| b.id()).collect::<Vec<_>>();
 
 		// submit the blobs, resizing as needed
+		let ids = blocks.iter().map(|b| b.id()).collect::<Vec<_>>();
 		for block_id in &ids {
 			info!(target: "movement_timing", %block_id, "submitting_block_batch");
 		}
-		self.submit_with_heuristic(blocks).await?;
+		self.submit_blocks(blocks).await?;
 		for block_id in &ids {
 			info!(target: "movement_timing", %block_id, "submitted_block_batch");
 		}
@@ -355,23 +278,6 @@ where
 
 		Ok(grpc::BlobResponse { blob_type: Some(sequenced_block) })
 	}
-
-	pub fn make_sequenced_blob_intent(
-		data: Vec<u8>,
-		height: u64,
-	) -> Result<grpc::BlobResponse, anyhow::Error> {
-		Ok(grpc::BlobResponse {
-			blob_type: Some(BlobType::SequencedBlobIntent(grpc::Blob {
-				data,
-				blob_id: vec![],
-				height,
-				// todo: at some point it would be good to sign these intents, as they can then be used as pre-confirmations against which we can slash
-				signature: vec![],
-				signer: vec![],
-				timestamp: 0,
-			})),
-		})
-	}
 }
 
 #[tonic::async_trait]
@@ -453,18 +359,9 @@ where
 		request: tonic::Request<grpc::BatchWriteRequest>,
 	) -> std::result::Result<tonic::Response<grpc::BatchWriteResponse>, tonic::Status> {
 		let blobs_for_submission = request.into_inner().blobs;
-		let height: u64 = self
-			.pass_through
-			.default_client
-			.header_network_head()
-			.await
-			.map_err(|e| tonic::Status::internal(e.to_string()))?
-			.height()
-			.into();
 
 		// make transactions from the blobs
 		let mut transactions = Vec::new();
-		let mut intents = Vec::new();
 		for blob in blobs_for_submission {
 			let transaction: Transaction = serde_json::from_slice(&blob.data)
 				.map_err(|e| tonic::Status::internal(e.to_string()))?;
@@ -475,10 +372,6 @@ where
 					match prevalidator.prevalidate(transaction).await {
 						Ok(prevalidated) => {
 							transactions.push(prevalidated.into_inner());
-							intents.push(
-								Self::make_sequenced_blob_intent(blob.data, height)
-									.map_err(|e| tonic::Status::internal(e.to_string()))?,
-							);
 						}
 						Err(e) => {
 							match e {
@@ -507,113 +400,6 @@ where
 			.await
 			.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
-		Ok(tonic::Response::new(grpc::BatchWriteResponse { blobs: intents }))
-	}
-}
-
-pub mod block {
-
-	use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob};
-	use movement_algs::grouping_heuristic::{binpacking::BinpackingWeighted, splitting::Splitable};
-	use movement_types::block::Block;
-
-	/// A wrapped block that can be used with the binpacking heuristic
-	#[derive(Debug, Clone, PartialEq, Eq)]
-	pub struct WrappedBlock {
-		pub block: Block,
-		pub blob: Blob,
-	}
-
-	impl WrappedBlock {
-		/// Create a new wrapped block from a blob and block
-		pub fn new(block: Block, blob: Blob) -> Self {
-			Self { block, blob }
-		}
-
-		/// Create a new wrapped block from a block and a namespace
-		pub fn try_new(block: Block, namespace: Namespace) -> Result<Self, anyhow::Error> {
-			// first serialize the block
-			let block_bytes = bcs::to_bytes(&block)?;
-
-			// then compress the block bytes
-			let compressed_block_bytes = zstd::encode_all(block_bytes.as_slice(), 0)?;
-
-			// then create a blob from the compressed block bytes
-			let blob = Blob::new(namespace, compressed_block_bytes, AppVersion::V2)?;
-
-			Ok(Self { block, blob })
-		}
-	}
-
-	impl Splitable for WrappedBlock {
-		fn split(self, factor: usize) -> Result<Vec<Self>, anyhow::Error> {
-			let namespace = self.blob.namespace;
-			let split_blocks = self.block.split(factor)?;
-			let mut wrapped_blocks = Vec::new();
-			for block in split_blocks {
-				let wrapped_block = WrappedBlock::try_new(block, namespace)?;
-				wrapped_blocks.push(wrapped_block);
-			}
-			Ok(wrapped_blocks)
-		}
-	}
-
-	impl BinpackingWeighted for WrappedBlock {
-		fn weight(&self) -> usize {
-			self.blob.data.len()
-		}
-	}
-
-	#[cfg(test)]
-	pub mod test {
-
-		use super::*;
-		use movement_types::block;
-		use movement_types::transaction::Transaction;
-
-		#[test]
-		fn test_block_splits() -> Result<(), anyhow::Error> {
-			let transactions = vec![
-				Transaction::new(vec![0; 32], 0, 0),
-				Transaction::new(vec![1; 32], 0, 1),
-				Transaction::new(vec![2; 32], 0, 2),
-				Transaction::new(vec![3; 32], 0, 3),
-			];
-
-			let block = Block::new(
-				block::BlockMetadata::default(),
-				block::Id::test(),
-				transactions.into_iter().collect(),
-			);
-			let wrapped_block = WrappedBlock::try_new(block, Namespace::new(0, &[0])?)?;
-			let original_block = wrapped_block.clone();
-			let split_blocks = wrapped_block.split(2)?;
-			assert_eq!(split_blocks.len(), 2);
-
-			// check that block is not the same as the original block
-			assert_ne!(split_blocks[0], original_block);
-			assert_ne!(split_blocks[1], original_block);
-
-			// check that block matches the expected split
-			let expected_transactions =
-				vec![Transaction::new(vec![0; 32], 0, 0), Transaction::new(vec![1; 32], 0, 1)];
-			let expected_block = Block::new(
-				block::BlockMetadata::default(),
-				block::Id::test(),
-				expected_transactions.into_iter().collect(),
-			);
-			assert_eq!(split_blocks[0].block, expected_block);
-
-			let expected_transactions =
-				vec![Transaction::new(vec![2; 32], 0, 2), Transaction::new(vec![3; 32], 0, 3)];
-			let expected_block = Block::new(
-				block::BlockMetadata::default(),
-				block::Id::test(),
-				expected_transactions.into_iter().collect(),
-			);
-			assert_eq!(split_blocks[1].block, expected_block);
-
-			Ok(())
-		}
+		Ok(tonic::Response::new(grpc::BatchWriteResponse { blobs: vec![] }))
 	}
 }
diff --git a/protocol-units/da/movement/protocol/util/Cargo.toml b/protocol-units/da/movement/protocol/util/Cargo.toml
index 2ba85aa27..fbb98d910 100644
--- a/protocol-units/da/movement/protocol/util/Cargo.toml
+++ b/protocol-units/da/movement/protocol/util/Cargo.toml
@@ -45,6 +45,7 @@ k256 = { workspace = true }
 rand = { version = "0.8.5" }
 aptos-account-whitelist = { workspace = true }
 aptos-types = { workspace = true }
+movement-types = { workspace = true }
 
 [dev-dependencies]
 tempfile = { workspace = true }
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
index 68537805e..0c29f362f 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -51,6 +51,7 @@ impl InnerSignedBlobV1 {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub enum DaBlob {
 	SignedV1(InnerSignedBlobV1),
+	DigestV1(Vec<u8>),
 }
 
 impl From<InnerSignedBlobV1> for DaBlob {
@@ -63,24 +64,28 @@ impl DaBlob {
 	pub fn blob(&self) -> &[u8] {
 		match self {
 			DaBlob::SignedV1(inner) => inner.data.blob.as_slice(),
+			DaBlob::DigestV1(digest) => digest.as_slice(),
 		}
 	}
 
 	pub fn signature(&self) -> &[u8] {
 		match self {
 			DaBlob::SignedV1(inner) => inner.signature.as_slice(),
+			DaBlob::DigestV1(_) => &[],
 		}
 	}
 
 	pub fn timestamp(&self) -> u64 {
 		match self {
 			DaBlob::SignedV1(inner) => inner.data.timestamp,
+			DaBlob::DigestV1(_) => 0,
 		}
 	}
 
 	pub fn signer(&self) -> &[u8] {
 		match self {
 			DaBlob::SignedV1(inner) => inner.signer.as_slice(),
+			DaBlob::DigestV1(_) => &[],
 		}
 	}
 
@@ -91,6 +96,7 @@ impl DaBlob {
 	pub fn id(&self) -> &[u8] {
 		match self {
 			DaBlob::SignedV1(inner) => inner.id.as_slice(),
+			DaBlob::DigestV1(digest) => digest.as_slice(),
 		}
 	}
 
@@ -104,6 +110,7 @@ impl DaBlob {
 	{
 		match self {
 			DaBlob::SignedV1(inner) => inner.try_verify::<C>(),
+			DaBlob::DigestV1(_) => Ok(()),
 		}
 	}
 
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
index cee89e28e..fdfba01b3 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/data.rs
@@ -82,3 +82,36 @@ impl InnerSignedBlobV1Data {
 		})
 	}
 }
+
+pub mod block {
+
+	use super::*;
+	use movement_types::block;
+
+	impl TryFrom<block::Block> for InnerSignedBlobV1Data {
+		type Error = anyhow::Error;
+
+		fn try_from(block: block::Block) -> Result<Self, Self::Error> {
+			let blob = bcs::to_bytes(&block)?;
+			Ok(Self::now(blob))
+		}
+	}
+
+	impl TryFrom<block::Id> for InnerSignedBlobV1Data {
+		type Error = anyhow::Error;
+
+		fn try_from(id: block::Id) -> Result<Self, Self::Error> {
+			let blob = id.as_bytes().to_vec();
+			Ok(Self::now(blob))
+		}
+	}
+
+	impl TryFrom<Vec<block::Id>> for InnerSignedBlobV1Data {
+		type Error = anyhow::Error;
+
+		fn try_from(ids: Vec<block::Id>) -> Result<Self, Self::Error> {
+			let blob = bcs::to_bytes(&ids)?;
+			Ok(Self::now(blob))
+		}
+	}
+}
diff --git a/protocol-units/da/movement/providers/celestia/src/da/mod.rs b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
index 892ab1c3f..39595be62 100644
--- a/protocol-units/da/movement/providers/celestia/src/da/mod.rs
+++ b/protocol-units/da/movement/providers/celestia/src/da/mod.rs
@@ -61,7 +61,7 @@ impl DaOperations for Da {
 		})
 	}
 
-	fn get_ir_blobs_at_height(
+	fn get_da_blobs_at_height(
 		&self,
 		height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
diff --git a/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs b/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
deleted file mode 100644
index e4cf30239..000000000
--- a/protocol-units/da/movement/providers/celestia/src/verifier/mod.rs
+++ /dev/null
@@ -1,126 +0,0 @@
-use celestia_rpc::Client;
-use celestia_types::{nmt::Namespace, Blob};
-use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
-use movement_da_util::blob::ir::blob::DaBlob;
-use std::sync::Arc;
-
-#[derive(Clone)]
-pub struct Verifier {
-	/// The Celestia RPC client
-	pub client: Arc<Client>,
-	/// The namespace of the Celestia Blob
-	pub namespace: Namespace,
-}
-
-impl Verifier {
-	pub fn new(client: Arc<Client>, namespace: Namespace) -> Self {
-		Self { client, namespace }
-	}
-}
-
-#[tonic::async_trait]
-impl VerifierOperations<Blob, DaBlob> for Verifier {
-	/// Verifies a Celestia Blob as a Valid DaBlob
-	async fn verify(
-		&self,
-		blob: Blob,
-		_height: u64,
-	) -> Result<Verified<DaBlob>, Error> {
-		// Only assert that we can indeed get an DaBlob from the Blob
-		let ir_blob = DaBlob::try_from(blob)
-			.map_err(|e| Error::Internal(e.to_string()))?;
-
-		Ok(Verified::new(ir_blob))
-	}
-}
-
-pub mod pessimistic;
-#[cfg(all(test, feature = "integration-tests"))]
-mod tests {
-	use super::*;
-	use celestia_types::blob::GasPrice;
-
-	/// todo: Investigate why this test sporadically fails.
-	#[tokio::test]
-	pub async fn test_valid_verifies() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		Ok(())
-	}
-
-	#[tokio::test]
-	pub async fn test_absent_does_not_verify() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-		let client = Arc::new(config.connect_celestia().await?);
-		let celestia_namespace = config.celestia_namespace();
-
-		let verifier = Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		let absent_data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 7];
-
-		let absent_included = verifier.verify(VerificationMode::Cowboy, &absent_data, height).await;
-
-		match absent_included {
-			Ok(_) => {
-				assert!(false, "Should not have verified")
-			}
-			Err(_) => {}
-		}
-
-		Ok(())
-	}
-
-	#[tokio::test]
-	pub async fn test_wrong_height_does_not_verify() -> Result<(), anyhow::Error> {
-		let dot_movement = dot_movement::DotMovement::try_from_env()?;
-		let config = dot_movement
-			.try_get_config_from_json::<movement_da_util::CelestiaDaLightNodeConfig>()?;
-		let client = Arc::new(config.connect_celestia().await?);
-		let celestia_namespace = config.celestia_namespace();
-
-		let verifier = Verifier { client: client.clone(), namespace: celestia_namespace.clone() };
-
-		let data = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
-		let blob = Blob::new(celestia_namespace.clone(), data.clone())?;
-
-		let height = client.blob_submit(&[blob], GasPrice::default()).await?;
-
-		let included = verifier.verify(VerificationMode::Cowboy, &data, height).await?;
-
-		assert!(included);
-
-		let wrong_height_included =
-			verifier.verify(VerificationMode::Cowboy, &data, height + 1).await;
-
-		match wrong_height_included {
-			Ok(_) => {
-				assert!(false, "Should not have verified")
-			}
-			Err(_) => {}
-		}
-
-		Ok(())
-	}
-}
diff --git a/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs b/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
deleted file mode 100644
index 99ee743ed..000000000
--- a/protocol-units/da/movement/providers/celestia/src/verifier/pessimistic.rs
+++ /dev/null
@@ -1,72 +0,0 @@
-use celestia_rpc::{BlobClient, Client, HeaderClient};
-use celestia_types::{consts::appconsts::AppVersion, nmt::Namespace, Blob};
-use movement_da_light_node_verifier::{Error, Verified, VerifierOperations};
-use movement_da_util::blob::ir::blob::DaBlob;
-use std::sync::Arc;
-
-#[derive(Clone)]
-pub struct Verifier {
-	/// The Celestia RPC client
-	pub client: Arc<Client>,
-	/// The namespace of the Celestia Blob
-	pub namespace: Namespace,
-}
-
-impl Verifier {
-	pub fn new(client: Arc<Client>, namespace: Namespace) -> Self {
-		Self { client, namespace }
-	}
-}
-
-#[tonic::async_trait]
-impl VerifierOperations<Blob, DaBlob> for Verifier {
-	/// Verifies a Celestia Blob as a Valid DaBlob
-	async fn verify(
-		&self,
-		blob: Blob,
-		height: u64,
-	) -> Result<Verified<DaBlob>, Error> {
-		//@l-monninger: the light node itself does most of the work of verify blobs. The verification under the feature flag below is useful in zero-trust environments.
-
-		blob.validate(AppVersion::V2).map_err(|e| Error::Validation(e.to_string()))?;
-
-		// wait for the header to be at the correct height
-		self.client
-			.header_wait_for_height(height)
-			.await
-			.map_err(|e| Error::Internal(e.to_string()))?;
-
-		// get the root
-		let dah = self
-			.client
-			.header_get_by_height(height)
-			.await
-			.map_err(|e| Error::Internal(e.to_string()))?
-			.dah;
-		let root_hash = dah.row_root(0).ok_or(Error::Validation("No root hash".to_string()))?;
-
-		// get the proof
-		let proofs = self
-			.client
-			.blob_get_proof(height, self.namespace.clone(), blob.commitment)
-			.await
-			.map_err(|e| Error::Internal(e.to_string()))?;
-
-		// get the leaves
-		let leaves = blob.to_shares().map_err(|e| Error::Internal(e.to_string()))?;
-
-		// check if included
-		for proof in proofs.iter() {
-			proof
-				.verify_complete_namespace(&root_hash, &leaves, self.namespace.into())
-				.map_err(|_e| {
-					Error::Validation("failed to verify complete namespace".to_string())
-				})?;
-		}
-
-		let ir_blob = DaBlob::try_from(blob)
-			.map_err(|e| Error::Internal(e.to_string()))?;
-
-		Ok(Verified::new(ir_blob))
-	}
-}
diff --git a/protocol-units/da/movement/providers/digest-store/Cargo.toml b/protocol-units/da/movement/providers/digest-store/Cargo.toml
new file mode 100644
index 000000000..0495612f5
--- /dev/null
+++ b/protocol-units/da/movement/providers/digest-store/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+name = "movement-da-light-node-digest-store"
+version = { workspace = true }
+edition = { workspace = true }
+license = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+publish = { workspace = true }
+rust-version = { workspace = true }
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+movement-da-light-node-da = { workspace = true }
+movement-da-util = { workspace = true }
+# movement-da-light-node-verifier = { workspace = true }
+bcs = { workspace = true }
+async-stream = { workspace = true }
+anyhow = { workspace = true }
+tracing = { workspace = true }
+tonic = { workspace = true }
+rocksdb = { workspace = true }
+tokio = { workspace = true }
+
+[features]
+default = []
+client = []
+server = []
+
+
+[lints]
+workspace = true
diff --git a/protocol-units/da/movement/providers/digest-store/src/blob/mod.rs b/protocol-units/da/movement/providers/digest-store/src/blob/mod.rs
new file mode 100644
index 000000000..e69de29bb
diff --git a/protocol-units/da/movement/providers/digest-store/src/da/db.rs b/protocol-units/da/movement/providers/digest-store/src/da/db.rs
new file mode 100644
index 000000000..7d05e0cca
--- /dev/null
+++ b/protocol-units/da/movement/providers/digest-store/src/da/db.rs
@@ -0,0 +1,71 @@
+use movement_da_util::blob::ir::blob::DaBlob;
+use rocksdb::{ColumnFamilyDescriptor, Options, DB};
+use std::path::Path;
+use std::sync::Arc;
+
+mod column_families {
+	pub const DIGESTED_BLOBS: &str = "digested_blobs";
+}
+use column_families::*;
+
+/// Simple data store for digested DA blobs, keyed by blob id.
+///
+/// An async access API is provided to avoid blocking async tasks.
+/// The methods must be executed in the context of a Tokio runtime.
+#[derive(Clone, Debug)]
+pub struct DaDB {
+	inner: Arc<DB>,
+}
+
+impl DaDB {
+	pub fn open(path: impl AsRef<Path>) -> anyhow::Result<Self> {
+		let mut options = Options::default();
+		options.create_if_missing(true);
+		options.create_missing_column_families(true);
+
+		let synced_height = ColumnFamilyDescriptor::new(DIGESTED_BLOBS, Options::default());
+
+		let db = DB::open_cf_descriptors(&options, path, vec![synced_height])
+			.map_err(|e| anyhow::anyhow!("Failed to open DA DB: {:?}", e))?;
+		Ok(Self { inner: Arc::new(db) })
+	}
+
+	/// Adds a digested blob to the database.
+	pub async fn add_digested_blob(&self, id: Vec<u8>, blob: DaBlob) -> Result<(), anyhow::Error> {
+		let da_db = self.inner.clone();
+		tokio::task::spawn_blocking(move || {
+			let cf = da_db
+				.cf_handle(DIGESTED_BLOBS)
+				.ok_or(anyhow::anyhow!("No digested_blobs column family"))?;
+			let blob = bcs::to_bytes(&blob)
+				.map_err(|e| anyhow::anyhow!("Failed to serialize digested blob: {:?}", e))?;
+			da_db
+				.put_cf(&cf, id.clone(), blob)
+				.map_err(|e| anyhow::anyhow!("Failed to add digested blob: {:?}", e))
+		})
+		.await??;
+		Ok(())
+	}
+
+	/// Gets a digested blob from the database.
+	pub async fn get_digested_blob(&self, id: Vec<u8>) -> Result<Option<DaBlob>, anyhow::Error> {
+		let da_db = self.inner.clone();
+		let blob = tokio::task::spawn_blocking(move || {
+			let cf = da_db
+				.cf_handle(DIGESTED_BLOBS)
+				.ok_or(anyhow::anyhow!("No digested_blobs column family"))?;
+			let blob = da_db
+				.get_cf(&cf, id)
+				.map_err(|e| anyhow::anyhow!("Failed to get digested blob: {:?}", e))?;
+			let blob = match blob {
+				Some(blob) => Some(bcs::from_bytes(&blob).map_err(|e| {
+					anyhow::anyhow!("Failed to deserialize digested blob: {:?}", e)
+				})?),
+				None => None,
+			};
+			Ok::<Option<DaBlob>, anyhow::Error>(blob)
+		})
+		.await??;
+		Ok(blob)
+	}
+}
diff --git a/protocol-units/da/movement/providers/digest-store/src/da/mod.rs b/protocol-units/da/movement/providers/digest-store/src/da/mod.rs
new file mode 100644
index 000000000..a11deee53
--- /dev/null
+++ b/protocol-units/da/movement/providers/digest-store/src/da/mod.rs
@@ -0,0 +1,89 @@
+pub mod db;
+
+use movement_da_light_node_da::{CertificateStream, DaError, DaOperations};
+use movement_da_util::blob::ir::blob::DaBlob;
+use std::future::Future;
+use std::path::Path;
+use std::pin::Pin;
+use std::sync::Arc;
+
+#[derive(Clone)]
+pub struct Da<D>
+where
+	D: DaOperations,
+{
+	/// The inner DA implementation which the digest store wraps.
+	inner: Arc<D>,
+	/// The RocksDB instance.
+	db: db::DaDB,
+}
+
+impl<D> Da<D>
+where
+	D: DaOperations,
+{
+	/// Creates a new Da instance wrapping the inner DA and opening the digest DB at the given path.
+	pub fn try_new(inner: D, db_path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
+		Ok(Self { inner: Arc::new(inner), db: db::DaDB::open(db_path)? })
+	}
+}
+
+impl<D> DaOperations for Da<D>
+where
+	D: DaOperations,
+{
+	fn submit_blob(
+		&self,
+		data: DaBlob,
+	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>> {
+		Box::pin(async move {
+			// get the digest
+			let digest = data.id().to_vec();
+
+			// store the digested blob
+			self.db
+				.add_digested_blob(digest.clone(), data)
+				.await
+				.map_err(|e| DaError::Internal(format!("failed to store digested blob: {}", e)))?;
+
+			// create a digest blob
+			let digest_blob = DaBlob::DigestV1(digest);
+
+			// submit the digest blob to the inner da
+			self.inner.submit_blob(digest_blob).await?;
+
+			Ok(())
+		})
+	}
+
+	fn get_da_blobs_at_height(
+		&self,
+		height: u64,
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
+		Box::pin(async move {
+			// get the blobs from the inner da
+			let inner_blobs = self.inner.get_da_blobs_at_height(height).await?;
+
+			let mut blobs = Vec::new();
+			for inner_blob in inner_blobs {
+				if let Some(blob) =
+					self.db.get_digested_blob(inner_blob.id().to_vec()).await.map_err(|e| {
+						DaError::NonFatalBlobsAtHeight(
+							format!("failed to get digested blob: {}", e).into(),
+						)
+					})? {
+					blobs.push(blob);
+				}
+			}
+
+			Ok(blobs)
+		})
+	}
+
+	fn stream_certificates(
+		&self,
+	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>> {
+		// simply pass through to streaming the underlying DA certificates
+		self.inner.stream_certificates()
+	}
+}
diff --git a/protocol-units/da/movement/providers/digest-store/src/lib.rs b/protocol-units/da/movement/providers/digest-store/src/lib.rs
new file mode 100644
index 000000000..2305b23f3
--- /dev/null
+++ b/protocol-units/da/movement/providers/digest-store/src/lib.rs
@@ -0,0 +1,3 @@
+pub mod blob;
+pub mod da;
+// pub mod verifier;

From 881b7d444e16161d87f60534f5f43ad60ee22541 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 02:00:45 -0800
Subject: [PATCH 12/43] feat: light node refactor.

---
 Cargo.lock                                    |  2 ++
 Cargo.toml                                    |  2 ++
 .../movement/protocol/light-node/Cargo.toml   |  2 ++
 .../movement/protocol/light-node/src/main.rs  |  5 +++-
 .../protocol/light-node/src/manager.rs        |  8 +++++--
 .../protocol/light-node/src/passthrough.rs    | 22 +++++++----------
 .../protocol/light-node/src/sequencer.rs      |  5 ++--
 .../util/src/config/local/digest_store.rs     | 24 +++++++++++++++++++
 .../protocol/util/src/config/local/mod.rs     |  6 +++++
 .../movement/protocol/util/src/config/mod.rs  |  9 +++++++
 10 files changed, 67 insertions(+), 18 deletions(-)
 create mode 100644 protocol-units/da/movement/protocol/util/src/config/local/digest_store.rs

diff --git a/Cargo.lock b/Cargo.lock
index 793279821..82c323d5e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10212,7 +10212,9 @@ dependencies = [
  "k256",
  "memseq",
  "movement-algs",
+ "movement-da-light-node-celestia",
  "movement-da-light-node-da",
+ "movement-da-light-node-digest-store",
  "movement-da-light-node-prevalidator",
  "movement-da-light-node-proto",
  "movement-da-light-node-verifier",
diff --git a/Cargo.toml b/Cargo.toml
index a2a6be450..4ccd5e026 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -81,6 +81,8 @@ movement-da-light-node-setup = { path = "protocol-units/da/movement/protocol/set
 movement-da-light-node-verifier = { path = "protocol-units/da/movement/protocol/verifier" }
 movement-da-light-node-prevalidator = { path = "protocol-units/da/movement/protocol/prevalidator" }
 movement-da-light-node-da = { path = "protocol-units/da/movement/protocol/da" }
+movement-da-light-node-celestia = { path = "protocol-units/da/movement/providers/celestia" }
+movement-da-light-node-digest-store = { path = "protocol-units/da/movement/providers/digest-store" }
 ## execution
 maptos-dof-execution = { path = "protocol-units/execution/maptos/dof" }
 maptos-opt-executor = { path = "protocol-units/execution/maptos/opt-executor" }
diff --git a/protocol-units/da/movement/protocol/light-node/Cargo.toml b/protocol-units/da/movement/protocol/light-node/Cargo.toml
index a1df45a9f..707e152b0 100644
--- a/protocol-units/da/movement/protocol/light-node/Cargo.toml
+++ b/protocol-units/da/movement/protocol/light-node/Cargo.toml
@@ -43,6 +43,8 @@ ecdsa = { workspace = true }
 k256 = { workspace = true }
 thiserror = { workspace = true }
 movement-da-light-node-da = { workspace = true }
+movement-da-light-node-celestia = { workspace = true }
+movement-da-light-node-digest-store = { workspace = true }
 
 # sequencer
 memseq = { workspace = true, optional = true }
diff --git a/protocol-units/da/movement/protocol/light-node/src/main.rs b/protocol-units/da/movement/protocol/light-node/src/main.rs
index 098736b38..65e117dcf 100644
--- a/protocol-units/da/movement/protocol/light-node/src/main.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/main.rs
@@ -1,5 +1,7 @@
 use k256::Secp256k1;
 use movement_da_light_node::{LightNode, Manager};
+use movement_da_light_node_celestia::da::Da as CelestiaDa;
+use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 
 use std::env;
 
@@ -15,7 +17,8 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 	let config_path = dot_movement.get_config_json_path();
 	let config_file = tokio::fs::File::open(config_path).await?;
 	// todo: consider whether LightNode implementation should encapsulate signing type
-	let manager = Manager::<LightNode<Secp256k1>>::new(config_file).await?;
+	let manager =
+		Manager::<LightNode<Secp256k1, DigestStoreDa<CelestiaDa>>>::new(config_file).await?;
 	manager.try_run().await?;
 
 	Ok(())
diff --git a/protocol-units/da/movement/protocol/light-node/src/manager.rs b/protocol-units/da/movement/protocol/light-node/src/manager.rs
index 45fdb5f94..0b236e0fa 100644
--- a/protocol-units/da/movement/protocol/light-node/src/manager.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/manager.rs
@@ -12,6 +12,8 @@ use ecdsa::{
 	SignatureSize,
 };
 use godfig::{backend::config_file::ConfigFile, Godfig};
+use movement_da_light_node_celestia::da::Da as CelestiaDa;
+use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 use movement_da_util::config::Config;
 
 #[derive(Clone)]
@@ -24,7 +26,7 @@ where
 }
 
 // Implements a very simple manager using a marker strategy pattern.
-impl<C> Manager<LightNode<C>>
+impl<C> Manager<LightNode<C, DigestStoreDa<CelestiaDa>>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -42,7 +44,9 @@ where
 		Ok(Self { godfig, _marker: std::marker::PhantomData })
 	}
 
-	pub async fn try_light_node(&self) -> Result<LightNode<C>, anyhow::Error>
+	pub async fn try_light_node(
+		&self,
+	) -> Result<LightNode<C, DigestStoreDa<CelestiaDa>>, anyhow::Error>
 	where
 		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 056a62759..704e4e9de 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -4,7 +4,9 @@ use tokio_stream::{Stream, StreamExt};
 use tracing::info;
 
 // FIXME: glob imports are bad style
+use movement_da_light_node_celestia::da::Da as CelestiaDa;
 use movement_da_light_node_da::DaOperations;
+use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
 use movement_da_light_node_proto::*;
 use movement_da_util::{blob::ir::data::InnerSignedBlobV1Data, config::Config};
@@ -55,33 +57,27 @@ where
 	}
 }
 
-impl<C, Da> LightNodeRuntime for LightNode<C, Da>
+impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
-	Da: DaOperations,
 {
 	/// Tries to create a new LightNode instance from the toml config file.
-	async fn try_from_config(config: Config, da: Da) -> Result<Self, anyhow::Error> {
+	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
 		let signing_key_str = config.da_signing_key();
 		let hex_bytes = hex::decode(signing_key_str)?;
 
 		let signing_key = SigningKey::from_bytes(hex_bytes.as_slice().try_into()?)
 			.map_err(|e| anyhow::anyhow!("Failed to create signing key: {}", e))?;
 
-		Ok(Self {
-			config: config.clone(),
-			/*verifier: Arc::new(Box::new(Verifier::<C>::new(
-				client,
-				config.celestia_namespace(),
-				config.da_signers_sec1_keys(),
-			))),*/
-			da: Arc::new(da),
-			signing_key,
-		})
+		let client = Arc::new(config.connect_celestia().await?);
+		let celestia_da = CelestiaDa::new(config.celestia_namespace(), client);
+		let digest_store_da = DigestStoreDa::try_new(celestia_da, config.digest_store_db_path())?;
+
+		Ok(Self { config: config.clone(), da: Arc::new(digest_store_da), signing_key })
 	}
 
 	fn try_service_address(&self) -> Result<String, anyhow::Error> {
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index 8777f6977..a3b064cab 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -20,6 +20,8 @@ use std::sync::{atomic::AtomicU64, Arc};
 use std::time::Duration;
 
 use memseq::{Sequencer, Transaction};
+use movement_da_light_node_celestia::da::Da as CelestiaDa;
+use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 use movement_da_light_node_proto as grpc;
 use movement_da_light_node_proto::blob_response::BlobType;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
@@ -65,14 +67,13 @@ where
 	}
 }
 
-impl<C, Da> LightNodeRuntime for LightNode<C, Da>
+impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
 	SignatureSize<C>: ArrayLength<u8>,
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
-	Da: DaOperations,
 {
 	async fn try_from_config(config: Config) -> Result<Self, anyhow::Error> {
 		info!("Initializing LightNode in sequencer mode from environment.");
diff --git a/protocol-units/da/movement/protocol/util/src/config/local/digest_store.rs b/protocol-units/da/movement/protocol/util/src/config/local/digest_store.rs
new file mode 100644
index 000000000..5af870a5b
--- /dev/null
+++ b/protocol-units/da/movement/protocol/util/src/config/local/digest_store.rs
@@ -0,0 +1,24 @@
+use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
+
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct Config {
+	/// The path to the sequencer database
+	#[serde(default = "default_sequencer_database_path")]
+	pub digest_store_db_path: PathBuf,
+}
+
+pub fn default_sequencer_database_path() -> PathBuf {
+	// check if DIGEST_STORE_DB_PATH is set otherwise randomly generate in /tmp
+	std::env::var("DIGEST_STORE_DB_PATH").map(PathBuf::from).unwrap_or_else(|_| {
+		let mut path = std::env::temp_dir();
+		path.push("digest_store_db");
+		path
+	})
+}
+
+impl Default for Config {
+	fn default() -> Self {
+		Self { digest_store_db_path: default_sequencer_database_path() }
+	}
+}
diff --git a/protocol-units/da/movement/protocol/util/src/config/local/mod.rs b/protocol-units/da/movement/protocol/util/src/config/local/mod.rs
index 5fd4fe296..6b09006ab 100644
--- a/protocol-units/da/movement/protocol/util/src/config/local/mod.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/local/mod.rs
@@ -1,6 +1,7 @@
 pub mod appd;
 pub mod bridge;
 pub mod da_light_node;
+pub mod digest_store;
 use crate::config::common::{default_celestia_force_new_chain, default_da_light_node_is_initial};
 use aptos_account_whitelist::config::Config as WhitelistConfig;
 use memseq_util::Config as MemseqConfig;
@@ -34,6 +35,10 @@ pub struct Config {
 	/// The access control config
 	#[serde(default)]
 	pub access_control: WhitelistConfig,
+
+	/// The digest store configuration
+	#[serde(default)]
+	pub digest_store: digest_store::Config,
 }
 
 impl Default for Config {
@@ -46,6 +51,7 @@ impl Default for Config {
 			memseq: MemseqConfig::default(),
 			da_light_node_is_initial: default_da_light_node_is_initial(),
 			access_control: WhitelistConfig::default(),
+			digest_store: digest_store::Config::default(),
 		}
 	}
 }
diff --git a/protocol-units/da/movement/protocol/util/src/config/mod.rs b/protocol-units/da/movement/protocol/util/src/config/mod.rs
index 82e3a0eec..decac94ee 100644
--- a/protocol-units/da/movement/protocol/util/src/config/mod.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/mod.rs
@@ -4,6 +4,7 @@ use celestia_rpc::Client;
 use celestia_types::nmt::Namespace;
 use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
+use std::path::PathBuf;
 
 pub mod common;
 pub mod local;
@@ -248,6 +249,14 @@ impl Config {
 			Config::Mocha(local) => local.access_control.whitelisted_accounts(),
 		}
 	}
+
+	pub fn digest_store_db_path(&self) -> PathBuf {
+		match self {
+			Config::Local(local) => local.digest_store.digest_store_db_path.clone(),
+			Config::Arabica(local) => local.digest_store.digest_store_db_path.clone(),
+			Config::Mocha(local) => local.digest_store.digest_store_db_path.clone(),
+		}
+	}
 }
 
 /// The M1 DA Light Node configuration as should be read from file.

From d32fdf7c1ee8178f6ad6b4531267010e165d2b44 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 02:03:21 -0800
Subject: [PATCH 13/43] feat: accept both sequenced and passed through blobs.

---
 .../movement-full-node/src/node/tasks/execute_settle.rs    | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
index 903544993..974a82d39 100644
--- a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
@@ -104,12 +104,15 @@ where
 			.blob_type
 			.ok_or(anyhow::anyhow!("No blob type in response"))?
 		{
+			// To allow for DA migrations we accept both sequenced and passed through blobs
 			blob_response::BlobType::SequencedBlobBlock(blob) => {
 				(blob.data, blob.timestamp, blob.blob_id, blob.height)
 			}
-			_ => {
-				anyhow::bail!("Invalid blob type in response")
+			// To allow for DA migrations we accept both sequenced and passed through blobs
+			blob_response::BlobType::PassedThroughBlob(blob) => {
+				(blob.data, blob.timestamp, blob.blob_id, blob.height)
 			}
+			_ => anyhow::bail!("Invalid blob type"),
 		};
 
 		info!(

From 71c0ecf23f929fc014ae4f22f6df0629441ab98c Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 02:10:25 -0800
Subject: [PATCH 14/43] fix: runners.

---
 Cargo.lock                                    | 122 +++++++++---------
 .../{runners => celestia-runners}/Cargo.toml  |   2 +-
 .../src/bin/celestia-appd.rs                  |   2 +-
 .../src/bin/celestia-bridge.rs                |   2 +-
 .../src/bin/celestia-light.rs                 |   2 +-
 .../src/celestia_appd/local.rs                |   0
 .../src/celestia_appd/mod.rs                  |   0
 .../src/celestia_bridge/local.rs              |   0
 .../src/celestia_bridge/mod.rs                |   0
 .../src/celestia_light/arabica.rs             |   0
 .../src/celestia_light/mocha.rs               |   0
 .../src/celestia_light/mod.rs                 |   0
 .../{runners => celestia-runners}/src/lib.rs  |   0
 .../movement/protocol/light-node/Cargo.toml   |   2 +-
 .../movement/protocol/light-node/src/main.rs  |   2 +-
 15 files changed, 67 insertions(+), 67 deletions(-)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/Cargo.toml (95%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/bin/celestia-appd.rs (90%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/bin/celestia-bridge.rs (90%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/bin/celestia-light.rs (90%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_appd/local.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_appd/mod.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_bridge/local.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_bridge/mod.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_light/arabica.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_light/mocha.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/celestia_light/mod.rs (100%)
 rename protocol-units/da/movement/protocol/{runners => celestia-runners}/src/lib.rs (100%)

diff --git a/Cargo.lock b/Cargo.lock
index 82c323d5e..e65bedbd4 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10127,6 +10127,67 @@ dependencies = [
  "tokio",
 ]
 
+[[package]]
+name = "movement-celestia-da-light-node"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "async-stream",
+ "bcs 0.1.4",
+ "celestia-rpc",
+ "celestia-types",
+ "chrono",
+ "dot-movement",
+ "ecdsa 0.16.9",
+ "futures",
+ "godfig",
+ "hex",
+ "k256",
+ "memseq",
+ "movement-algs",
+ "movement-da-light-node-celestia",
+ "movement-da-light-node-da",
+ "movement-da-light-node-digest-store",
+ "movement-da-light-node-prevalidator",
+ "movement-da-light-node-proto",
+ "movement-da-light-node-verifier",
+ "movement-da-util",
+ "movement-tracing",
+ "movement-types",
+ "prost 0.13.3",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "thiserror 1.0.69",
+ "tokio",
+ "tokio-stream",
+ "tonic 0.12.3",
+ "tonic-reflection 0.12.3",
+ "tracing",
+ "zstd 0.13.2",
+]
+
+[[package]]
+name = "movement-celestia-da-light-node-runners"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "commander",
+ "dot-movement",
+ "godfig",
+ "hex",
+ "movement-da-util",
+ "rand 0.7.3",
+ "reqwest 0.12.9",
+ "serde",
+ "serde_json",
+ "tempfile",
+ "tokio",
+ "tokio-stream",
+ "tracing",
+ "tracing-subscriber 0.3.18",
+]
+
 [[package]]
 name = "movement-client"
 version = "0.0.2"
@@ -10194,46 +10255,6 @@ dependencies = [
  "tracing",
 ]
 
-[[package]]
-name = "movement-da-light-node"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "async-stream",
- "bcs 0.1.4",
- "celestia-rpc",
- "celestia-types",
- "chrono",
- "dot-movement",
- "ecdsa 0.16.9",
- "futures",
- "godfig",
- "hex",
- "k256",
- "memseq",
- "movement-algs",
- "movement-da-light-node-celestia",
- "movement-da-light-node-da",
- "movement-da-light-node-digest-store",
- "movement-da-light-node-prevalidator",
- "movement-da-light-node-proto",
- "movement-da-light-node-verifier",
- "movement-da-util",
- "movement-tracing",
- "movement-types",
- "prost 0.13.3",
- "serde",
- "serde_json",
- "tempfile",
- "thiserror 1.0.69",
- "tokio",
- "tokio-stream",
- "tonic 0.12.3",
- "tonic-reflection 0.12.3",
- "tracing",
- "zstd 0.13.2",
-]
-
 [[package]]
 name = "movement-da-light-node-celestia"
 version = "0.0.2"
@@ -10330,27 +10351,6 @@ dependencies = [
  "tonic-build",
 ]
 
-[[package]]
-name = "movement-da-light-node-runners"
-version = "0.0.2"
-dependencies = [
- "anyhow",
- "commander",
- "dot-movement",
- "godfig",
- "hex",
- "movement-da-util",
- "rand 0.7.3",
- "reqwest 0.12.9",
- "serde",
- "serde_json",
- "tempfile",
- "tokio",
- "tokio-stream",
- "tracing",
- "tracing-subscriber 0.3.18",
-]
-
 [[package]]
 name = "movement-da-light-node-setup"
 version = "0.0.2"
diff --git a/protocol-units/da/movement/protocol/runners/Cargo.toml b/protocol-units/da/movement/protocol/celestia-runners/Cargo.toml
similarity index 95%
rename from protocol-units/da/movement/protocol/runners/Cargo.toml
rename to protocol-units/da/movement/protocol/celestia-runners/Cargo.toml
index 452a6beae..992127f4d 100644
--- a/protocol-units/da/movement/protocol/runners/Cargo.toml
+++ b/protocol-units/da/movement/protocol/celestia-runners/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-da-light-node-runners"
+name = "movement-celestia-da-light-node-runners"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
diff --git a/protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-appd.rs
similarity index 90%
rename from protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-appd.rs
index 143b7a5e3..fcb90d30f 100644
--- a/protocol-units/da/movement/protocol/runners/src/bin/celestia-appd.rs
+++ b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-appd.rs
@@ -1,5 +1,5 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_da_light_node_runners::{celestia_appd::CelestiaAppd, Runner};
+use movement_celestia_da_light_node_runners::{celestia_appd::CelestiaAppd, Runner};
 use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
diff --git a/protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-bridge.rs
similarity index 90%
rename from protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-bridge.rs
index d2471ee6d..1c24dfdd0 100644
--- a/protocol-units/da/movement/protocol/runners/src/bin/celestia-bridge.rs
+++ b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-bridge.rs
@@ -1,5 +1,5 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_da_light_node_runners::{celestia_bridge::CelestiaBridge, Runner};
+use movement_celestia_da_light_node_runners::{celestia_bridge::CelestiaBridge, Runner};
 use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
diff --git a/protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-light.rs
similarity index 90%
rename from protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-light.rs
index 3e9fe2390..ed38500b7 100644
--- a/protocol-units/da/movement/protocol/runners/src/bin/celestia-light.rs
+++ b/protocol-units/da/movement/protocol/celestia-runners/src/bin/celestia-light.rs
@@ -1,5 +1,5 @@
 use godfig::{backend::config_file::ConfigFile, Godfig};
-use movement_da_light_node_runners::{celestia_light::CelestiaLight, Runner};
+use movement_celestia_da_light_node_runners::{celestia_light::CelestiaLight, Runner};
 use movement_da_util::CelestiaDaLightNodeConfig;
 
 #[tokio::main]
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_appd/local.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_appd/local.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_appd/local.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_appd/local.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_appd/mod.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_appd/mod.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_appd/mod.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_appd/mod.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_bridge/local.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_bridge/local.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_bridge/local.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_bridge/local.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_bridge/mod.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_bridge/mod.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_bridge/mod.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_bridge/mod.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_light/arabica.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/arabica.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_light/arabica.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/arabica.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_light/mocha.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/mocha.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_light/mocha.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/mocha.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/celestia_light/mod.rs b/protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/mod.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/celestia_light/mod.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/celestia_light/mod.rs
diff --git a/protocol-units/da/movement/protocol/runners/src/lib.rs b/protocol-units/da/movement/protocol/celestia-runners/src/lib.rs
similarity index 100%
rename from protocol-units/da/movement/protocol/runners/src/lib.rs
rename to protocol-units/da/movement/protocol/celestia-runners/src/lib.rs
diff --git a/protocol-units/da/movement/protocol/light-node/Cargo.toml b/protocol-units/da/movement/protocol/light-node/Cargo.toml
index 707e152b0..92f59ee81 100644
--- a/protocol-units/da/movement/protocol/light-node/Cargo.toml
+++ b/protocol-units/da/movement/protocol/light-node/Cargo.toml
@@ -1,5 +1,5 @@
 [package]
-name = "movement-da-light-node"
+name = "movement-celestia-da-light-node"
 version = { workspace = true }
 edition = { workspace = true }
 license = { workspace = true }
diff --git a/protocol-units/da/movement/protocol/light-node/src/main.rs b/protocol-units/da/movement/protocol/light-node/src/main.rs
index 65e117dcf..0004d3b97 100644
--- a/protocol-units/da/movement/protocol/light-node/src/main.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/main.rs
@@ -1,5 +1,5 @@
 use k256::Secp256k1;
-use movement_da_light_node::{LightNode, Manager};
+use movement_celestia_da_light_node::{LightNode, Manager};
 use movement_da_light_node_celestia::da::Da as CelestiaDa;
 use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 

From a3d808a8ef219615f785f688fea7a07b36307be7 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 02:17:41 -0800
Subject: [PATCH 15/43] fix: update build scripts.

---
 scripts/services/bridge/build                       | 2 +-
 scripts/services/movement-full-node/build           | 2 +-
 scripts/services/wait-for-celestia-light-node/build | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/services/bridge/build b/scripts/services/bridge/build
index b25be85a3..43ad46f3d 100755
--- a/scripts/services/bridge/build
+++ b/scripts/services/bridge/build
@@ -24,7 +24,7 @@ cargo build $CARGO_PROFILE_FLAGS -p movement-full-node-setup
 echo "Built movement-full-node-setup!"
 
 echo "Building wait-for-celestia-light-node..."
-cargo build $CARGO_PROFILE_FLAGS -p movement-celestia-da-util --bin wait-for-celestia-light-node
+cargo build $CARGO_PROFILE_FLAGS -p movement-da-util --bin wait-for-celestia-light-node
 echo "Built wait-for-celestia-light-node!"
 
 echo "Building Bridge..."
diff --git a/scripts/services/movement-full-node/build b/scripts/services/movement-full-node/build
index 74624bf46..cb33e4c3b 100755
--- a/scripts/services/movement-full-node/build
+++ b/scripts/services/movement-full-node/build
@@ -25,5 +25,5 @@ echo "Built movement-full-node-setup!"
 echo "Bulding movement-util..."
 cargo build $CARGO_PROFILE_FLAGS -p movement-util
 echo "Built movement-util!"
-cargo build $CARGO_PROFILE_FLAGS -p movement-celestia-da-util --bin wait-for-celestia-light-node
+cargo build $CARGO_PROFILE_FLAGS -p movement-da-util --bin wait-for-celestia-light-node
 echo "Built wait-for-celestia-light-node!"
diff --git a/scripts/services/wait-for-celestia-light-node/build b/scripts/services/wait-for-celestia-light-node/build
index 1d24ceb48..82e789ba0 100755
--- a/scripts/services/wait-for-celestia-light-node/build
+++ b/scripts/services/wait-for-celestia-light-node/build
@@ -13,5 +13,5 @@ else
 fi
 
 echo "Building wait-for-celestia-light-node..."
-cargo build $CARGO_PROFILE_FLAGS -p movement-celestia-da-util --bin wait-for-celestia-light-node
+cargo build $CARGO_PROFILE_FLAGS -p movement-da-util --bin wait-for-celestia-light-node
 echo "Built wait-for-celestia-light-node!"

From b708ae89ade1cf7a4c05a990011f212467b3adec Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 02:23:17 -0800
Subject: [PATCH 16/43] fix: remove cachix.

---
 .github/workflows/checks-all.yml | 36 --------------------------------
 1 file changed, 36 deletions(-)

diff --git a/.github/workflows/checks-all.yml b/.github/workflows/checks-all.yml
index 8c041ea50..0488a0425 100755
--- a/.github/workflows/checks-all.yml
+++ b/.github/workflows/checks-all.yml
@@ -32,11 +32,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run Cargo Check in nix environment
       run: |
         nix develop --command bash  -c "cargo check --all-targets"  
@@ -92,11 +87,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run Movement Full Node Tests Against Local ETH and Local Celestia
       env:
         CELESTIA_LOG_LEVEL: FATAL # adjust the log level while debugging
@@ -205,11 +195,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run Movement Full Node Tests Against Holesky and Local Celestia
       env: 
         CELESTIA_LOG_LEVEL: FATAL # adjust the log level while debugging
@@ -236,11 +221,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run Movement DA Light Node tests in nix environment
       # adjust the log level while debugging
       run: CELESTIA_LOG_LEVEL=FATAL nix develop --command bash  -c "just movement-celestia-da-light-node native build.setup.test.local -t=false"  
@@ -267,12 +247,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run MCR Client Tests
       run: nix develop --command bash  -c "just mcr-client native build.local.test -t=false"
 
@@ -299,11 +273,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run Aptos Tests
       run: |
         nix develop --command bash -c "
@@ -333,11 +302,6 @@ jobs:
     - name: Install Nix
       uses: DeterminateSystems/nix-installer-action@main
 
-    - uses: cachix/cachix-action@v15
-      with:
-        name: movementlabs
-        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-
     - name: Run foundry tests
       run: |
         nix develop --command bash -c "

From 2628c734699439ebd7eced7a53314f62cf5696c2 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 03:14:43 -0800
Subject: [PATCH 17/43] feat: merge disk-fifo.

---
 Cargo.lock                                    |  16 +++
 .../providers/digest-store/src/blob/mod.rs    |   0
 .../providers/digest-store/src/lib.rs         |   2 -
 .../movement/providers/disk-fifo/Cargo.toml   |  34 +++++
 .../movement/providers/disk-fifo/src/da/db.rs | 132 ++++++++++++++++++
 .../providers/disk-fifo/src/da/mod.rs         |  86 ++++++++++++
 .../movement/providers/disk-fifo/src/lib.rs   |   1 +
 7 files changed, 269 insertions(+), 2 deletions(-)
 delete mode 100644 protocol-units/da/movement/providers/digest-store/src/blob/mod.rs
 create mode 100644 protocol-units/da/movement/providers/disk-fifo/Cargo.toml
 create mode 100644 protocol-units/da/movement/providers/disk-fifo/src/da/db.rs
 create mode 100644 protocol-units/da/movement/providers/disk-fifo/src/da/mod.rs
 create mode 100644 protocol-units/da/movement/providers/disk-fifo/src/lib.rs

diff --git a/Cargo.lock b/Cargo.lock
index e65bedbd4..589a223af 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10315,6 +10315,22 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "movement-da-light-node-disk-fifo"
+version = "0.0.2"
+dependencies = [
+ "anyhow",
+ "async-stream",
+ "bcs 0.1.4",
+ "movement-da-light-node-da",
+ "movement-da-util",
+ "rocksdb",
+ "tokio",
+ "tokio-stream",
+ "tonic 0.12.3",
+ "tracing",
+]
+
 [[package]]
 name = "movement-da-light-node-prevalidator"
 version = "0.0.2"
diff --git a/protocol-units/da/movement/providers/digest-store/src/blob/mod.rs b/protocol-units/da/movement/providers/digest-store/src/blob/mod.rs
deleted file mode 100644
index e69de29bb..000000000
diff --git a/protocol-units/da/movement/providers/digest-store/src/lib.rs b/protocol-units/da/movement/providers/digest-store/src/lib.rs
index 2305b23f3..f0a4622da 100644
--- a/protocol-units/da/movement/providers/digest-store/src/lib.rs
+++ b/protocol-units/da/movement/providers/digest-store/src/lib.rs
@@ -1,3 +1 @@
-pub mod blob;
 pub mod da;
-// pub mod verifier;
diff --git a/protocol-units/da/movement/providers/disk-fifo/Cargo.toml b/protocol-units/da/movement/providers/disk-fifo/Cargo.toml
new file mode 100644
index 000000000..0fd2e332e
--- /dev/null
+++ b/protocol-units/da/movement/providers/disk-fifo/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "movement-da-light-node-disk-fifo"
+version = { workspace = true }
+edition = { workspace = true }
+license = { workspace = true }
+authors = { workspace = true }
+repository = { workspace = true }
+homepage = { workspace = true }
+publish = { workspace = true }
+rust-version = { workspace = true }
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+movement-da-light-node-da = { workspace = true }
+movement-da-util = { workspace = true }
+# movement-da-light-node-verifier = { workspace = true }
+bcs = { workspace = true }
+async-stream = { workspace = true }
+anyhow = { workspace = true }
+tracing = { workspace = true }
+tonic = { workspace = true }
+rocksdb = { workspace = true }
+tokio = { workspace = true }
+tokio-stream = { workspace = true }
+
+[features]
+default = []
+client = []
+server = []
+
+
+[lints]
+workspace = true
diff --git a/protocol-units/da/movement/providers/disk-fifo/src/da/db.rs b/protocol-units/da/movement/providers/disk-fifo/src/da/db.rs
new file mode 100644
index 000000000..ad5eddaa4
--- /dev/null
+++ b/protocol-units/da/movement/providers/disk-fifo/src/da/db.rs
@@ -0,0 +1,132 @@
+use movement_da_util::blob::ir::blob::DaBlob;
+use rocksdb::{ColumnFamilyDescriptor, Options, TransactionDB, TransactionDBOptions};
+use std::path::Path;
+use std::sync::Arc;
+use tokio::task;
+
+mod column_families {
+	pub const BLOBS: &str = "blobs";
+	pub const LAST_HEIGHT: &str = "last_height";
+}
+use column_families::*;
+
+/// Simple data store for locally recorded DA events with height tracking.
+///
+/// Methods are designed to work within a Tokio runtime.
+#[derive(Clone)]
+pub struct DaDb {
+	inner: Arc<TransactionDB>,
+}
+
+impl DaDb {
+	/// Opens or creates the transactional database at the given path.
+	pub fn open(path: impl AsRef<Path>) -> anyhow::Result<Self> {
+		let mut options = Options::default();
+		options.create_if_missing(true);
+		options.create_missing_column_families(true);
+
+		let blobs_cf = ColumnFamilyDescriptor::new(BLOBS, Options::default());
+		let last_height_cf = ColumnFamilyDescriptor::new(LAST_HEIGHT, Options::default());
+
+		let db = TransactionDB::open_cf_descriptors(
+			&options,
+			&TransactionDBOptions::default(),
+			path,
+			vec![blobs_cf, last_height_cf],
+		)
+		.map_err(|e| anyhow::anyhow!("Failed to open transactional database: {:?}", e))?;
+
+		Ok(Self { inner: Arc::new(db) })
+	}
+
+	/// Adds a blob at the next height, using a transaction to ensure consistency.
+	pub async fn add_blob(&self, blob: DaBlob) -> anyhow::Result<u64> {
+		let db = self.inner.clone();
+
+		task::spawn_blocking(move || {
+			let transaction = db.transaction();
+
+			// Retrieve the current height
+			let last_height_cf = db
+				.cf_handle(LAST_HEIGHT)
+				.ok_or_else(|| anyhow::anyhow!("Missing column family: {}", LAST_HEIGHT))?;
+
+			let current_height: u64 = transaction
+				.get_cf(&last_height_cf, b"last_height")
+				.unwrap_or_else(|_| Some(vec![0]))
+				.and_then(|v| String::from_utf8(v).ok())
+				.and_then(|s| s.parse::<u64>().ok())
+				.unwrap_or(0);
+
+			// Serialize the blob
+			let blobs_cf = db
+				.cf_handle(BLOBS)
+				.ok_or_else(|| anyhow::anyhow!("Missing column family: {}", BLOBS))?;
+
+			let blob_bytes = bcs::to_bytes(&blob)
+				.map_err(|e| anyhow::anyhow!("Failed to serialize blob: {:?}", e))?;
+
+			// Store the blob at the current height
+			transaction
+				.put_cf(&blobs_cf, current_height.to_be_bytes(), blob_bytes)
+				.map_err(|e| anyhow::anyhow!("Failed to store blob: {:?}", e))?;
+
+			// Update the height
+			let next_height = current_height + 1;
+			transaction
+				.put_cf(&last_height_cf, b"last_height", next_height.to_string().as_bytes())
+				.map_err(|e| anyhow::anyhow!("Failed to update height: {:?}", e))?;
+
+			// Commit the transaction
+			transaction
+				.commit()
+				.map_err(|e| anyhow::anyhow!("Transaction failed: {:?}", e))?;
+
+			Ok(current_height)
+		})
+		.await?
+	}
+
+	/// Retrieves a blob at the specified height.
+	pub async fn get_blob_at_height(&self, height: u64) -> anyhow::Result<Option<DaBlob>> {
+		let db = self.inner.clone();
+
+		task::spawn_blocking(move || {
+			let blobs_cf = db
+				.cf_handle(BLOBS)
+				.ok_or_else(|| anyhow::anyhow!("Missing column family: {}", BLOBS))?;
+
+			match db.get_cf(&blobs_cf, height.to_be_bytes()) {
+				Ok(Some(blob_bytes)) => {
+					let blob = bcs::from_bytes(&blob_bytes)
+						.map_err(|e| anyhow::anyhow!("Failed to deserialize blob: {:?}", e))?;
+					Ok(Some(blob))
+				}
+				Ok(None) => Ok(None),
+				Err(e) => Err(anyhow::anyhow!("Failed to retrieve blob: {:?}", e)),
+			}
+		})
+		.await?
+	}
+
+	/// Gets the current height.
+	pub async fn current_height(&self) -> anyhow::Result<u64> {
+		let db = self.inner.clone();
+
+		task::spawn_blocking(move || {
+			let last_height_cf = db
+				.cf_handle(LAST_HEIGHT)
+				.ok_or_else(|| anyhow::anyhow!("Missing column family: {}", LAST_HEIGHT))?;
+
+			let current_height: u64 = db
+				.get_cf(&last_height_cf, b"last_height")
+				.unwrap_or_else(|_| Some(vec![0]))
+				.and_then(|v| String::from_utf8(v).ok())
+				.and_then(|s| s.parse::<u64>().ok())
+				.unwrap_or(0);
+
+			Ok(current_height)
+		})
+		.await?
+	}
+}
diff --git a/protocol-units/da/movement/providers/disk-fifo/src/da/mod.rs b/protocol-units/da/movement/providers/disk-fifo/src/da/mod.rs
new file mode 100644
index 000000000..faec7b69e
--- /dev/null
+++ b/protocol-units/da/movement/providers/disk-fifo/src/da/mod.rs
@@ -0,0 +1,86 @@
+pub mod db;
+
+use movement_da_light_node_da::{Certificate, CertificateStream, DaError, DaOperations};
+use movement_da_util::blob::ir::blob::DaBlob;
+use std::future::Future;
+use std::path::Path;
+use std::pin::Pin;
+use std::sync::Arc;
+use tokio::sync::broadcast;
+use tokio_stream::wrappers::BroadcastStream;
+use tokio_stream::StreamExt;
+
+#[derive(Clone)]
+pub struct Da {
+	/// The RocksDB instance.
+	db: db::DaDb,
+	/// The broadcast channel for certificate notifications.
+	cert_tx: Arc<broadcast::Sender<Certificate>>,
+}
+
+impl Da {
+	/// Creates a new Da instance backed by a RocksDB database at the given path.
+	pub fn try_new(db_path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
+		let (cert_tx, _cert_rx) = broadcast::channel(100); // Create a broadcast channel with a buffer size of 100
+		Ok(Self { db: db::DaDb::open(db_path)?, cert_tx: Arc::new(cert_tx) })
+	}
+}
+
+impl DaOperations for Da {
+	fn submit_blob(
+		&self,
+		data: DaBlob,
+	) -> Pin<Box<dyn Future<Output = Result<(), DaError>> + Send + '_>> {
+		let db = self.db.clone();
+		let cert_tx = self.cert_tx.clone();
+
+		Box::pin(async move {
+			// Add the blob to the database at the next available height
+			let current_height =
+				db.add_blob(data).await.map_err(|e| DaError::Internal(e.to_string()))?;
+
+			// Broadcast the certificate for the new height
+			if let Err(e) = cert_tx.send(Certificate::Height(current_height)) {
+				tracing::warn!(
+					"Failed to broadcast certificate for height {}: {:?}",
+					current_height,
+					e
+				);
+			}
+
+			Ok(())
+		})
+	}
+
+	fn get_da_blobs_at_height(
+		&self,
+		height: u64,
+	) -> Pin<Box<dyn Future<Output = Result<Vec<DaBlob>, DaError>> + Send + '_>> {
+		let db = self.db.clone();
+
+		Box::pin(async move {
+			let blob = db
+				.get_blob_at_height(height)
+				.await
+				.map_err(|e| DaError::NonFatalBlobsAtHeight(e.into()))?;
+			Ok(blob.map_or_else(Vec::new, |b| vec![b]))
+		})
+	}
+
+	fn stream_certificates(
+		&self,
+	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>> {
+		let cert_rx = self.cert_tx.subscribe();
+
+		Box::pin(async move {
+			// Wrap the broadcast receiver into a stream
+			let stream = BroadcastStream::new(cert_rx).filter_map(|result| match result {
+				Ok(height) => Some(Ok(height)), // Pass valid heights
+				Err(e) => Some(Err(DaError::Internal(e.to_string()))), // Convert to DaError
+			});
+
+			// Box the stream and return it
+			Ok(Box::pin(stream) as CertificateStream)
+		})
+	}
+}
diff --git a/protocol-units/da/movement/providers/disk-fifo/src/lib.rs b/protocol-units/da/movement/providers/disk-fifo/src/lib.rs
new file mode 100644
index 000000000..f0a4622da
--- /dev/null
+++ b/protocol-units/da/movement/providers/disk-fifo/src/lib.rs
@@ -0,0 +1 @@
+pub mod da;

From c008e2912c83cfdecfbe359b7ac603a8dc2759f0 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 03:27:30 -0800
Subject: [PATCH 18/43] fix: reintegrate in-known signers verifier.

---
 .../movement/protocol/light-node/src/main.rs  |  7 +++--
 .../protocol/light-node/src/manager.rs        |  5 ++--
 .../protocol/light-node/src/passthrough.rs    | 27 +++++++++++++------
 .../protocol/light-node/src/sequencer.rs      | 22 ++++++++++-----
 4 files changed, 42 insertions(+), 19 deletions(-)

diff --git a/protocol-units/da/movement/protocol/light-node/src/main.rs b/protocol-units/da/movement/protocol/light-node/src/main.rs
index 0004d3b97..85c6c9403 100644
--- a/protocol-units/da/movement/protocol/light-node/src/main.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/main.rs
@@ -2,6 +2,7 @@ use k256::Secp256k1;
 use movement_celestia_da_light_node::{LightNode, Manager};
 use movement_da_light_node_celestia::da::Da as CelestiaDa;
 use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
+use movement_da_light_node_verifier::signed::InKnownSignersVerifier;
 
 use std::env;
 
@@ -17,8 +18,10 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 	let config_path = dot_movement.get_config_json_path();
 	let config_file = tokio::fs::File::open(config_path).await?;
 	// todo: consider whether LightNode implementation should encapsulate signing type
-	let manager =
-		Manager::<LightNode<Secp256k1, DigestStoreDa<CelestiaDa>>>::new(config_file).await?;
+	let manager = Manager::<
+		LightNode<Secp256k1, DigestStoreDa<CelestiaDa>, InKnownSignersVerifier<Secp256k1>>,
+	>::new(config_file)
+	.await?;
 	manager.try_run().await?;
 
 	Ok(())
diff --git a/protocol-units/da/movement/protocol/light-node/src/manager.rs b/protocol-units/da/movement/protocol/light-node/src/manager.rs
index 0b236e0fa..4d2b3b4ff 100644
--- a/protocol-units/da/movement/protocol/light-node/src/manager.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/manager.rs
@@ -14,6 +14,7 @@ use ecdsa::{
 use godfig::{backend::config_file::ConfigFile, Godfig};
 use movement_da_light_node_celestia::da::Da as CelestiaDa;
 use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
+use movement_da_light_node_verifier::signed::InKnownSignersVerifier;
 use movement_da_util::config::Config;
 
 #[derive(Clone)]
@@ -26,7 +27,7 @@ where
 }
 
 // Implements a very simple manager using a marker strategy pattern.
-impl<C> Manager<LightNode<C, DigestStoreDa<CelestiaDa>>>
+impl<C> Manager<LightNode<C, DigestStoreDa<CelestiaDa>, InKnownSignersVerifier<C>>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -46,7 +47,7 @@ where
 
 	pub async fn try_light_node(
 		&self,
-	) -> Result<LightNode<C, DigestStoreDa<CelestiaDa>>, anyhow::Error>
+	) -> Result<LightNode<C, DigestStoreDa<CelestiaDa>, InKnownSignersVerifier<C>>, anyhow::Error>
 	where
 		C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 		Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 704e4e9de..a1e92433f 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -9,7 +9,11 @@ use movement_da_light_node_da::DaOperations;
 use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
 use movement_da_light_node_proto::*;
-use movement_da_util::{blob::ir::data::InnerSignedBlobV1Data, config::Config};
+use movement_da_light_node_verifier::signed::InKnownSignersVerifier;
+use movement_da_light_node_verifier::VerifierOperations;
+use movement_da_util::{
+	blob::ir::blob::DaBlob, blob::ir::data::InnerSignedBlobV1Data, config::Config,
+};
 
 use crate::LightNodeRuntime;
 use ecdsa::{
@@ -26,7 +30,7 @@ use ecdsa::{
 };
 
 #[derive(Clone)]
-pub struct LightNode<C, Da>
+pub struct LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -34,14 +38,15 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations,
+	V: VerifierOperations<DaBlob, DaBlob>,
 {
 	pub config: Config,
-	// pub verifier: Arc<Box<dyn VerifierOperations<CelestiaBlob, DaBlob> + Send + Sync>>,
 	pub signing_key: SigningKey<C>,
 	pub da: Arc<Da>,
+	pub verifier: Arc<V>,
 }
 
-impl<C, Da> Debug for LightNode<C, Da>
+impl<C, Da, V> Debug for LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -49,6 +54,7 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations,
+	V: VerifierOperations<DaBlob, DaBlob>,
 {
 	fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 		f.debug_struct("LightNode")
@@ -57,7 +63,7 @@ where
 	}
 }
 
-impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>>
+impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>, InKnownSignersVerifier<C>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -77,7 +83,9 @@ where
 		let celestia_da = CelestiaDa::new(config.celestia_namespace(), client);
 		let digest_store_da = DigestStoreDa::try_new(celestia_da, config.digest_store_db_path())?;
 
-		Ok(Self { config: config.clone(), da: Arc::new(digest_store_da), signing_key })
+		let verifier = Arc::new(InKnownSignersVerifier::<C>::new(config.da_signers_sec1_keys()));
+
+		Ok(Self { config: config.clone(), da: Arc::new(digest_store_da), signing_key, verifier })
 	}
 
 	fn try_service_address(&self) -> Result<String, anyhow::Error> {
@@ -91,7 +99,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C, Da> LightNodeService for LightNode<C, Da>
+impl<C, Da, V> LightNodeService for LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -99,6 +107,7 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations + Send + Sync + 'static,
+	V: VerifierOperations<DaBlob, DaBlob> + Send + Sync + 'static,
 {
 	/// Server streaming response type for the StreamReadFromHeight method.
 	type StreamReadFromHeightStream = std::pin::Pin<
@@ -115,6 +124,7 @@ where
 		info!("Stream read from height request: {:?}", request);
 
 		let da = self.da.clone();
+		let verifier = self.verifier.clone();
 		let height = request.into_inner().height;
 
 		let output = async_stream::try_stream! {
@@ -123,7 +133,8 @@ where
 
 			while let Some(blob) = blob_stream.next().await {
 				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let blob = da_blob.to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?;
+				let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+				let blob = verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?;
 				let response = StreamReadFromHeightResponse {
 					blob: Some(blob)
 				};
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index a3b064cab..9e0b4b940 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -25,7 +25,11 @@ use movement_da_light_node_digest_store::da::Da as DigestStoreDa;
 use movement_da_light_node_proto as grpc;
 use movement_da_light_node_proto::blob_response::BlobType;
 use movement_da_light_node_proto::light_node_service_server::LightNodeService;
-use movement_da_util::{blob::ir::data::InnerSignedBlobV1Data, config::Config};
+use movement_da_light_node_verifier::{signed::InKnownSignersVerifier, VerifierOperations};
+use movement_da_util::{
+	blob::ir::{blob::DaBlob, data::InnerSignedBlobV1Data},
+	config::Config,
+};
 use movement_types::block::Block;
 use tokio::{
 	sync::mpsc::{Receiver, Sender},
@@ -39,7 +43,7 @@ use crate::{passthrough::LightNode as LightNodePassThrough, LightNodeRuntime};
 const LOGGING_UID: AtomicU64 = AtomicU64::new(0);
 
 #[derive(Clone)]
-pub struct LightNode<C, Da>
+pub struct LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -47,13 +51,14 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations,
+	V: VerifierOperations<DaBlob, DaBlob>,
 {
-	pub pass_through: LightNodePassThrough<C, Da>,
+	pub pass_through: LightNodePassThrough<C, Da, V>,
 	pub memseq: Arc<memseq::Memseq<memseq::RocksdbMempool>>,
 	pub prevalidator: Option<Arc<Validator>>,
 }
 
-impl<C, Da> Debug for LightNode<C, Da>
+impl<C, Da, V> Debug for LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -61,13 +66,14 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations,
+	V: VerifierOperations<DaBlob, DaBlob>,
 {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		f.debug_struct("LightNode").field("pass_through", &self.pass_through).finish()
 	}
 }
 
-impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>>
+impl<C> LightNodeRuntime for LightNode<C, DigestStoreDa<CelestiaDa>, InKnownSignersVerifier<C>>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -113,7 +119,7 @@ where
 	}
 }
 
-impl<C, Da> LightNode<C, Da>
+impl<C, Da, V> LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -121,6 +127,7 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations,
+	V: VerifierOperations<DaBlob, DaBlob>,
 {
 	async fn tick_build_blocks(&self, sender: Sender<Block>) -> Result<(), anyhow::Error> {
 		let memseq = self.memseq.clone();
@@ -282,7 +289,7 @@ where
 }
 
 #[tonic::async_trait]
-impl<C, Da> LightNodeService for LightNode<C, Da>
+impl<C, Da, V> LightNodeService for LightNode<C, Da, V>
 where
 	C: PrimeCurve + CurveArithmetic + DigestPrimitive + PointCompression,
 	Scalar<C>: Invert<Output = CtOption<Scalar<C>>> + SignPrimitive<C>,
@@ -290,6 +297,7 @@ where
 	AffinePoint<C>: FromEncodedPoint<C> + ToEncodedPoint<C> + VerifyPrimitive<C>,
 	FieldBytesSize<C>: ModulusSize,
 	Da: DaOperations + Send + Sync + 'static,
+	V: VerifierOperations<DaBlob, DaBlob> + Send + Sync + 'static,
 {
 	/// Server streaming response type for the StreamReadFromHeight method.
 	type StreamReadFromHeightStream = Pin<

From 1e288a386ea35062d40f42ee676405bfd406c91c Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 06:50:41 -0800
Subject: [PATCH 19/43] fix: use http1.

---
 .../movement/movement-full-node/src/da/stream_blocks/mod.rs    | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
index 98e09673f..b3e3bd3d0 100644
--- a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -18,8 +18,7 @@ impl StreamBlocks {
 	pub async fn execute(&self) -> Result<(), anyhow::Error> {
 		// Get the config
 
-		let mut client = MovementDaLightNodeClient::try_http2(self.light_node_url.as_str())
-			.await
+		let mut client = MovementDaLightNodeClient::try_http1(self.light_node_url.as_str())
 			.context("Failed to connect to light node")?;
 
 		let mut blocks_from_da = client

From 54e27faf0b1bf18c4608df2e465148469da55d69 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 10:47:35 -0800
Subject: [PATCH 20/43] debug: additional certificate logging.

---
 protocol-units/da/movement/protocol/da/src/lib.rs  | 14 +++++++++-----
 .../protocol/light-node/src/passthrough.rs         |  2 +-
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 027f3cd68..92699afb2 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -5,7 +5,7 @@ use movement_da_util::blob::ir::blob::DaBlob;
 use std::future::Future;
 use std::pin::Pin;
 use tokio_stream::{Stream, StreamExt};
-use tracing::warn;
+use tracing::{info, warn};
 
 pub type CertificateStream<'a> =
 	Pin<Box<dyn Stream<Item = Result<Certificate, DaError>> + Send + 'a>>;
@@ -82,11 +82,12 @@ pub trait DaOperations: Send + Sync {
 		&self,
 	) -> Pin<Box<dyn Future<Output = Result<CertificateStream, DaError>> + Send + '_>>;
 
-	fn stream_ir_blobs_between_heights(
+	fn stream_da_blobs_between_heights(
 		&self,
 		start_height: u64,
 		end_height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
+		info!("streaming IR blobs between heights {} and {}", start_height, end_height);
 		let fut = async move {
 			let stream = try_stream! {
 				for height in start_height..end_height {
@@ -101,7 +102,7 @@ pub trait DaOperations: Send + Sync {
 		Box::pin(fut)
 	}
 
-	fn stream_ir_blobs_from_height(
+	fn stream_da_blobs_from_height(
 		&self,
 		start_height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
@@ -112,10 +113,13 @@ pub trait DaOperations: Send + Sync {
 				let mut certificate_stream = certificate_stream;
 
 				while let Some(certificate) = certificate_stream.next().await {
+
+					info!("certificate: {:?}", certificate);
+
 					match certificate {
 						Ok(Certificate::Height(height)) if height > last_height => {
 							let blob_stream = self
-								.stream_ir_blobs_between_heights(last_height, height)
+								.stream_da_blobs_between_heights(last_height, height)
 								.await?;
 							tokio::pin!(blob_stream);
 
@@ -138,7 +142,7 @@ pub trait DaOperations: Send + Sync {
 						}
 						// If height is less than last height, ignore
 						_ => {
-							warn!("ignoring certificate with height less than last height");
+							warn!("ignoring certificate");
 						}
 					}
 				}
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index a1e92433f..6b555a2c3 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -129,7 +129,7 @@ where
 
 		let output = async_stream::try_stream! {
 
-			let mut blob_stream = da.stream_ir_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+			let mut blob_stream = da.stream_da_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
 			while let Some(blob) = blob_stream.next().await {
 				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;

From 09af3809f763b16496f9962cf57dd6c8206fb4f9 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 10:53:21 -0800
Subject: [PATCH 21/43] fix: enhance logging.

---
 .../movement-full-node/src/da/stream_blocks/mod.rs         | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
index b3e3bd3d0..44750a028 100644
--- a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -4,6 +4,7 @@ use clap::Parser;
 use movement_da_light_node_client::MovementDaLightNodeClient;
 use movement_da_light_node_proto::{blob_response, StreamReadFromHeightRequest};
 use tokio_stream::StreamExt;
+use tracing::info;
 
 #[derive(Debug, Parser, Clone)]
 #[clap(rename_all = "kebab-case", about = "Streams the DA blocks")]
@@ -26,7 +27,7 @@ impl StreamBlocks {
 			.await
 			.context("Failed to stream blocks from DA")?;
 
-		println!("Streaming blocks from DA");
+		info!("streaming blocks from DA");
 
 		while let Some(block_res) = blocks_from_da.next().await {
 			let response = block_res.context("Failed to get block")?;
@@ -43,10 +44,10 @@ impl StreamBlocks {
 					anyhow::bail!("Invalid blob type in response")
 				}
 			};
-			println!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
+			info!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
 		}
 
-		println!("Finished streaming blocks from DA");
+		info!("Finished streaming blocks from DA");
 
 		Ok(())
 	}

From 6b43c1355735ddef4291091269f5ec31a5df6e59 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 12:18:52 -0800
Subject: [PATCH 22/43] fix: enhance logging.

---
 process-compose/movement-full-node/process-compose.gas-dos.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/process-compose/movement-full-node/process-compose.gas-dos.yml b/process-compose/movement-full-node/process-compose.gas-dos.yml
index 6f7c5de56..68f1aaddc 100644
--- a/process-compose/movement-full-node/process-compose.gas-dos.yml
+++ b/process-compose/movement-full-node/process-compose.gas-dos.yml
@@ -4,6 +4,7 @@ environment:
 
 processes:
 
+  # Test whether the full node is resistant to Gas DOS
   gas-dos-test:
     command: |
       cargo run --bin movement-tests-gas-dos

From 81b761f504ec51abc2e9d64d1b45736d90e7fae5 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 12:32:32 -0800
Subject: [PATCH 23/43] fix: containers workflow.

---
 .github/workflows/build-push-containers-all.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/build-push-containers-all.yml b/.github/workflows/build-push-containers-all.yml
index e1832b4b2..74e1c9477 100644
--- a/.github/workflows/build-push-containers-all.yml
+++ b/.github/workflows/build-push-containers-all.yml
@@ -786,7 +786,7 @@ jobs:
 ### Unchecked containers
 
   bridge-service-build:
-    # if: github.event.label.name == 'cicd:bridge-containers' ||  github.ref == 'refs/heads/main'
+    if: github.event.label.name == 'cicd:bridge-containers' ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -845,7 +845,7 @@ jobs:
           ./scripts/movement/manifest bridge-service
 
   bridge-setup-build:
-    # if: github.event.label.name == 'cicd:bridge-containers' ||  github.ref == 'refs/heads/main'
+    if: github.event.label.name == 'cicd:bridge-containers' ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write

From 28c91f8000d366038084c78de54592787b7010f3 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 12:39:20 -0800
Subject: [PATCH 24/43] fix: use pr labels contains.

---
 .../workflows/build-push-containers-all.yml   | 24 +++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/build-push-containers-all.yml b/.github/workflows/build-push-containers-all.yml
index 74e1c9477..450fc8e76 100644
--- a/.github/workflows/build-push-containers-all.yml
+++ b/.github/workflows/build-push-containers-all.yml
@@ -13,7 +13,7 @@ on:
 jobs:
 
   movement-celestia-da-light-node-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -78,7 +78,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-da-light-node
 
   movement-celestia-appd-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -143,7 +143,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-appd
 
   movement-celestia-bridge-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -208,7 +208,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-bridge
 
   movement-full-node-setup-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -273,7 +273,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node-setup
 
   wait-for-celestia-light-node-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -338,7 +338,7 @@ jobs:
           ./scripts/movement/manifest wait-for-celestia-light-node
 
   movement-full-node-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -403,7 +403,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node
 
   movement-faucet-service-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -468,7 +468,7 @@ jobs:
           ./scripts/movement/manifest movement-faucet-service
 
   movement-tests-e2e-simple-interaction-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -533,7 +533,7 @@ jobs:
           ./scripts/movement/manifest movement-tests-e2e-simple-interaction
 
   movement-indexer-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -598,7 +598,7 @@ jobs:
           ./scripts/movement/manifest movement-indexer
 
   movement-tests-e2e-followers-consistent-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -664,7 +664,7 @@ jobs:
 
 
   movement-util-build:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     permissions:
       contents: read
       packages: write
@@ -730,7 +730,7 @@ jobs:
 
   
   container-checks:
-    if: github.event.label.name == 'cicd:movement-containers' ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
     runs-on: buildjet-8vcpu-ubuntu-2204
     needs: 
       - movement-celestia-da-light-node-manifest

From c6a85935a3323e46529931559a05f2c5c001fee5 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 12:52:41 -0800
Subject: [PATCH 25/43] debug: log event.

---
 .../workflows/build-push-containers-all.yml   | 30 +++++++++++--------
 1 file changed, 18 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/build-push-containers-all.yml b/.github/workflows/build-push-containers-all.yml
index 450fc8e76..ea7adc8ac 100644
--- a/.github/workflows/build-push-containers-all.yml
+++ b/.github/workflows/build-push-containers-all.yml
@@ -12,8 +12,14 @@ on:
 
 jobs:
 
+  log-event:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Log the event
+        run: echo ${{ github.event }}
+
   movement-celestia-da-light-node-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if:  contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -78,7 +84,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-da-light-node
 
   movement-celestia-appd-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -143,7 +149,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-appd
 
   movement-celestia-bridge-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -208,7 +214,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-bridge
 
   movement-full-node-setup-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -273,7 +279,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node-setup
 
   wait-for-celestia-light-node-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -338,7 +344,7 @@ jobs:
           ./scripts/movement/manifest wait-for-celestia-light-node
 
   movement-full-node-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -403,7 +409,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node
 
   movement-faucet-service-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -468,7 +474,7 @@ jobs:
           ./scripts/movement/manifest movement-faucet-service
 
   movement-tests-e2e-simple-interaction-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -533,7 +539,7 @@ jobs:
           ./scripts/movement/manifest movement-tests-e2e-simple-interaction
 
   movement-indexer-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -598,7 +604,7 @@ jobs:
           ./scripts/movement/manifest movement-indexer
 
   movement-tests-e2e-followers-consistent-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -664,7 +670,7 @@ jobs:
 
 
   movement-util-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -730,7 +736,7 @@ jobs:
 
   
   container-checks:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main'
+    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     runs-on: buildjet-8vcpu-ubuntu-2204
     needs: 
       - movement-celestia-da-light-node-manifest

From 9ff43379de50dfaf8cc963124dffac65cc9fc5ac Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 12:55:06 -0800
Subject: [PATCH 26/43] debug: log containers.

---
 .github/workflows/build-push-containers-all.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/build-push-containers-all.yml b/.github/workflows/build-push-containers-all.yml
index ea7adc8ac..c6f27cc6d 100644
--- a/.github/workflows/build-push-containers-all.yml
+++ b/.github/workflows/build-push-containers-all.yml
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Log the event
-        run: echo ${{ github.event }}
+        run: echo '${{ toJson(github.event) }}'
 
   movement-celestia-da-light-node-build:
     if:  contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'

From 3969f77ed37e0c7fe2427d60006c18d018719f44 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 13:03:14 -0800
Subject: [PATCH 27/43] fix: label filter.

---
 .../workflows/build-push-containers-all.yml   | 22 +++++++++----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/build-push-containers-all.yml b/.github/workflows/build-push-containers-all.yml
index c6f27cc6d..9d7fed652 100644
--- a/.github/workflows/build-push-containers-all.yml
+++ b/.github/workflows/build-push-containers-all.yml
@@ -19,7 +19,7 @@ jobs:
         run: echo '${{ toJson(github.event) }}'
 
   movement-celestia-da-light-node-build:
-    if:  contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if:  contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -84,7 +84,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-da-light-node
 
   movement-celestia-appd-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -149,7 +149,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-appd
 
   movement-celestia-bridge-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -214,7 +214,7 @@ jobs:
           ./scripts/movement/manifest movement-celestia-bridge
 
   movement-full-node-setup-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -279,7 +279,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node-setup
 
   wait-for-celestia-light-node-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -344,7 +344,7 @@ jobs:
           ./scripts/movement/manifest wait-for-celestia-light-node
 
   movement-full-node-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -409,7 +409,7 @@ jobs:
           ./scripts/movement/manifest movement-full-node
 
   movement-faucet-service-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -474,7 +474,7 @@ jobs:
           ./scripts/movement/manifest movement-faucet-service
 
   movement-tests-e2e-simple-interaction-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -539,7 +539,7 @@ jobs:
           ./scripts/movement/manifest movement-tests-e2e-simple-interaction
 
   movement-indexer-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -604,7 +604,7 @@ jobs:
           ./scripts/movement/manifest movement-indexer
 
   movement-tests-e2e-followers-consistent-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write
@@ -670,7 +670,7 @@ jobs:
 
 
   movement-util-build:
-    if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
+    # if: contains(github.event.pull_request.labels.*.name, 'cicd:movement-containers') ||  github.ref == 'refs/heads/main' || github.event.label.name == 'cicd:movement-containers'
     permissions:
       contents: read
       packages: write

From 00adf06c330aa4c96058eab3a780fbe88ba8f2ea Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 14:53:31 -0800
Subject: [PATCH 28/43] docs: http1 warning.

---
 .../run/manual/follower-node/README.md            |  7 +++++--
 .../src/da/stream_blocks/mod.rs                   | 15 +++++++++------
 protocol-units/da/movement/protocol/da/src/lib.rs |  3 ++-
 .../movement/protocol/util/src/config/common.rs   |  2 +-
 4 files changed, 17 insertions(+), 10 deletions(-)

diff --git a/docs/movement-node/run/manual/follower-node/README.md b/docs/movement-node/run/manual/follower-node/README.md
index 3f6c9eeae..339021b87 100644
--- a/docs/movement-node/run/manual/follower-node/README.md
+++ b/docs/movement-node/run/manual/follower-node/README.md
@@ -30,7 +30,7 @@ DOT_MOVEMENT_PATH=./.movement
 MAPTOS_CHAIN_ID=250 # change this to the chain id of the network you are running
 MOVEMENT_SYNC="follower::mtnet-l-sync-bucket-sync<=>{maptos,maptos-storage,movement-da-db}/**" # change to the sync bucket for the network you are running
 MOVEMENT_DA_LIGHT_NODE_CONNECTION_PROTOCOL=https
-MOVEMENT_DA_LIGHT_NODE_CONNECTION_HOSTNAME="movement-celestia-da-light-node.testnet.bardock.movementlabs.xyz" # change this to the hostname of the Movement DA Light Node service on network you are running
+MOVEMENT_DA_LIGHT_NODE_CONNECTION_HOSTNAME="m1-da-light-node.testnet.bardock.movementlabs.xyz" # change this to the hostname of the Movement DA Light Node service on network you are running
 MOVEMENT_DA_LIGHT_NODE_CONNECTION_PORT=443
 # you may need to provide AWS credentials for the Amazon SDK to properly interact with the sync bucket
 # often this will be picked up appropriately if your environment is configured to use AWS
@@ -66,12 +66,15 @@ You should see a `ledger_version` field CLOSE to the other values on the network
 For deployment and advanced usage, we recommend you use our [provided Ansible scripts](../../ansible/follower-node/README.md).
 
 ## Transports
-By default, Movement Full Node Followers will use `http1` for the connection to the light node service. You can toggle `http1` or `http2` with:
+By default, Movement Full Node Followers will use `http2` for the connection to the light node service. You can toggle `http1` or `http2` with:
 
 ```shell
 MOVEMENT_DA_LIGHT_NODE_HTTP1=true
 ```
 
+> [!WARNING]
+> We have found an issue with streams over `http1` that can cause the follower node connection with the light node service to drop unexpectedly. We recommend using `http2` for the time being.
+
 ## Movement Mainnet
 Movement Mainnet Follower operations are the same as other networks. However, we encourage you to also sync the `default_signer_address_whitelist` which is available to reject disallowed signers in the mempool instead of at the DA. 
 
diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
index 44750a028..30f2d87e3 100644
--- a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -4,7 +4,6 @@ use clap::Parser;
 use movement_da_light_node_client::MovementDaLightNodeClient;
 use movement_da_light_node_proto::{blob_response, StreamReadFromHeightRequest};
 use tokio_stream::StreamExt;
-use tracing::info;
 
 #[derive(Debug, Parser, Clone)]
 #[clap(rename_all = "kebab-case", about = "Streams the DA blocks")]
@@ -19,7 +18,8 @@ impl StreamBlocks {
 	pub async fn execute(&self) -> Result<(), anyhow::Error> {
 		// Get the config
 
-		let mut client = MovementDaLightNodeClient::try_http1(self.light_node_url.as_str())
+		let mut client = MovementDaLightNodeClient::try_http2(self.light_node_url.as_str())
+			.await
 			.context("Failed to connect to light node")?;
 
 		let mut blocks_from_da = client
@@ -27,7 +27,7 @@ impl StreamBlocks {
 			.await
 			.context("Failed to stream blocks from DA")?;
 
-		info!("streaming blocks from DA");
+		println!("streaming blocks from DA");
 
 		while let Some(block_res) = blocks_from_da.next().await {
 			let response = block_res.context("Failed to get block")?;
@@ -40,14 +40,17 @@ impl StreamBlocks {
 				blob_response::BlobType::SequencedBlobBlock(blob) => {
 					(blob.data, blob.timestamp, blob.blob_id, blob.height)
 				}
+				blob_response::BlobType::PassedThroughBlob(blob) => {
+					(blob.data, blob.timestamp, blob.blob_id, blob.height)
+				}
 				_ => {
-					anyhow::bail!("Invalid blob type in response")
+					return Err(anyhow::anyhow!("Unknown blob type"));
 				}
 			};
-			info!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
+			println!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
 		}
 
-		info!("Finished streaming blocks from DA");
+		println!("Finished streaming blocks from DA");
 
 		Ok(())
 	}
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 92699afb2..876fe479b 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -87,10 +87,11 @@ pub trait DaOperations: Send + Sync {
 		start_height: u64,
 		end_height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
-		info!("streaming IR blobs between heights {} and {}", start_height, end_height);
+		info!("streaming da blobs between heights {} and {}", start_height, end_height);
 		let fut = async move {
 			let stream = try_stream! {
 				for height in start_height..end_height {
+					info!("getting blobs at height {}", height);
 					let blobs = self.get_da_blobs_at_height_for_stream(height).await?;
 					for blob in blobs {
 						yield (DaHeight(height), blob);
diff --git a/protocol-units/da/movement/protocol/util/src/config/common.rs b/protocol-units/da/movement/protocol/util/src/config/common.rs
index 17cf5acfb..8ec56bbc5 100644
--- a/protocol-units/da/movement/protocol/util/src/config/common.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/common.rs
@@ -156,4 +156,4 @@ pub fn default_celestia_bridge_replace_args() -> Vec<String> {
 env_default!(default_da_light_node_is_initial, "MOVEMENT_DA_LIGHT_NODE_IS_INITIAL", bool, true);
 
 // Whether to use http1 for Movement Light Node Connections
-env_default!(default_movement_da_light_node_http1, "MOVEMENT_DA_LIGHT_NODE_HTTP1", bool, true);
+env_default!(default_movement_da_light_node_http1, "MOVEMENT_DA_LIGHT_NODE_HTTP1", bool, false);

From 09f5c5ef472840c160e0e98ec8e73f7b7f46c17e Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 14:56:58 -0800
Subject: [PATCH 29/43] fix: update default to http2.

---
 docker/compose/movement-full-node/docker-compose.follower.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docker/compose/movement-full-node/docker-compose.follower.yml b/docker/compose/movement-full-node/docker-compose.follower.yml
index 9f71c34d8..2293198ef 100644
--- a/docker/compose/movement-full-node/docker-compose.follower.yml
+++ b/docker/compose/movement-full-node/docker-compose.follower.yml
@@ -14,6 +14,7 @@ services:
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
       MOVEMENT_SYNC: ${MOVEMENT_SYNC:?MOVEMENT_SYNC is not set}
       MAYBE_RUN_LOCAL: "false"
+      MOVEMENT_DA_LIGHT_NODE_HTTP1: ${MOVEMENT_DA_LIGHT_NODE_HTTP1}
       RUST_LOG: info,aws_sdk_s3=debug
     volumes:
       - ${DOT_MOVEMENT_PATH}:/.movement

From 0be83130453ef79ac95ba11d98420513386a28a5 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 17:56:04 -0800
Subject: [PATCH 30/43] fix: http2 support first class; http1 deprecated.

---
 Cargo.lock                                    | 52 ++++++++++++++++---
 docker/build/movement-full-node/Dockerfile    |  2 +
 .../da/movement/protocol/client/Cargo.toml    |  2 +-
 .../da/movement/protocol/client/src/http2.rs  | 17 +++++-
 .../da/movement/protocol/client/src/lib.rs    |  4 ++
 5 files changed, 67 insertions(+), 10 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 589a223af..d3fd9564c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5182,6 +5182,16 @@ dependencies = [
  "libc",
 ]
 
+[[package]]
+name = "core-foundation"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
 [[package]]
 name = "core-foundation-sys"
 version = "0.8.7"
@@ -10769,7 +10779,7 @@ dependencies = [
  "openssl-probe",
  "openssl-sys",
  "schannel",
- "security-framework",
+ "security-framework 2.11.1",
  "security-framework-sys",
  "tempfile",
 ]
@@ -13363,7 +13373,7 @@ dependencies = [
  "openssl-probe",
  "rustls-pemfile 1.0.4",
  "schannel",
- "security-framework",
+ "security-framework 2.11.1",
 ]
 
 [[package]]
@@ -13376,7 +13386,19 @@ dependencies = [
  "rustls-pemfile 2.2.0",
  "rustls-pki-types",
  "schannel",
- "security-framework",
+ "security-framework 2.11.1",
+]
+
+[[package]]
+name = "rustls-native-certs"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3"
+dependencies = [
+ "openssl-probe",
+ "rustls-pki-types",
+ "schannel",
+ "security-framework 3.0.1",
 ]
 
 [[package]]
@@ -13412,7 +13434,7 @@ version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490"
 dependencies = [
- "core-foundation",
+ "core-foundation 0.9.4",
  "core-foundation-sys",
  "jni",
  "log",
@@ -13421,7 +13443,7 @@ dependencies = [
  "rustls-native-certs 0.7.3",
  "rustls-platform-verifier-android",
  "rustls-webpki 0.102.8",
- "security-framework",
+ "security-framework 2.11.1",
  "security-framework-sys",
  "webpki-roots 0.26.6",
  "winapi 0.3.9",
@@ -13605,13 +13627,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
 dependencies = [
  "bitflags 2.6.0",
- "core-foundation",
+ "core-foundation 0.9.4",
  "core-foundation-sys",
  "libc",
  "num-bigint 0.4.6",
  "security-framework-sys",
 ]
 
+[[package]]
+name = "security-framework"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8"
+dependencies = [
+ "bitflags 2.6.0",
+ "core-foundation 0.10.0",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
 [[package]]
 name = "security-framework-sys"
 version = "2.12.1"
@@ -14634,7 +14669,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
 dependencies = [
  "bitflags 1.3.2",
- "core-foundation",
+ "core-foundation 0.9.4",
  "system-configuration-sys 0.5.0",
 ]
 
@@ -14645,7 +14680,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
 dependencies = [
  "bitflags 2.6.0",
- "core-foundation",
+ "core-foundation 0.9.4",
  "system-configuration-sys 0.6.0",
 ]
 
@@ -15268,6 +15303,7 @@ dependencies = [
  "percent-encoding",
  "pin-project 1.1.7",
  "prost 0.13.3",
+ "rustls-native-certs 0.8.1",
  "rustls-pemfile 2.2.0",
  "socket2 0.5.7",
  "tokio",
diff --git a/docker/build/movement-full-node/Dockerfile b/docker/build/movement-full-node/Dockerfile
index 59c794160..a2a052f08 100644
--- a/docker/build/movement-full-node/Dockerfile
+++ b/docker/build/movement-full-node/Dockerfile
@@ -21,6 +21,8 @@ FROM alpine:latest
 COPY --from=builder /tmp/build/target/release/movement-full-node /app/movement-full-node
 COPY --from=builder /tmp/runtime/nix/store /nix/store
 
+RUN apk add --upgrade --no-cache ca-certificates && update-ca-certificates
+
 # Set the binary as the entrypoint
 ENTRYPOINT ["/app/movement-full-node"]
 
diff --git a/protocol-units/da/movement/protocol/client/Cargo.toml b/protocol-units/da/movement/protocol/client/Cargo.toml
index 62acd8002..fcf7b44ee 100644
--- a/protocol-units/da/movement/protocol/client/Cargo.toml
+++ b/protocol-units/da/movement/protocol/client/Cargo.toml
@@ -13,7 +13,7 @@ rust-version = { workspace = true }
 
 [dependencies]
 movement-da-light-node-proto = { workspace = true, features = ["client"] }
-tonic = { workspace = true}
+tonic = { workspace = true, features = ["tls", "tls-roots"]}
 tonic-web = { workspace = true }
 hyper-util = { workspace = true }
 tower = { workspace = true }
diff --git a/protocol-units/da/movement/protocol/client/src/http2.rs b/protocol-units/da/movement/protocol/client/src/http2.rs
index ae4ca06ac..4df0ec8ca 100644
--- a/protocol-units/da/movement/protocol/client/src/http2.rs
+++ b/protocol-units/da/movement/protocol/client/src/http2.rs
@@ -1,4 +1,6 @@
 use movement_da_light_node_proto::light_node_service_client::LightNodeServiceClient;
+use std::time::Duration;
+use tonic::transport::{Channel, ClientTlsConfig};
 
 #[derive(Debug, Clone)]
 pub struct Http2 {
@@ -8,7 +10,20 @@ pub struct Http2 {
 impl Http2 {
 	/// Connects to a light node service using the given connection string.
 	pub async fn connect(connection_string: &str) -> Result<Self, anyhow::Error> {
-		let client = LightNodeServiceClient::connect(connection_string.to_string()).await?;
+		let endpoint = Channel::from_shared(connection_string.to_string())?;
+
+		// Dynamically configure TLS based on the scheme (http or https)
+		let endpoint = if connection_string.starts_with("https://") {
+			endpoint
+				.tls_config(ClientTlsConfig::new().with_native_roots())?
+				.http2_keep_alive_interval(Duration::from_secs(10))
+		} else {
+			endpoint
+		};
+
+		let channel = endpoint.connect().await?;
+		let client = LightNodeServiceClient::new(channel);
+
 		Ok(Http2 { client })
 	}
 
diff --git a/protocol-units/da/movement/protocol/client/src/lib.rs b/protocol-units/da/movement/protocol/client/src/lib.rs
index a52577be2..10a4efad4 100644
--- a/protocol-units/da/movement/protocol/client/src/lib.rs
+++ b/protocol-units/da/movement/protocol/client/src/lib.rs
@@ -14,6 +14,10 @@ pub enum MovementDaLightNodeClient {
 
 impl MovementDaLightNodeClient {
 	/// Creates an http1 connection to the light node service.
+	#[deprecated(
+		since = "0.1.0",
+		note = "Please use `try_http2` instead. This function will remain deprecated until [tonic_web] stream bugs are fixed."
+	)]
 	pub fn try_http1(connection_string: &str) -> Result<Self, anyhow::Error> {
 		Ok(Self::Http1(http1::Http1::try_new(connection_string)?))
 	}

From 39b8755b0e538d7b0aecfb1d1ebca9b5268529a0 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 22:47:54 -0800
Subject: [PATCH 31/43] feat: switch to webpki.

---
 Cargo.lock                                    | 53 ++++---------------
 docker/build/movement-full-node/Dockerfile    |  2 -
 .../movement/movement-full-node/Cargo.toml    |  1 +
 .../src/da/stream_blocks/mod.rs               | 13 ++++-
 .../movement-full-node/src/node/partial.rs    | 44 +++++++--------
 .../src/node/tasks/execute_settle.rs          | 10 ++--
 .../da/movement/protocol/client/Cargo.toml    |  2 +-
 .../da/movement/protocol/client/src/http2.rs  |  2 +-
 8 files changed, 50 insertions(+), 77 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index d3fd9564c..aed0551dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5182,16 +5182,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "core-foundation"
-version = "0.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63"
-dependencies = [
- "core-foundation-sys",
- "libc",
-]
-
 [[package]]
 name = "core-foundation-sys"
 version = "0.8.7"
@@ -10498,6 +10488,7 @@ version = "0.0.2"
 dependencies = [
  "anyhow",
  "bcs 0.1.4",
+ "chrono",
  "clap 4.5.21",
  "console-subscriber",
  "dot-movement",
@@ -10779,7 +10770,7 @@ dependencies = [
  "openssl-probe",
  "openssl-sys",
  "schannel",
- "security-framework 2.11.1",
+ "security-framework",
  "security-framework-sys",
  "tempfile",
 ]
@@ -13373,7 +13364,7 @@ dependencies = [
  "openssl-probe",
  "rustls-pemfile 1.0.4",
  "schannel",
- "security-framework 2.11.1",
+ "security-framework",
 ]
 
 [[package]]
@@ -13386,19 +13377,7 @@ dependencies = [
  "rustls-pemfile 2.2.0",
  "rustls-pki-types",
  "schannel",
- "security-framework 2.11.1",
-]
-
-[[package]]
-name = "rustls-native-certs"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3"
-dependencies = [
- "openssl-probe",
- "rustls-pki-types",
- "schannel",
- "security-framework 3.0.1",
+ "security-framework",
 ]
 
 [[package]]
@@ -13434,7 +13413,7 @@ version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490"
 dependencies = [
- "core-foundation 0.9.4",
+ "core-foundation",
  "core-foundation-sys",
  "jni",
  "log",
@@ -13443,7 +13422,7 @@ dependencies = [
  "rustls-native-certs 0.7.3",
  "rustls-platform-verifier-android",
  "rustls-webpki 0.102.8",
- "security-framework 2.11.1",
+ "security-framework",
  "security-framework-sys",
  "webpki-roots 0.26.6",
  "winapi 0.3.9",
@@ -13627,26 +13606,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
 dependencies = [
  "bitflags 2.6.0",
- "core-foundation 0.9.4",
+ "core-foundation",
  "core-foundation-sys",
  "libc",
  "num-bigint 0.4.6",
  "security-framework-sys",
 ]
 
-[[package]]
-name = "security-framework"
-version = "3.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8"
-dependencies = [
- "bitflags 2.6.0",
- "core-foundation 0.10.0",
- "core-foundation-sys",
- "libc",
- "security-framework-sys",
-]
-
 [[package]]
 name = "security-framework-sys"
 version = "2.12.1"
@@ -14669,7 +14635,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7"
 dependencies = [
  "bitflags 1.3.2",
- "core-foundation 0.9.4",
+ "core-foundation",
  "system-configuration-sys 0.5.0",
 ]
 
@@ -14680,7 +14646,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
 dependencies = [
  "bitflags 2.6.0",
- "core-foundation 0.9.4",
+ "core-foundation",
  "system-configuration-sys 0.6.0",
 ]
 
@@ -15303,7 +15269,6 @@ dependencies = [
  "percent-encoding",
  "pin-project 1.1.7",
  "prost 0.13.3",
- "rustls-native-certs 0.8.1",
  "rustls-pemfile 2.2.0",
  "socket2 0.5.7",
  "tokio",
diff --git a/docker/build/movement-full-node/Dockerfile b/docker/build/movement-full-node/Dockerfile
index a2a052f08..59c794160 100644
--- a/docker/build/movement-full-node/Dockerfile
+++ b/docker/build/movement-full-node/Dockerfile
@@ -21,8 +21,6 @@ FROM alpine:latest
 COPY --from=builder /tmp/build/target/release/movement-full-node /app/movement-full-node
 COPY --from=builder /tmp/runtime/nix/store /nix/store
 
-RUN apk add --upgrade --no-cache ca-certificates && update-ca-certificates
-
 # Set the binary as the entrypoint
 ENTRYPOINT ["/app/movement-full-node"]
 
diff --git a/networks/movement/movement-full-node/Cargo.toml b/networks/movement/movement-full-node/Cargo.toml
index daac9aa5d..8b895d1bd 100644
--- a/networks/movement/movement-full-node/Cargo.toml
+++ b/networks/movement/movement-full-node/Cargo.toml
@@ -42,6 +42,7 @@ hex = { workspace = true }
 mcr-settlement-config = { workspace = true }
 clap = { workspace =  true }
 movement-da-light-node-client = { workspace = true}
+chrono = { workspace = true }
 
 [features]
 default = []
diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
index 30f2d87e3..340397f87 100644
--- a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -27,7 +27,7 @@ impl StreamBlocks {
 			.await
 			.context("Failed to stream blocks from DA")?;
 
-		println!("streaming blocks from DA");
+		println!("Streaming blocks from DA");
 
 		while let Some(block_res) = blocks_from_da.next().await {
 			let response = block_res.context("Failed to get block")?;
@@ -47,7 +47,16 @@ impl StreamBlocks {
 					return Err(anyhow::anyhow!("Unknown blob type"));
 				}
 			};
-			println!("{} {}  {}", hex::encode(block_id), block_timestamp, da_height);
+
+			// pretty print (with labels) the block_id, block_timestamp, and da_height
+			println!(
+				"Block ID: {}, Block Timestamp: {}, DA Height: {}",
+				hex::encode(block_id),
+				// unix date string from the block timestamp which is in microseconds
+				chrono::DateTime::from_timestamp((block_timestamp / 1_000_000) as i64, 0)
+					.context("Failed to convert timestamp to date")?,
+				da_height
+			);
 		}
 
 		println!("Finished streaming blocks from DA");
diff --git a/networks/movement/movement-full-node/src/node/partial.rs b/networks/movement/movement-full-node/src/node/partial.rs
index ceab1d07b..c5120fd99 100644
--- a/networks/movement/movement-full-node/src/node/partial.rs
+++ b/networks/movement/movement-full-node/src/node/partial.rs
@@ -11,7 +11,7 @@ use movement_rest::MovementRest;
 use anyhow::Context;
 use tokio::sync::mpsc;
 use tokio::try_join;
-use tracing::debug;
+use tracing::{debug, info};
 
 pub struct MovementPartialNode<T> {
 	executor: T,
@@ -114,30 +114,26 @@ impl MovementPartialNode<Executor> {
 			.celestia_da_light_node_config
 			.movement_da_light_node_http1()
 		{
-			debug!("Creating the http1 client");
-			MovementDaLightNodeClient::try_http1(
-				format!(
-					"{}://{}:{}",
-					light_node_connection_protocol,
-					light_node_connection_hostname,
-					light_node_connection_port
-				)
-				.as_str(),
-			)
-			.context("Failed to connect to light node")?
+			let connection_string = format!(
+				"{}://{}:{}",
+				light_node_connection_protocol,
+				light_node_connection_hostname,
+				light_node_connection_port
+			);
+			info!("Creating the http1 client {}", connection_string);
+			MovementDaLightNodeClient::try_http1(connection_string.as_str())
+				.context("Failed to connect to light node")?
 		} else {
-			debug!("Creating the http2 client");
-			MovementDaLightNodeClient::try_http2(
-				format!(
-					"{}://{}:{}",
-					light_node_connection_protocol,
-					light_node_connection_hostname,
-					light_node_connection_port
-				)
-				.as_str(),
-			)
-			.await
-			.context("Failed to connect to light node")?
+			let connection_string = format!(
+				"{}://{}:{}",
+				light_node_connection_protocol,
+				light_node_connection_hostname,
+				light_node_connection_port
+			);
+			info!("Creating the http2 client {}", connection_string);
+			MovementDaLightNodeClient::try_http2(connection_string.as_str())
+				.await
+				.context("Failed to connect to light node")?
 		};
 
 		debug!("Creating the executor");
diff --git a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
index 974a82d39..c7d2dd1c5 100644
--- a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
@@ -73,18 +73,22 @@ where
 		let mut blocks_from_da = self
 			.da_light_node_client
 			.stream_read_from_height(StreamReadFromHeightRequest { height: synced_height })
-			.await?;
+			.await
+			.map_err(|e| {
+				error!("Failed to stream blocks from DA: {:?}", e);
+				e
+			})?;
 
 		loop {
 			select! {
 				Some(res) = blocks_from_da.next() => {
 					let response = res.context("failed to get next block from DA")?;
-					debug!("Received block from DA");
+					info!("Received block from DA");
 					self.process_block_from_da(response).await?;
 				}
 				Some(res) = self.commitment_events.next() => {
 					let event = res.context("failed to get commitment event")?;
-					debug!("Received commitment event");
+					info!("Received commitment event");
 					self.process_commitment_event(event).await?;
 				}
 				else => break,
diff --git a/protocol-units/da/movement/protocol/client/Cargo.toml b/protocol-units/da/movement/protocol/client/Cargo.toml
index fcf7b44ee..72a12fdaf 100644
--- a/protocol-units/da/movement/protocol/client/Cargo.toml
+++ b/protocol-units/da/movement/protocol/client/Cargo.toml
@@ -13,7 +13,7 @@ rust-version = { workspace = true }
 
 [dependencies]
 movement-da-light-node-proto = { workspace = true, features = ["client"] }
-tonic = { workspace = true, features = ["tls", "tls-roots"]}
+tonic = { workspace = true, features = ["tls", "tls-webpki-roots"]}
 tonic-web = { workspace = true }
 hyper-util = { workspace = true }
 tower = { workspace = true }
diff --git a/protocol-units/da/movement/protocol/client/src/http2.rs b/protocol-units/da/movement/protocol/client/src/http2.rs
index 4df0ec8ca..2d76fd7c0 100644
--- a/protocol-units/da/movement/protocol/client/src/http2.rs
+++ b/protocol-units/da/movement/protocol/client/src/http2.rs
@@ -15,7 +15,7 @@ impl Http2 {
 		// Dynamically configure TLS based on the scheme (http or https)
 		let endpoint = if connection_string.starts_with("https://") {
 			endpoint
-				.tls_config(ClientTlsConfig::new().with_native_roots())?
+				.tls_config(ClientTlsConfig::new().with_enabled_roots())?
 				.http2_keep_alive_interval(Duration::from_secs(10))
 		} else {
 			endpoint

From 5ea357bbc01ff1b4b79b3ed09b2113565c11ca5b Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Wed, 15 Jan 2025 23:47:29 -0800
Subject: [PATCH 32/43] fix: connection protocol not passed through.

---
 .../movement/protocol/util/src/config/local/da_light_node.rs | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
index d996b338f..e1555797a 100644
--- a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
@@ -2,7 +2,8 @@ use crate::config::common::{
 	default_celestia_rpc_connection_hostname, default_celestia_rpc_connection_port,
 	default_celestia_rpc_connection_protocol, default_celestia_websocket_connection_hostname,
 	default_celestia_websocket_connection_port, default_movement_da_light_node_connection_hostname,
-	default_movement_da_light_node_connection_port, default_movement_da_light_node_http1,
+	default_movement_da_light_node_connection_port,
+	default_movement_da_light_node_connection_protocol, default_movement_da_light_node_http1,
 	default_movement_da_light_node_listen_hostname, default_movement_da_light_node_listen_port,
 };
 use ecdsa::SigningKey;
@@ -122,7 +123,7 @@ pub struct Config {
 	pub movement_da_light_node_listen_port: u16,
 
 	/// The protocol for movement-celestia-da-light-node connection
-	#[serde(default = "default_celestia_rpc_connection_protocol")]
+	#[serde(default = "default_movement_da_light_node_connection_protocol")]
 	pub movement_da_light_node_connection_protocol: String,
 
 	/// The hostname for movement-celestia-da-light-node connection

From 922db0725d028a9fd9a6a855b8ea3d68c5918e3a Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Thu, 16 Jan 2025 00:39:22 -0800
Subject: [PATCH 33/43] fix: connection protocol.

---
 .../da/movement/protocol/util/src/config/local/da_light_node.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
index e1555797a..9643556b7 100644
--- a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
@@ -147,7 +147,7 @@ impl Default for Config {
 	fn default() -> Self {
 		Self {
 			movement_da_light_node_connection_protocol: default_celestia_rpc_connection_protocol(),
-			celestia_rpc_connection_protocol: default_celestia_rpc_connection_protocol(),
+			celestia_rpc_connection_protocol: default_movement_da_light_node_connection_protocol(),
 			celestia_rpc_connection_hostname: default_celestia_rpc_connection_hostname(),
 			celestia_rpc_connection_port: default_celestia_rpc_connection_port(),
 			celestia_websocket_connection_hostname: default_celestia_websocket_connection_hostname(

From 1913e58f91432bbd9aa9400309379c50ecc9e3a0 Mon Sep 17 00:00:00 2001
From: Liam Monninger <l.mak.monninger@gmail.com>
Date: Thu, 16 Jan 2025 01:29:08 -0800
Subject: [PATCH 34/43] fix: connection protocol.

---
 .../movement/protocol/util/src/config/local/da_light_node.rs | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
index 9643556b7..2a694b4bd 100644
--- a/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
+++ b/protocol-units/da/movement/protocol/util/src/config/local/da_light_node.rs
@@ -146,8 +146,9 @@ pub struct Config {
 impl Default for Config {
 	fn default() -> Self {
 		Self {
-			movement_da_light_node_connection_protocol: default_celestia_rpc_connection_protocol(),
-			celestia_rpc_connection_protocol: default_movement_da_light_node_connection_protocol(),
+			movement_da_light_node_connection_protocol:
+				default_movement_da_light_node_connection_protocol(),
+			celestia_rpc_connection_protocol: default_celestia_rpc_connection_protocol(),
 			celestia_rpc_connection_hostname: default_celestia_rpc_connection_hostname(),
 			celestia_rpc_connection_port: default_celestia_rpc_connection_port(),
 			celestia_websocket_connection_hostname: default_celestia_websocket_connection_hostname(

From 69f4d9a6f3aa148820de6e9d83b921e4da22f57a Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Fri, 17 Jan 2025 16:48:52 +0100
Subject: [PATCH 35/43] add heartbeat blob on grpc connection

---
 .../src/da/stream_blocks/mod.rs               |  8 +++++--
 .../src/node/tasks/execute_settle.rs          |  4 ++++
 .../process-compose.test-followers.yml        |  9 ++++++++
 .../da/light_node/v1beta1.proto               |  1 +
 .../da/movement/protocol/da/src/lib.rs        | 22 ++++++++++++++++---
 .../da/movement/protocol/da/src/mock/mod.rs   |  1 +
 .../protocol/light-node/src/passthrough.rs    | 10 +++++++--
 .../protocol/light-node/src/sequencer.rs      |  1 +
 .../protocol/util/src/blob/ir/blob.rs         | 14 ++++++++++++
 9 files changed, 63 insertions(+), 7 deletions(-)

diff --git a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
index 340397f87..ee0c6723d 100644
--- a/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
+++ b/networks/movement/movement-full-node/src/da/stream_blocks/mod.rs
@@ -27,7 +27,7 @@ impl StreamBlocks {
 			.await
 			.context("Failed to stream blocks from DA")?;
 
-		println!("Streaming blocks from DA");
+		tracing::info!("Streaming blocks from DA");
 
 		while let Some(block_res) = blocks_from_da.next().await {
 			let response = block_res.context("Failed to get block")?;
@@ -43,13 +43,17 @@ impl StreamBlocks {
 				blob_response::BlobType::PassedThroughBlob(blob) => {
 					(blob.data, blob.timestamp, blob.blob_id, blob.height)
 				}
+				blob_response::BlobType::HeartbeatBlob(_) => {
+					tracing::info!("Receive heartbeat blob");
+					continue;
+				}
 				_ => {
 					return Err(anyhow::anyhow!("Unknown blob type"));
 				}
 			};
 
 			// pretty print (with labels) the block_id, block_timestamp, and da_height
-			println!(
+			tracing::info!(
 				"Block ID: {}, Block Timestamp: {}, DA Height: {}",
 				hex::encode(block_id),
 				// unix date string from the block timestamp which is in microseconds
diff --git a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
index c7d2dd1c5..a42cf37c0 100644
--- a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
@@ -116,6 +116,10 @@ where
 			blob_response::BlobType::PassedThroughBlob(blob) => {
 				(blob.data, blob.timestamp, blob.blob_id, blob.height)
 			}
+			blob_response::BlobType::HeartbeatBlob(_) => {
+				// Do nothing.
+				return Ok(());
+			}
 			_ => anyhow::bail!("Invalid blob type"),
 		};
 
diff --git a/process-compose/movement-full-node/process-compose.test-followers.yml b/process-compose/movement-full-node/process-compose.test-followers.yml
index 89352ae81..f081d8969 100644
--- a/process-compose/movement-full-node/process-compose.test-followers.yml
+++ b/process-compose/movement-full-node/process-compose.test-followers.yml
@@ -3,6 +3,15 @@ version: "3"
 processes:
 
   setup:
+    environment:
+      - "ETH_RPC_CONNECTION_PROTOCOL=http"
+      - "ETH_RPC_CONNECTION_HOSTNAME=0.0.0.0"
+      - "ETH_RPC_CONNECTION_PORT=8090"
+      - "ETH_WS_CONNECTION_PROTOCOL=ws"
+      - "ETH_WS_CONNECTION_HOSTNAME=0.0.0.0"
+      - "ETH_WS_CONNECTION_PORT=8090"
+      - "MAYBE_RUN_LOCAL=true"
+      - "MOVEMENT_DA_LIGHT_NODE_HTTP1=true"
     command: |
       export AWS_REGION=us-west-2
       export MOVEMENT_SYNC="leader::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
diff --git a/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto b/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
index 1925dd20c..8d79c5a9e 100644
--- a/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
+++ b/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
@@ -17,6 +17,7 @@ message BlobResponse {
       Blob passed_through_blob = 1;
       Blob sequenced_blob_intent = 2;
       Blob sequenced_blob_block = 3;
+      Blob heartbeat_blob = 4;
     }
 }
 
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 876fe479b..89289ea04 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -130,6 +130,22 @@ pub trait DaOperations: Send + Sync {
 
 							last_height = height;
 						}
+						// Already-executed heights are used to send heartbeats.
+						Ok(Certificate::Height(height)) => {
+							// Old certificate; used to send a heartbeat block.
+							let blob_stream = self
+								.stream_da_blobs_between_heights(height, height)
+								.await?;
+							tokio::pin!(blob_stream);
+
+							while let Some(blob_res) = blob_stream.next().await {
+								let (_, blob) = blob_res?;
+								// Height zero is used to identify a heartbeat block.
+								// Should be changed to a type.
+								let heart_blob = (DaHeight(0u64), blob);
+								yield heart_blob;
+							}
+						}
 						Ok(Certificate::Nolo) => {
 							// Ignore Nolo
 						}
@@ -142,9 +158,9 @@ pub trait DaOperations: Send + Sync {
 							yield Err(e)?;
 						}
 						// If height is less than last height, ignore
-						_ => {
-							warn!("ignoring certificate");
-						}
+						// _ => {
+						// 	warn!("ignoring certificate");
+						// }
 					}
 				}
 			};
diff --git a/protocol-units/da/movement/protocol/da/src/mock/mod.rs b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
index e4b83d322..89857edb7 100644
--- a/protocol-units/da/movement/protocol/da/src/mock/mod.rs
+++ b/protocol-units/da/movement/protocol/da/src/mock/mod.rs
@@ -210,6 +210,7 @@ pub mod test {
 			vec![
 				Ok(1),                                          // First certificate
 				Err("internal error: fatal error".to_string()), // Fatal error
+				Ok(2),
 			]
 		);
 
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 6b555a2c3..a7cdd570c 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -133,8 +133,14 @@ where
 
 			while let Some(blob) = blob_stream.next().await {
 				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let blob = verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?;
+				let blob = if height.as_u64() == 0 {
+					//Heart beat blob
+					// No need to verify the data are removed.
+					da_blob.to_blob_heartbeat_response()
+				} else {
+					let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+					verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?
+				};
 				let response = StreamReadFromHeightResponse {
 					blob: Some(blob)
 				};
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index 9e0b4b940..0a681ad80 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -279,6 +279,7 @@ where
 		let sequenced_block = match blob_type {
 			BlobType::PassedThroughBlob(blob) => BlobType::SequencedBlobBlock(blob),
 			BlobType::SequencedBlobBlock(blob) => BlobType::SequencedBlobBlock(blob),
+			BlobType::HeartbeatBlob(blob) => BlobType::HeartbeatBlob(blob),
 			_ => {
 				anyhow::bail!("Invalid blob type")
 			}
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
index 0c29f362f..97bce0f6e 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -141,6 +141,20 @@ impl DaBlob {
 		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::SequencedBlobBlock(blob)) })
 	}
 
+	/// Converts a [DaBlob] into a [BlobResponse] with the blob passed through.
+	pub fn to_blob_heartbeat_response(self) -> BlobResponse {
+		//for heartbeat blob the data are removed.
+		let blob = Blob {
+			data: vec![],
+			signature: self.signature().to_vec(),
+			timestamp: self.timestamp(),
+			signer: self.signer().to_vec(),
+			blob_id: self.id().to_vec(),
+			height: 0,
+		};
+		BlobResponse { blob_type: Some(blob_response::BlobType::HeartbeatBlob(blob)) }
+	}
+
 	/// Converts a [DaBlob] into a [BlobResponse] with the blob passed through.
 	pub fn to_blob_passed_through_read_response(
 		self,

From acfd786fec22883279598b39c45dbed7b16b1925 Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Mon, 20 Jan 2025 11:55:45 +0100
Subject: [PATCH 36/43] create v1beta2.proto and set heartbeat with a bool

---
 Cargo.lock                                    |  1 +
 .../movement-client/bin/basic_alice_bob.rs    |  2 +-
 .../da/light_node/v1beta1.proto               |  1 -
 .../da/light_node/v1beta2.proto               | 99 +++++++++++++++++++
 .../protocol/light-node/src/passthrough.rs    |  9 +-
 .../protocol/light-node/src/sequencer.rs      |  2 +-
 .../da/movement/protocol/proto/build.rs       |  5 +-
 .../da/movement/protocol/proto/src/lib.rs     | 15 ++-
 .../protocol/util/src/blob/ir/blob.rs         | 14 ---
 9 files changed, 122 insertions(+), 26 deletions(-)
 create mode 100644 proto/movementlabs/protocol_units/da/light_node/v1beta2.proto

diff --git a/Cargo.lock b/Cargo.lock
index aed0551dd..eae848d1c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10203,6 +10203,7 @@ dependencies = [
  "commander",
  "dot-movement",
  "futures",
+ "hex",
  "itertools 0.12.1",
  "maptos-execution-util",
  "mcr-settlement-client",
diff --git a/networks/movement/movement-client/bin/basic_alice_bob.rs b/networks/movement/movement-client/bin/basic_alice_bob.rs
index 3c800c6fe..ffa4ec3fd 100644
--- a/networks/movement/movement-client/bin/basic_alice_bob.rs
+++ b/networks/movement/movement-client/bin/basic_alice_bob.rs
@@ -4,9 +4,9 @@ use aptos_sdk::{
 	rest_client::{Client, FaucetClient},
 	types::LocalAccount,
 };
+use movement_client::load_soak_testing::{execute_test, init_test, ExecutionConfig, Scenario};
 use std::str::FromStr;
 use std::sync::Arc;
-use movement_client::load_soak_testing::{execute_test, init_test, ExecutionConfig, Scenario};
 use url::Url;
 
 fn main() {
diff --git a/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto b/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
index 8d79c5a9e..1925dd20c 100644
--- a/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
+++ b/proto/movementlabs/protocol_units/da/light_node/v1beta1.proto
@@ -17,7 +17,6 @@ message BlobResponse {
       Blob passed_through_blob = 1;
       Blob sequenced_blob_intent = 2;
       Blob sequenced_blob_block = 3;
-      Blob heartbeat_blob = 4;
     }
 }
 
diff --git a/proto/movementlabs/protocol_units/da/light_node/v1beta2.proto b/proto/movementlabs/protocol_units/da/light_node/v1beta2.proto
new file mode 100644
index 000000000..c1eed543b
--- /dev/null
+++ b/proto/movementlabs/protocol_units/da/light_node/v1beta2.proto
@@ -0,0 +1,99 @@
+syntax = "proto3";
+package movementlabs.protocol_units.da.light_node.v1beta2;
+
+
+// Request and response messages
+message Blob {
+    bytes blob_id = 1;
+    bytes data = 2;
+    uint64 height = 3;
+    bytes signature = 4;
+    uint64 timestamp = 5;
+    bytes signer = 6;
+}
+
+message BlobResponse {
+    oneof blob_type {
+      Blob passed_through_blob = 1;
+      Blob sequenced_blob_intent = 2;
+      Blob sequenced_blob_block = 3;
+      bool heartbeat_blob = 4;
+    }
+}
+
+message BlobWrite {
+    bytes data = 1;
+}
+
+// StreamReadAtHeight
+message StreamReadFromHeightRequest {
+    uint64 height = 1;
+}
+
+message StreamReadFromHeightResponse {
+    BlobResponse blob = 1;
+}
+
+// StreamReadLatest
+message StreamReadLatestRequest {
+    
+}
+
+message StreamReadLatestResponse {
+    BlobResponse blob = 1;
+}
+
+// StreamWriteBlob
+message StreamWriteBlobRequest {
+    BlobWrite blob = 1;
+}
+
+message StreamWriteBlobResponse {
+    BlobResponse blob = 1;
+}
+
+// ReadAtHeight
+message ReadAtHeightRequest {
+    uint64 height = 1;
+}
+  
+message ReadAtHeightResponse {
+    repeated BlobResponse blobs = 1;
+}
+
+// BatchRead
+message BatchReadRequest {
+    repeated uint64 heights = 1;
+}
+  
+message BatchReadResponse {
+    repeated ReadAtHeightResponse responses = 1;
+}
+  
+message BatchWriteRequest {
+    repeated BlobWrite blobs = 1;
+}
+  
+message BatchWriteResponse {
+    repeated BlobResponse blobs = 1;
+}
+  
+
+
+// LightNode service definition
+service LightNodeService {
+  // Stream blobs from a specified height or from the latest height.
+  rpc StreamReadFromHeight (StreamReadFromHeightRequest) returns (stream StreamReadFromHeightResponse);
+  rpc StreamReadLatest (StreamReadLatestRequest) returns (stream StreamReadLatestResponse);
+  
+  // Stream blobs out, either individually or in batches.
+  rpc StreamWriteBlob (stream StreamWriteBlobRequest) returns (stream StreamWriteBlobResponse);
+  
+  // Read blobs at a specified height.
+  rpc ReadAtHeight (ReadAtHeightRequest) returns (ReadAtHeightResponse);
+  
+  // Batch read and write operations for efficiency.
+  rpc BatchRead (BatchReadRequest) returns (BatchReadResponse);
+  rpc BatchWrite (BatchWriteRequest) returns (BatchWriteResponse);
+  
+}
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index a7cdd570c..5798a7406 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -133,16 +133,15 @@ where
 
 			while let Some(blob) = blob_stream.next().await {
 				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let blob = if height.as_u64() == 0 {
-					//Heart beat blob
-					// No need to verify the data are removed.
-					da_blob.to_blob_heartbeat_response()
+				let response_content = if height.as_u64() == 0 {
+					// Heartbeat. The value can be used to indicate some status.
+					BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
 				} else {
 					let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 					verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?
 				};
 				let response = StreamReadFromHeightResponse {
-					blob: Some(blob)
+					blob: Some(response_content)
 				};
 				yield response;
 			}
diff --git a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
index 0a681ad80..e21b6a7b2 100644
--- a/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/sequencer.rs
@@ -279,7 +279,7 @@ where
 		let sequenced_block = match blob_type {
 			BlobType::PassedThroughBlob(blob) => BlobType::SequencedBlobBlock(blob),
 			BlobType::SequencedBlobBlock(blob) => BlobType::SequencedBlobBlock(blob),
-			BlobType::HeartbeatBlob(blob) => BlobType::HeartbeatBlob(blob),
+			BlobType::HeartbeatBlob(val) => BlobType::HeartbeatBlob(val),
 			_ => {
 				anyhow::bail!("Invalid blob type")
 			}
diff --git a/protocol-units/da/movement/protocol/proto/build.rs b/protocol-units/da/movement/protocol/proto/build.rs
index 0631f8820..ba94fa22e 100644
--- a/protocol-units/da/movement/protocol/proto/build.rs
+++ b/protocol-units/da/movement/protocol/proto/build.rs
@@ -1 +1,4 @@
-buildtime::proto_build_main!("movementlabs/protocol_units/da/light_node/v1beta1.proto");
+buildtime::proto_build_main!(
+	"movementlabs/protocol_units/da/light_node/v1beta1.proto",
+	"movementlabs/protocol_units/da/light_node/v1beta2.proto"
+);
diff --git a/protocol-units/da/movement/protocol/proto/src/lib.rs b/protocol-units/da/movement/protocol/proto/src/lib.rs
index 4defff541..019dc7457 100644
--- a/protocol-units/da/movement/protocol/proto/src/lib.rs
+++ b/protocol-units/da/movement/protocol/proto/src/lib.rs
@@ -1,8 +1,17 @@
-pub mod v1beta1 {
-	tonic::include_proto!("movementlabs.protocol_units.da.light_node.v1beta1"); // The string specified here
+// pub mod v1beta1 {
+// 	tonic::include_proto!("movementlabs.protocol_units.da.light_node.v1beta1"); // The string specified here
+// 	pub const FILE_DESCRIPTOR_SET: &[u8] =
+// 		tonic::include_file_descriptor_set!("movement-da-light-node-proto-descriptor");
+// }
+
+// Re-export the latest version at the crate root
+//pub use v1beta1::*;
+
+pub mod v1beta2 {
+	tonic::include_proto!("movementlabs.protocol_units.da.light_node.v1beta2"); // The string specified here
 	pub const FILE_DESCRIPTOR_SET: &[u8] =
 		tonic::include_file_descriptor_set!("movement-da-light-node-proto-descriptor");
 }
 
 // Re-export the latest version at the crate root
-pub use v1beta1::*;
+pub use v1beta2::*;
diff --git a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
index 97bce0f6e..0c29f362f 100644
--- a/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
+++ b/protocol-units/da/movement/protocol/util/src/blob/ir/blob.rs
@@ -141,20 +141,6 @@ impl DaBlob {
 		Ok(BlobResponse { blob_type: Some(blob_response::BlobType::SequencedBlobBlock(blob)) })
 	}
 
-	/// Converts a [DaBlob] into a [BlobResponse] with the blob passed through.
-	pub fn to_blob_heartbeat_response(self) -> BlobResponse {
-		//for heartbeat blob the data are removed.
-		let blob = Blob {
-			data: vec![],
-			signature: self.signature().to_vec(),
-			timestamp: self.timestamp(),
-			signer: self.signer().to_vec(),
-			blob_id: self.id().to_vec(),
-			height: 0,
-		};
-		BlobResponse { blob_type: Some(blob_response::BlobType::HeartbeatBlob(blob)) }
-	}
-
 	/// Converts a [DaBlob] into a [BlobResponse] with the blob passed through.
 	pub fn to_blob_passed_through_read_response(
 		self,

From 5f6de960024039a7fe2436fa935096d2c2dca4f3 Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Tue, 21 Jan 2025 18:14:50 +0100
Subject: [PATCH 37/43] add some logs to see heartbeats

---
 Cargo.lock                                                  | 1 -
 .../movement-full-node/src/node/tasks/execute_settle.rs     | 1 +
 protocol-units/da/movement/protocol/da/src/lib.rs           | 6 ++++++
 .../da/movement/protocol/light-node/src/passthrough.rs      | 1 +
 4 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/Cargo.lock b/Cargo.lock
index eae848d1c..aed0551dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10203,7 +10203,6 @@ dependencies = [
  "commander",
  "dot-movement",
  "futures",
- "hex",
  "itertools 0.12.1",
  "maptos-execution-util",
  "mcr-settlement-client",
diff --git a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
index a42cf37c0..8f42d9b23 100644
--- a/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
+++ b/networks/movement/movement-full-node/src/node/tasks/execute_settle.rs
@@ -117,6 +117,7 @@ where
 				(blob.data, blob.timestamp, blob.blob_id, blob.height)
 			}
 			blob_response::BlobType::HeartbeatBlob(_) => {
+				tracing::info!("Receive heartbeat blob");
 				// Do nothing.
 				return Ok(());
 			}
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 89289ea04..2e0e7a12a 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -107,6 +107,7 @@ pub trait DaOperations: Send + Sync {
 		&self,
 		start_height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
+		tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height start");
 		let fut = async move {
 			let certificate_stream = self.stream_certificates().await?;
 			let stream = try_stream! {
@@ -119,12 +120,14 @@ pub trait DaOperations: Send + Sync {
 
 					match certificate {
 						Ok(Certificate::Height(height)) if height > last_height => {
+							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got new accepted blob at height:{height}");
 							let blob_stream = self
 								.stream_da_blobs_between_heights(last_height, height)
 								.await?;
 							tokio::pin!(blob_stream);
 
 							while let Some(blob) = blob_stream.next().await {
+								tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height send new accepted blob at height:{height}");
 								yield blob?;
 							}
 
@@ -132,6 +135,7 @@ pub trait DaOperations: Send + Sync {
 						}
 						// Already executed Height are use to send Heartbeat.
 						Ok(Certificate::Height(height)) => {
+							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got old height:{height}");
 							//old certificate, use to send Heartbeat block.
 							let blob_stream = self
 								.stream_da_blobs_between_heights(height, height)
@@ -143,10 +147,12 @@ pub trait DaOperations: Send + Sync {
 								// Ack use heigth zero to identify heart beat block.
 								// Should be changed to a type.
 								let heart_blob = (DaHeight(0u64), blob);
+								tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got old height:{height} send heartbeat.");
 								yield heart_blob;
 							}
 						}
 						Ok(Certificate::Nolo) => {
+							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got Certificate::Nolo");
 							// Ignore Nolo
 						}
 						// Warn log non-fatal certificate errors
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 5798a7406..a6f32a6ce 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -143,6 +143,7 @@ where
 				let response = StreamReadFromHeightResponse {
 					blob: Some(response_content)
 				};
+				tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
 				yield response;
 			}
 

From 94c8e063f2a6f2a8fe8f7cc2f03d2c957be1c75d Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Wed, 22 Jan 2025 10:44:51 +0100
Subject: [PATCH 38/43] use a timer to generate heartbeat

---
 .../da/movement/protocol/da/src/lib.rs        | 95 +++++++++----------
 1 file changed, 46 insertions(+), 49 deletions(-)

diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 2e0e7a12a..93009f0c2 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -4,6 +4,7 @@ use async_stream::try_stream;
 use movement_da_util::blob::ir::blob::DaBlob;
 use std::future::Future;
 use std::pin::Pin;
+use tokio::time::{self, Duration};
 use tokio_stream::{Stream, StreamExt};
 use tracing::{info, warn};
 
@@ -110,63 +111,59 @@ pub trait DaOperations: Send + Sync {
 		tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height start");
 		let fut = async move {
 			let certificate_stream = self.stream_certificates().await?;
+			// Tick interval for generating HeartBeat.
+			let mut tick_interval = time::interval(Duration::from_secs(10));
+
 			let stream = try_stream! {
 				let mut last_height = start_height;
 				let mut certificate_stream = certificate_stream;
 
-				while let Some(certificate) = certificate_stream.next().await {
-
-					info!("certificate: {:?}", certificate);
-
-					match certificate {
-						Ok(Certificate::Height(height)) if height > last_height => {
-							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got new accepted blob at height:{height}");
-							let blob_stream = self
-								.stream_da_blobs_between_heights(last_height, height)
-								.await?;
-							tokio::pin!(blob_stream);
 
-							while let Some(blob) = blob_stream.next().await {
-								tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height send new accepted blob at height:{height}");
-								yield blob?;
+				loop {
+					tokio::select! {
+						// Yield from the data stream
+						Some(certificate) = certificate_stream.next() => {
+							info!("certificate: {:?}", certificate);
+							 match certificate {
+								Ok(Certificate::Height(height)) if height > last_height => {
+									tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got new accepted blob at height:{height}");
+									let blob_stream = self
+										.stream_da_blobs_between_heights(last_height, height)
+										.await.unwrap(); // TODO remove the unwrap()
+									tokio::pin!(blob_stream);
+
+									while let Some(blob) = blob_stream.next().await {
+										tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height send new accepted blob at height:{height}");
+										yield blob?;
+									}
+
+									last_height = height;
+								}
+								Ok(Certificate::Nolo) => {
+									tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got Certificate::Nolo");
+									// Ignore Nolo
+								}
+								// Warn log non-fatal certificate errors
+								Err(DaError::NonFatalCertificate(e)) => {
+									warn!("non-fatal certificate error: {}", e);
+								}
+								// Exit on all other errors
+								Err(e) => {
+									yield Err(e)?;
+								}
+								// If height is less than last height, ignore
+								_ => {
+									warn!("ignoring certificate");
+								}
 							}
-
-							last_height = height;
 						}
-						// Already executed Height are use to send Heartbeat.
-						Ok(Certificate::Height(height)) => {
-							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got old height:{height}");
-							//old certificate, use to send Heartbeat block.
-							let blob_stream = self
-								.stream_da_blobs_between_heights(height, height)
-								.await?;
-							tokio::pin!(blob_stream);
-
-							while let Some(blob_res) = blob_stream.next().await {
-								let (_, blob) = blob_res?;
-								// Ack use heigth zero to identify heart beat block.
-								// Should be changed to a type.
-								let heart_blob = (DaHeight(0u64), blob);
-								tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got old height:{height} send heartbeat.");
-								yield heart_blob;
-							}
-						}
-						Ok(Certificate::Nolo) => {
-							tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got Certificate::Nolo");
-							// Ignore Nolo
-						}
-						// Warn log non-fatal certificate errors
-						Err(DaError::NonFatalCertificate(e)) => {
-							warn!("non-fatal certificate error: {}", e);
-						}
-						// Exit on all other errors
-						Err(e) => {
-							yield Err(e)?;
+
+						// Yield the periodic tick
+						_ = tick_interval.tick() => {
+							let heart_blob = (DaHeight(0u64), DaBlob::DigestV1(Vec::new()));
+							tracing::info!("TEST Da lib DaOperations stream_da_blobs_yield heartbeat.");
+							yield heart_blob;
 						}
-						// If height is less than last height, ignore
-						// _ => {
-						// 	warn!("ignoring certificate");
-						// }
 					}
 				}
 			};

From f8594da520203a35e8d98204779fec7824905a1e Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Wed, 22 Jan 2025 13:05:54 +0100
Subject: [PATCH 39/43] move heartbeat generation to the gRPC send side

---
 .../process-compose.test-followers.yml        |  4 +-
 .../da/movement/protocol/da/src/lib.rs        | 74 +++++++------------
 .../protocol/light-node/src/passthrough.rs    | 61 ++++++++++-----
 3 files changed, 74 insertions(+), 65 deletions(-)

diff --git a/process-compose/movement-full-node/process-compose.test-followers.yml b/process-compose/movement-full-node/process-compose.test-followers.yml
index f081d8969..8e0b4033c 100644
--- a/process-compose/movement-full-node/process-compose.test-followers.yml
+++ b/process-compose/movement-full-node/process-compose.test-followers.yml
@@ -11,7 +11,7 @@ processes:
       - "ETH_WS_CONNECTION_HOSTNAME=0.0.0.0"
       - "ETH_WS_CONNECTION_PORT=8090"
       - "MAYBE_RUN_LOCAL=true"
-      - "MOVEMENT_DA_LIGHT_NODE_HTTP1=true"
+      - "MOVEMENT_DA_LIGHT_NODE_HTTP1=false"
     command: |
       export AWS_REGION=us-west-2
       export MOVEMENT_SYNC="leader::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
@@ -35,7 +35,7 @@ processes:
       - "ETH_WS_CONNECTION_HOSTNAME=0.0.0.0"
       - "ETH_WS_CONNECTION_PORT=8090"
       - "MAYBE_RUN_LOCAL=true"
-      - "MOVEMENT_DA_LIGHT_NODE_HTTP1=true"
+      - "MOVEMENT_DA_LIGHT_NODE_HTTP1=false"
     command: |
       sleep 30
       export AWS_REGION=us-west-2
diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index 93009f0c2..ea0003002 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -4,7 +4,6 @@ use async_stream::try_stream;
 use movement_da_util::blob::ir::blob::DaBlob;
 use std::future::Future;
 use std::pin::Pin;
-use tokio::time::{self, Duration};
 use tokio_stream::{Stream, StreamExt};
 use tracing::{info, warn};
 
@@ -111,58 +110,41 @@ pub trait DaOperations: Send + Sync {
 		tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height start");
 		let fut = async move {
 			let certificate_stream = self.stream_certificates().await?;
-			// Tick interval for generating HeartBeat.
-			let mut tick_interval = time::interval(Duration::from_secs(10));
-
 			let stream = try_stream! {
 				let mut last_height = start_height;
 				let mut certificate_stream = certificate_stream;
 
+				while let Some(certificate) = certificate_stream.next().await {
+
+					info!("certificate: {:?}", certificate);
 
-				loop {
-					tokio::select! {
-						// Yield from the data stream
-						Some(certificate) = certificate_stream.next() => {
-							info!("certificate: {:?}", certificate);
-							 match certificate {
-								Ok(Certificate::Height(height)) if height > last_height => {
-									tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got new accepted blob at height:{height}");
-									let blob_stream = self
-										.stream_da_blobs_between_heights(last_height, height)
-										.await.unwrap(); // TODO remove the unwrap()
-									tokio::pin!(blob_stream);
-
-									while let Some(blob) = blob_stream.next().await {
-										tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height send new accepted blob at height:{height}");
-										yield blob?;
-									}
-
-									last_height = height;
-								}
-								Ok(Certificate::Nolo) => {
-									tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height got Certificate::Nolo");
-									// Ignore Nolo
-								}
-								// Warn log non-fatal certificate errors
-								Err(DaError::NonFatalCertificate(e)) => {
-									warn!("non-fatal certificate error: {}", e);
-								}
-								// Exit on all other errors
-								Err(e) => {
-									yield Err(e)?;
-								}
-								// If height is less than last height, ignore
-								_ => {
-									warn!("ignoring certificate");
-								}
+					match certificate {
+						Ok(Certificate::Height(height)) if height > last_height => {
+							let blob_stream = self
+								.stream_da_blobs_between_heights(last_height, height)
+								.await?;
+							tokio::pin!(blob_stream);
+
+							while let Some(blob) = blob_stream.next().await {
+								yield blob?;
 							}
-						}
 
-						// Yield the periodic tick
-						_ = tick_interval.tick() => {
-							let heart_blob = (DaHeight(0u64), DaBlob::DigestV1(Vec::new()));
-							tracing::info!("TEST Da lib DaOperations stream_da_blobs_yield heartbeat.");
-							yield heart_blob;
+							last_height = height;
+						}
+						Ok(Certificate::Nolo) => {
+							// Ignore Nolo
+						}
+						// Warn log non-fatal certificate errors
+						Err(DaError::NonFatalCertificate(e)) => {
+							warn!("non-fatal certificate error: {}", e);
+						}
+						// Exit on all other errors
+						Err(e) => {
+							yield Err(e)?;
+						}
+						// If height is less than last height, ignore
+						_ => {
+							warn!("ignoring certificate");
 						}
 					}
 				}
diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index a6f32a6ce..19df26296 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -1,5 +1,6 @@
 use std::fmt::{self, Debug, Formatter};
 use std::sync::Arc;
+use tokio::time::{self, Duration};
 use tokio_stream::{Stream, StreamExt};
 use tracing::info;
 
@@ -127,27 +128,53 @@ where
 		let verifier = self.verifier.clone();
 		let height = request.into_inner().height;
 
+		// Tick interval for generating HeartBeat.
+		let mut tick_interval = time::interval(Duration::from_secs(10));
+
 		let output = async_stream::try_stream! {
 
 			let mut blob_stream = da.stream_da_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
-			while let Some(blob) = blob_stream.next().await {
-				let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-				let response_content = if height.as_u64() == 0 {
-					//Heart beat. The value can be use to indicate some status.
-					BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
-				} else {
-					let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
-					verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?
-				};
-				let response = StreamReadFromHeightResponse {
-					blob: Some(response_content)
-				};
-				tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
-				yield response;
-			}
-
-			info!("Stream read from height closed for height: {}", height);
+				loop {
+					let response_content = tokio::select! {
+						// Yield from the data stream
+						Some(blob) = blob_stream.next() => {
+							let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string())).unwrap(); //TODO remove unwrap
+							let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string())).unwrap(); //TODO remove unwrap;
+							tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
+							verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string())).unwrap() //TODO remove unwrap;
+						}
+						// Yield the periodic tick
+						_ = tick_interval.tick() => {
+							//Heart beat. The value can be use to indicate some status.
+							tracing::info!("TEST Da lib DaOperations stream_da_blobs_yield heartbeat.");
+							BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
+						}
+					};
+					let response = StreamReadFromHeightResponse {
+						blob: Some(response_content)
+					};
+					yield response;
+				}
+
+
+			// while let Some(blob) = blob_stream.next().await {
+			// 	let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
+			// 	let response_content = if height.as_u64() == 0 {
+			// 		//Heart beat. The value can be use to indicate some status.
+			// 		BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
+			// 	} else {
+			// 		let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
+			// 		verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?
+			// 	};
+			// 	let response = StreamReadFromHeightResponse {
+			// 		blob: Some(response_content)
+			// 	};
+			// 	tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
+			// 	yield response;
+			// }
+
+			// info!("Stream read from height closed for height: {}", height);
 
 		};
 

From 9dd4c62a8cde1463af7625c31e2396a0170019cd Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Wed, 22 Jan 2025 16:27:37 +0100
Subject: [PATCH 40/43] remove all unwraps from the blob gRPC stream processing

---
 .../protocol/light-node/src/passthrough.rs    | 78 +++++++++----------
 1 file changed, 38 insertions(+), 40 deletions(-)

diff --git a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
index 19df26296..87b880ae7 100644
--- a/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
+++ b/protocol-units/da/movement/protocol/light-node/src/passthrough.rs
@@ -135,47 +135,45 @@ where
 
 			let mut blob_stream = da.stream_da_blobs_from_height(height).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
 
-				loop {
-					let response_content = tokio::select! {
-						// Yield from the data stream
-						Some(blob) = blob_stream.next() => {
-							let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string())).unwrap(); //TODO remove unwrap
-							let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string())).unwrap(); //TODO remove unwrap;
-							tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
-							verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string())).unwrap() //TODO remove unwrap;
+			loop {
+				let response_content = tokio::select! {
+					// Yield from the data stream
+					block_opt = blob_stream.next() => {
+						match block_opt {
+							Some(Ok((height, da_blob))) => {
+								//let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
+								match verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string())).and_then(|verifed_blob| {
+									verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))
+								}) {
+									Ok(blob) => blob,
+									Err(err) => {
+										// Not verified block, skip to next one.
+										tracing::warn!("Stream blob of height: {} fail to verify error:{err}", height.as_u64());
+										continue;
+									}
+								}
+							}
+							Some(Err(err)) => {
+								tracing::warn!("Stream blob return an error, exit stream :{err}");
+								return;
+							},
+							None => {
+								info!("Stream blob closed , exit stream.");
+								return;
+							}
 						}
-						// Yield the periodic tick
-						_ = tick_interval.tick() => {
-							//Heart beat. The value can be use to indicate some status.
-							tracing::info!("TEST Da lib DaOperations stream_da_blobs_yield heartbeat.");
-							BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
-						}
-					};
-					let response = StreamReadFromHeightResponse {
-						blob: Some(response_content)
-					};
-					yield response;
-				}
-
-
-			// while let Some(blob) = blob_stream.next().await {
-			// 	let (height, da_blob) = blob.map_err(|e| tonic::Status::internal(e.to_string()))?;
-			// 	let response_content = if height.as_u64() == 0 {
-			// 		//Heart beat. The value can be use to indicate some status.
-			// 		BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
-			// 	} else {
-			// 		let verifed_blob = verifier.verify(da_blob, height.as_u64()).await.map_err(|e| tonic::Status::internal(e.to_string()))?;
-			// 		verifed_blob.into_inner().to_blob_passed_through_read_response(height.as_u64()).map_err(|e| tonic::Status::internal(e.to_string()))?
-			// 	};
-			// 	let response = StreamReadFromHeightResponse {
-			// 		blob: Some(response_content)
-			// 	};
-			// 	tracing::info!("TEST passthrough LightNode stream_read_from_height send blob");
-			// 	yield response;
-			// }
-
-			// info!("Stream read from height closed for height: {}", height);
-
+					}
+					// Yield the periodic tick
+					_ = tick_interval.tick() => {
+						//Heart beat. The value can be use to indicate some status.
+						BlobResponse { blob_type: Some(movement_da_light_node_proto::blob_response::BlobType::HeartbeatBlob(true)) }
+					}
+				};
+				let response = StreamReadFromHeightResponse {
+					blob: Some(response_content)
+				};
+				yield response;
+			}
 		};
 
 		Ok(tonic::Response::new(Box::pin(output) as Self::StreamReadFromHeightStream))

From 40268161756a06f02b0e66f475a021aaa5828370 Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Wed, 22 Jan 2025 17:57:42 +0100
Subject: [PATCH 41/43] remove unnecessary comments

---
 protocol-units/da/movement/protocol/da/src/lib.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/protocol-units/da/movement/protocol/da/src/lib.rs b/protocol-units/da/movement/protocol/da/src/lib.rs
index ea0003002..876fe479b 100644
--- a/protocol-units/da/movement/protocol/da/src/lib.rs
+++ b/protocol-units/da/movement/protocol/da/src/lib.rs
@@ -107,7 +107,6 @@ pub trait DaOperations: Send + Sync {
 		&self,
 		start_height: u64,
 	) -> Pin<Box<dyn Future<Output = Result<DaBlobStream, DaError>> + Send + '_>> {
-		tracing::info!("TEST Da lib DaOperations stream_da_blobs_from_height start");
 		let fut = async move {
 			let certificate_stream = self.stream_certificates().await?;
 			let stream = try_stream! {

From 057e22de350d51b237b48edf77cc35e96cf8590f Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Tue, 28 Jan 2025 20:36:22 +0100
Subject: [PATCH 42/43] Split the sync DB archive file into sub-chunks to
 allow uploading bigger files

---
 util/syncador/Cargo.toml                      |   1 +
 util/syncador/src/backend/archive/gzip/mod.rs |  93 +++++++++++
 .../syncador/src/backend/archive/gzip/pull.rs |  92 ++++++++++-
 .../syncador/src/backend/archive/gzip/push.rs | 153 +++++++++++++++---
 .../src/backend/s3/shared_bucket/mod.rs       |   2 +
 .../src/backend/s3/shared_bucket/pull.rs      |  24 +++
 .../src/backend/s3/shared_bucket/push.rs      |  46 +++++-
 util/syncador/src/files/package/mod.rs        |  11 +-
 8 files changed, 390 insertions(+), 32 deletions(-)

diff --git a/util/syncador/Cargo.toml b/util/syncador/Cargo.toml
index 2504fe183..9c05206ec 100644
--- a/util/syncador/Cargo.toml
+++ b/util/syncador/Cargo.toml
@@ -35,6 +35,7 @@ tracing = { workspace = true }
 
 [dev-dependencies]
 uuid = { workspace = true }
+tempfile = { workspace = true }
 
 [lints]
 workspace = true
diff --git a/util/syncador/src/backend/archive/gzip/mod.rs b/util/syncador/src/backend/archive/gzip/mod.rs
index a09e3324d..230df2329 100644
--- a/util/syncador/src/backend/archive/gzip/mod.rs
+++ b/util/syncador/src/backend/archive/gzip/mod.rs
@@ -1,2 +1,95 @@
 pub mod pull;
 pub mod push;
+
+pub(crate) const DEFAULT_CHUNK_SIZE: usize = 500 * 1024 * 1024; // 500 MB per chunk (adjustable)
+pub(crate) const BUFFER_SIZE: usize = 10 * 1024 * 1024; // 10 MB buffer for each read/write operation
+
+#[cfg(test)]
+pub mod test {
+
+	use crate::backend::archive::gzip::pull::Pull;
+	use crate::backend::archive::gzip::push::Push;
+	use crate::backend::PullOperations;
+	use crate::backend::PushOperations;
+	use crate::files::package::{Package, PackageElement};
+	use std::fs::File;
+	use std::io::BufWriter;
+	use std::io::Write;
+	use std::path::PathBuf;
+
+	#[tokio::test]
+	pub async fn test_archive_split() -> Result<(), anyhow::Error> {
+		// 1) Chunk size is bigger than the archive. No split in chunk.
+		process_archive_test("test_archive_split.tmp", 10 * 1024, 1024).await?;
+		// 2) Chunk size is smaller than the archive. Several chunk is create and reconstructed.
+		process_archive_test("test_archive_split2.tmp", 2024, 1024).await?;
+		Ok(())
+	}
+
+	async fn process_archive_test(
+		temp_file_name: &str,
+		chunk_size: usize,
+		buffer_size: usize,
+	) -> Result<(), anyhow::Error> {
+		//Create source and destination temp dir.
+		let source_dir = tempfile::tempdir()?;
+		let destination_dir = tempfile::tempdir()?;
+
+		//1) First test file too small doesn't archive.
+
+		let archive_file_path = source_dir.path().join(temp_file_name);
+		{
+			let file = File::create(&archive_file_path)?;
+			let mut writer = BufWriter::new(file);
+			//Fill with some data. 10 Mb
+			let data: Vec<u8> = vec![2; 1024 * 1024];
+			(0..10).try_for_each(|_| writer.write_all(&data))?;
+		}
+
+		let push = Push { archives_dir: source_dir.path().to_path_buf(), chunk_size, buffer_size };
+
+		let element = PackageElement {
+			sync_files: vec![archive_file_path],
+			root_dir: source_dir.path().to_path_buf(),
+		};
+		let package = Package(vec![element]);
+		let archive_package = push.push(package).await?;
+		println!("TEST archive_package: {:?}", archive_package);
+
+		let file_metadata = std::fs::metadata(&archive_package.0[0].sync_files[0])?;
+		let file_size = file_metadata.len() as usize;
+		println!("TEST Dest chunk file size: {file_size}",);
+
+		// Unarchive and verify
+		//move archive to dest folder.
+		let dest_files = archive_package
+			.0
+			.into_iter()
+			.flat_map(|element| element.sync_files)
+			.map(|absolute_path| {
+				let dest = destination_dir.path().join(absolute_path.file_name().unwrap());
+				println!("TEST move file source:{absolute_path:?} dest:{dest:?}");
+				std::fs::rename(&absolute_path, &dest)?;
+				Ok(dest)
+			})
+			.collect::<std::io::Result<Vec<PathBuf>>>()?;
+
+		let pull = Pull { destination_dir: destination_dir.path().to_path_buf() };
+		let element = PackageElement {
+			sync_files: dest_files,
+			root_dir: destination_dir.path().to_path_buf(),
+		};
+		let package = Package(vec![element]);
+
+		let dest_package = pull.pull(Some(package)).await;
+		println!("ICICICIC dest_package: {:?}", dest_package);
+
+		//verify the dest file has the right size
+		let file_metadata = std::fs::metadata(&destination_dir.path().join(temp_file_name))?;
+		let file_size = file_metadata.len() as usize;
+		println!("Dest fiel size: {file_size}",);
+		assert_eq!(file_size, 10 * 1024 * 1024, "dest file hasn't the right size: {file_size}");
+
+		Ok(())
+	}
+}
diff --git a/util/syncador/src/backend/archive/gzip/pull.rs b/util/syncador/src/backend/archive/gzip/pull.rs
index 40ccfa46e..a399f411d 100644
--- a/util/syncador/src/backend/archive/gzip/pull.rs
+++ b/util/syncador/src/backend/archive/gzip/pull.rs
@@ -1,8 +1,12 @@
+use crate::backend::archive::gzip::BUFFER_SIZE;
 use crate::backend::PullOperations;
 use crate::files::package::{Package, PackageElement};
 use flate2::read::GzDecoder;
 use std::collections::VecDeque;
 use std::fs::File;
+use std::fs::OpenOptions;
+use std::io::BufReader;
+use std::io::{Read, Write};
 use std::path::{Path, PathBuf};
 use tar::Archive;
 use tokio::{fs, task};
@@ -51,8 +55,28 @@ impl Pull {
 		// Create the destination directory if it doesn't exist
 		fs::create_dir_all(&destination).await?;
 
+		println!("PULL manifest:{:?}", manifest);
+
+		let mut unsplit_manifest =
+			PackageElement { sync_files: vec![], root_dir: manifest.root_dir.clone() };
+
 		// Unpack each archive in the manifest
 		for (_relative_path, absolute_path) in manifest.try_path_tuples()? {
+			let path_buf = absolute_path.to_path_buf();
+			let absolute_path = task::spawn_blocking(move || recreate_archive(path_buf)).await??;
+
+			println!("PULL absolute_path {absolute_path:?}",);
+			println!("PULL destination {destination:?}",);
+
+			if !unsplit_manifest.sync_files.contains(&absolute_path) {
+				unsplit_manifest.sync_files.push(absolute_path)
+			}
+		}
+
+		println!("PULL unsplit_manifest:{:?}", unsplit_manifest);
+
+		// Unpack each archive in the unsplit_manifest
+		for (_relative_path, absolute_path) in unsplit_manifest.try_path_tuples()? {
 			let tar_gz = File::open(&absolute_path)?;
 			let decoder = GzDecoder::new(tar_gz);
 			let mut archive = Archive::new(decoder);
@@ -68,7 +92,7 @@ impl Pull {
 		// Recursively add every file (not directory) in the destination directory to the new manifest
 		let mut entries = Vec::new();
 		Self::collect_files(&destination, &mut entries).await?;
-		info!("Unarchived files: {:?}", entries.len());
+		info!("PULL Unarchived files: {:?}", entries.len());
 		for file_path in entries {
 			new_manifest.add_sync_file(file_path);
 		}
@@ -97,3 +121,69 @@ impl PullOperations for Pull {
 		Ok(Some(Package(manifests)))
 	}
 }
+
+fn recreate_archive(archive_chunk: PathBuf) -> Result<PathBuf, anyhow::Error> {
+	if archive_chunk
+		.extension()
+		.map(|ext| {
+			println!("ext:{ext:?}",);
+			ext != "chunk"
+		})
+		.unwrap_or(true)
+	{
+		//not a chunk file return.
+		return Ok(archive_chunk);
+	}
+
+	let arhive_file_name = archive_chunk
+		.file_name()
+		.and_then(|file_name| file_name.to_str())
+		.and_then(|file_name_str| file_name_str.strip_suffix(".chunk"))
+		.and_then(|base_filename| {
+			let base_filename_parts: Vec<&str> = base_filename.rsplitn(2, '_').collect();
+			(base_filename_parts.len() > 1).then(|| base_filename_parts[1].to_string())
+		})
+		.ok_or(anyhow::anyhow!(format!(
+			"Archive filename not found for chunk path:{:?}",
+			archive_chunk.to_str()
+		)))?;
+
+	println!("PULL arhive_file_name:{:?}", arhive_file_name);
+
+	let archive_path = archive_chunk.parent().map(|parent| parent.join(arhive_file_name)).ok_or(
+		anyhow::anyhow!(format!(
+			"Archive filename no root dir in path:{:?}",
+			archive_chunk.to_str()
+		)),
+	)?;
+
+	println!("PULL archive_path:{:?}", archive_path);
+	let mut archive_file = OpenOptions::new()
+		.create(true) // Create the file if it doesn't exist
+		.append(true) // Open in append mode (do not overwrite)
+		.open(&archive_path)?;
+
+	let mut buffer = vec![0; BUFFER_SIZE];
+
+	println!("PULL archive_chunk:{:?}", archive_chunk);
+	let chunk_file = File::open(&archive_chunk)?;
+	let mut chunk_reader = BufReader::new(chunk_file);
+
+	loop {
+		// Read a part of the chunk into the buffer
+		let bytes_read = chunk_reader.read(&mut buffer)?;
+
+		if bytes_read == 0 {
+			break; // End of chunk file
+		}
+
+		// Write the buffer data to the output file
+		archive_file.write_all(&buffer[..bytes_read])?;
+	}
+
+	let file_metadata = std::fs::metadata(&archive_path)?;
+	let file_size = file_metadata.len() as usize;
+	println!("PULL {archive_path:?} archive_chunk size: {file_size}",);
+
+	Ok(archive_path)
+}
diff --git a/util/syncador/src/backend/archive/gzip/push.rs b/util/syncador/src/backend/archive/gzip/push.rs
index fb08f2a17..893b50620 100644
--- a/util/syncador/src/backend/archive/gzip/push.rs
+++ b/util/syncador/src/backend/archive/gzip/push.rs
@@ -1,29 +1,67 @@
+use crate::backend::archive::gzip::BUFFER_SIZE;
+use crate::backend::archive::gzip::DEFAULT_CHUNK_SIZE;
 use crate::backend::PushOperations;
 use crate::files::package::{Package, PackageElement};
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use std::fs::File;
+use std::io::{BufReader, Read, Write};
+use std::path::Path;
 use std::path::PathBuf;
 use tar::Builder;
 
 #[derive(Debug, Clone)]
 pub struct Push {
 	pub archives_dir: PathBuf,
+	pub chunk_size: usize,
+	pub buffer_size: usize,
 }
 
 impl Push {
 	pub fn new(archives_dir: PathBuf) -> Self {
-		Self { archives_dir }
+		Self { archives_dir, chunk_size: DEFAULT_CHUNK_SIZE, buffer_size: BUFFER_SIZE }
 	}
+}
+
+#[async_trait::async_trait]
+impl PushOperations for Push {
+	async fn push(&self, package: Package) -> Result<Package, anyhow::Error> {
+		let mut manifests = Vec::new();
+		for (i, manifest) in package.0.into_iter().enumerate() {
+			let new_manifest = tokio::task::spawn_blocking({
+				let archive_dir = self.archives_dir.clone();
+				let chunk_size = self.chunk_size;
+				let buffer_size = self.buffer_size;
 
-	/// Tar GZips a manifest.
-	fn tar_gzip_manifest(
-		manifest: PackageElement,
-		root_dir: PathBuf,
-		destination: PathBuf,
-	) -> Result<PackageElement, anyhow::Error> {
-		// create the archive builder
-		let file = File::create(destination.clone())?;
+				move || {
+					tar_gzip_manifest(
+						manifest,
+						archive_dir.join(format!("{}.tar.gz", i)),
+						archive_dir,
+						chunk_size,
+						buffer_size,
+					)
+				}
+			})
+			.await??;
+			manifests.push(new_manifest);
+		}
+		Ok(Package(manifests))
+	}
+}
+
+/// Tar GZips a manifest.
+fn tar_gzip_manifest(
+	manifest: PackageElement,
+	destination: PathBuf,
+	root_dir: PathBuf,
+	chunk_size: usize,
+	buffer_size: usize,
+) -> Result<PackageElement, anyhow::Error> {
+	// create the archive builder
+	println!("tar_gzip_manifest destination:{:?}", destination);
+	let file = File::create(destination.clone())?;
+	{
 		let encoder = GzEncoder::new(file, Compression::default());
 		let mut tar_builder = Builder::new(encoder);
 
@@ -34,25 +72,92 @@ impl Push {
 
 		// Finish writing the tar archive
 		tar_builder.finish()?;
+	}
 
-		let mut new_manifest = PackageElement::new(root_dir);
-		new_manifest.add_sync_file(destination);
-		Ok(new_manifest)
+	// Split the archive if needed
+	let destinations = split_archive(destination, &root_dir, chunk_size, buffer_size)?;
+	let mut new_manifest = PackageElement::new(root_dir);
+	for dest in destinations {
+		new_manifest.add_sync_file(dest);
 	}
+	Ok(new_manifest)
 }
 
-#[async_trait::async_trait]
-impl PushOperations for Push {
-	async fn push(&self, package: Package) -> Result<Package, anyhow::Error> {
-		let mut manifests = Vec::new();
-		for (i, manifest) in package.0.into_iter().enumerate() {
-			let new_manifest = Self::tar_gzip_manifest(
-				manifest,
-				self.archives_dir.clone(),
-				self.archives_dir.join(format!("{}.tar.gz", i)),
-			)?;
-			manifests.push(new_manifest);
+fn split_archive<P: AsRef<Path>>(
+	archive: PathBuf,
+	root_dir: P,
+	chunk_size: usize,
+	buffer_size: usize,
+) -> Result<Vec<PathBuf>, anyhow::Error> {
+	let output_dir = root_dir.as_ref();
+
+	// Check the file size before proceeding with the split
+	let file_metadata = std::fs::metadata(&archive)?;
+	let file_size = file_metadata.len() as usize;
+	println!("Push split file size{file_size} chunksize:{chunk_size}",);
+	if file_size <= chunk_size {
+		return Ok(vec![archive]);
+	}
+
+	let archive_file = File::open(&archive)?;
+
+	let file_metadata = std::fs::metadata(&archive)?;
+	let file_size = file_metadata.len() as usize;
+	println!("PUSH {archive:?} archive_file size: {file_size}",);
+
+	std::fs::create_dir_all(output_dir)?;
+
+	let mut chunk_num = 0;
+	let mut buffer = vec![0; buffer_size];
+
+	let archive_relative_path = archive.strip_prefix(&output_dir)?;
+	let mut input_reader = BufReader::new(archive_file);
+
+	let mut chunk_list = vec![];
+	loop {
+		// Create a new file for the chunk
+		let chunk_path = output_dir.join(format!(
+			"{}_{:03}.chunk",
+			archive_relative_path.to_string_lossy(),
+			chunk_num
+		));
+
+		println!("PUSH create chunk_path: {chunk_path:?}",);
+		let mut chunk_file = File::create(&chunk_path)?;
+
+		let mut all_read_bytes = 0;
+		let end = loop {
+			// Read a part of the chunk into the buffer
+			let bytes_read = input_reader.read(&mut buffer)?;
+			println!("PUSH Read bytes: {bytes_read}",);
+			if bytes_read == 0 {
+				break true; // End of chunk file
+			}
+
+			// Write the buffer data to the output file
+			chunk_file.write_all(&buffer[..bytes_read])?;
+			all_read_bytes += bytes_read;
+			println!("PUSH all_read_bytes {all_read_bytes:?} chunk_size:{chunk_size}",);
+			if all_read_bytes >= chunk_size {
+				break false;
+			}
+		};
+
+		if all_read_bytes == 0 {
+			break; // End of chunk file and discard the current one.
+		}
+
+		let file_metadata = std::fs::metadata(&chunk_path)?;
+		let file_size = file_metadata.len() as usize;
+		println!("{chunk_path:?} chunk_file size: {file_size}",);
+
+		chunk_num += 1;
+		chunk_list.push(chunk_path);
+		if end {
+			break; // End of chunk file
 		}
-		Ok(Package(manifests))
 	}
+
+	println!("split_archive return {chunk_list:?}",);
+	Ok(chunk_list)
 }
diff --git a/util/syncador/src/backend/s3/shared_bucket/mod.rs b/util/syncador/src/backend/s3/shared_bucket/mod.rs
index e85e2611f..f750d6956 100644
--- a/util/syncador/src/backend/s3/shared_bucket/mod.rs
+++ b/util/syncador/src/backend/s3/shared_bucket/mod.rs
@@ -2,6 +2,8 @@ use super::bucket_connection;
 use aws_types::region::Region;
 use tracing::info;
 
+const UPLOAD_COMPLETE_MARKER_FILE_NAME: &str = "upload_complete.txt";
+
 pub mod metadata;
 pub mod pull;
 pub mod push;
diff --git a/util/syncador/src/backend/s3/shared_bucket/pull.rs b/util/syncador/src/backend/s3/shared_bucket/pull.rs
index 547692533..7578e64f6 100644
--- a/util/syncador/src/backend/s3/shared_bucket/pull.rs
+++ b/util/syncador/src/backend/s3/shared_bucket/pull.rs
@@ -87,6 +87,26 @@ impl Pull {
 			candidates.entry(candidate).or_insert_with(HashSet::new).insert(file_path);
 		}
 
+		// Keep only the candidates whose file set contains the upload-complete marker file.
+		let to_remove: Vec<_> = candidates
+			.iter()
+			.filter_map(|(key, set)| {
+				if set
+					.iter()
+					.find(|path| path.ends_with(super::UPLOAD_COMPLETE_MARKER_FILE_NAME))
+					.is_none()
+				{
+					Some(key.clone())
+				} else {
+					None
+				}
+			})
+			.collect();
+		println!("S3 PUSH to_remove: {to_remove:?}",);
+		to_remove.iter().for_each(|key| {
+			candidates.remove(key);
+		});
+
 		Ok(candidates.keys().cloned().collect())
 	}
 
@@ -140,6 +160,10 @@ impl Pull {
 		// download each file
 		let mut manifest_futures = Vec::new();
 		for file_path in file_paths {
+			// Skip the upload-complete marker file; it is not part of the manifest.
+			if file_path.ends_with(super::UPLOAD_COMPLETE_MARKER_FILE_NAME) {
+				continue;
+			}
 			let relative_path = PathBuf::from(
 				file_path
 					.strip_prefix(format!("{}/", candidate.key).as_str())
diff --git a/util/syncador/src/backend/s3/shared_bucket/push.rs b/util/syncador/src/backend/s3/shared_bucket/push.rs
index 89e74dcfd..a0a29efb2 100644
--- a/util/syncador/src/backend/s3/shared_bucket/push.rs
+++ b/util/syncador/src/backend/s3/shared_bucket/push.rs
@@ -25,8 +25,8 @@ impl Push {
 
 	pub(crate) async fn upload_path(
 		&self,
-		relative_path: &std::path::Path,
-		full_path: &std::path::Path,
+		relative_path: std::path::PathBuf,
+		full_path: std::path::PathBuf,
 	) -> Result<(PutObjectOutput, PathBuf), anyhow::Error> {
 		let bucket = self.bucket_connection.bucket.clone();
 		let key =
@@ -45,6 +45,38 @@ impl Push {
 		Ok((output, s3_path.into()))
 	}
 
+	async fn add_marker_file(
+		&self,
+		marker_name: &str,
+	) -> Result<(PutObjectOutput, PathBuf), anyhow::Error> {
+		let bucket = self.bucket_connection.bucket.clone();
+		let marker_key = format!("{}/{}", self.metadata.syncer_epoch_prefix()?, marker_name);
+		let s3_path = format!("s3://{}/{}", bucket, marker_key);
+		let output = self
+			.bucket_connection
+			.client
+			.put_object()
+			.bucket(bucket)
+			.key(marker_key)
+			.body(ByteStream::from_static(b"Upload complete"))
+			.send()
+			.await?;
+		Ok((output, s3_path.into()))
+	}
+
+	// Adapter that dispatches to add_marker_file or upload_path, so both produce the same future type.
+	async fn add_upload_entry(
+		&self,
+		relative_path: std::path::PathBuf,
+		full_path: std::path::PathBuf,
+		marker_file: Option<&str>,
+	) -> Result<(PutObjectOutput, PathBuf), anyhow::Error> {
+		match marker_file {
+			Some(file) => self.add_marker_file(file).await,
+			None => self.upload_path(relative_path, full_path).await,
+		}
+	}
+
 	pub(crate) async fn upload_based_on_manifest(
 		&self,
 		manifest: PackageElement,
@@ -55,10 +87,18 @@ impl Push {
 		// upload each file
 		let mut manifest_futures = Vec::new();
 		for (relative_path, full_path) in path_tuples {
-			let future = self.upload_path(&relative_path, &full_path);
+			let future = self.add_upload_entry(relative_path, full_path, None);
 			manifest_futures.push(future);
 		}
 
+		// Add upload completed marker file
+		let future = self.add_upload_entry(
+			Default::default(),
+			Default::default(),
+			Some(super::UPLOAD_COMPLETE_MARKER_FILE_NAME),
+		);
+		manifest_futures.push(future);
+
 		// try to join all the manifest_futures
 		let put_object_outputs = futures::future::try_join_all(manifest_futures).await?;
 		let mut new_manifest = PackageElement::new(self.bucket_connection.bucket.clone().into());
diff --git a/util/syncador/src/files/package/mod.rs b/util/syncador/src/files/package/mod.rs
index 3e787b5ef..7620afe2c 100644
--- a/util/syncador/src/files/package/mod.rs
+++ b/util/syncador/src/files/package/mod.rs
@@ -1,4 +1,4 @@
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 
 /// A package is a collection of file system locations that are synced together.
 #[derive(Debug, Clone)]
@@ -34,10 +34,13 @@ impl PackageElement {
 		Self { sync_files: Vec::new(), root_dir }
 	}
 
-	pub fn try_path_tuples(&self) -> Result<Vec<(&Path, &PathBuf)>, anyhow::Error> {
+	pub fn try_path_tuples(&self) -> Result<Vec<(PathBuf, PathBuf)>, anyhow::Error> {
 		let mut tuples = Vec::new();
-		for file in &self.sync_files {
-			let relative_path = file.strip_prefix(&self.root_dir)?;
+		// Sort the files so that chunk files are processed in order.
+		let mut ordered_files = self.sync_files.clone();
+		ordered_files.sort();
+		for file in ordered_files {
+			let relative_path = file.strip_prefix(&self.root_dir)?.to_path_buf();
 			tuples.push((relative_path, file));
 		}
 		Ok(tuples)

From 6e00b778ee7d8139b153aa4eb80805aead07e252 Mon Sep 17 00:00:00 2001
From: musitdev <philippe.delrieu@free.fr>
Date: Thu, 30 Jan 2025 15:07:10 +0100
Subject: [PATCH 43/43] set db sync optional to allow genesis boot

---
 .../compose/movement-full-node/docker-compose.follower.yml  | 2 +-
 .../movement-full-node/process-compose.test-followers.yml   | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docker/compose/movement-full-node/docker-compose.follower.yml b/docker/compose/movement-full-node/docker-compose.follower.yml
index 2293198ef..6a1b060be 100644
--- a/docker/compose/movement-full-node/docker-compose.follower.yml
+++ b/docker/compose/movement-full-node/docker-compose.follower.yml
@@ -12,7 +12,7 @@ services:
       AWS_REGION: ${AWS_REGION:?AWS_REGION is not set}
       AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
       AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
-      MOVEMENT_SYNC: ${MOVEMENT_SYNC:?MOVEMENT_SYNC is not set}
+      MOVEMENT_SYNC: ${MOVEMENT_SYNC} #:?MOVEMENT_SYNC is not set}
       MAYBE_RUN_LOCAL: "false"
       MOVEMENT_DA_LIGHT_NODE_HTTP1: ${MOVEMENT_DA_LIGHT_NODE_HTTP1}
       RUST_LOG: info,aws_sdk_s3=debug
diff --git a/process-compose/movement-full-node/process-compose.test-followers.yml b/process-compose/movement-full-node/process-compose.test-followers.yml
index 8e0b4033c..36ae85c23 100644
--- a/process-compose/movement-full-node/process-compose.test-followers.yml
+++ b/process-compose/movement-full-node/process-compose.test-followers.yml
@@ -14,7 +14,7 @@ processes:
       - "MOVEMENT_DA_LIGHT_NODE_HTTP1=false"
     command: |
       export AWS_REGION=us-west-2
-      export MOVEMENT_SYNC="leader::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
+      # export MOVEMENT_SYNC="leader::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
       export MAYBE_RUN_LOCAL=true
       export MAYBE_DEPLOY_MCR=true
       movement-full-node-setup
@@ -45,7 +45,7 @@ processes:
       export MAPTOS_API_CONNECTION_PORT=31731
       export MAPTOS_API_LISTEN_PORT=31731
       export DOT_MOVEMENT_PATH=$DOT_MOVEMENT_PATH-follower-1
-      export MOVEMENT_SYNC="follower::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
+      # export MOVEMENT_SYNC="follower::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
       movement-full-node-setup
     depends_on:
       build:
@@ -92,7 +92,7 @@ processes:
       export MAPTOS_API_CONNECTION_PORT=32731
       export MAPTOS_API_LISTEN_PORT=32731
       export DOT_MOVEMENT_PATH=$DOT_MOVEMENT_PATH-follower-2
-      export MOVEMENT_SYNC="follower::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
+      # export MOVEMENT_SYNC="follower::follower-test-$MOVEMENT_SHARED_RANDOM_1<=>{default_signer_address_whitelist,maptos,maptos-storage,movement-da-db}/**"
       movement-full-node-setup
     depends_on:
       build: