diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index c0b7f8156..3cdee27ed 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -14,6 +14,7 @@ concurrency:
 env:
   CARGO_TERM_COLOR: always
   BINSTALL_NO_CONFIRM: true
+  RUSTDOCFLAGS: "-D warnings"
 
 jobs:
   clippy:
@@ -38,7 +39,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/rust
-      - run: cargo doc --all
+      - run: cargo doc --all --no-deps
   cargo-fmt:
     runs-on: ubuntu-latest
     steps:
diff --git a/evm_arithmetization/src/cpu/kernel/constants/mod.rs b/evm_arithmetization/src/cpu/kernel/constants/mod.rs
index c738bb582..a029dc9fe 100644
--- a/evm_arithmetization/src/cpu/kernel/constants/mod.rs
+++ b/evm_arithmetization/src/cpu/kernel/constants/mod.rs
@@ -472,7 +472,7 @@ pub mod cancun_constants {
 pub mod global_exit_root {
     use super::*;
 
-    /// Taken from https://github.com/0xPolygonHermez/cdk-erigon/blob/61f0b6912055c73f6879ea7e9b5bac22ea5fc85c/zk/utils/global_exit_root.go#L16.
+    /// Taken from <https://github.com/0xPolygonHermez/cdk-erigon/blob/61f0b6912055c73f6879ea7e9b5bac22ea5fc85c/zk/utils/global_exit_root.go#L16>.
     pub const GLOBAL_EXIT_ROOT_MANAGER_L2: (&str, [u8; 20]) = (
         "GLOBAL_EXIT_ROOT_MANAGER_L2",
         GLOBAL_EXIT_ROOT_ADDRESS.to_fixed_bytes(),
@@ -486,7 +486,7 @@ pub mod global_exit_root {
         "GLOBAL_EXIT_ROOT_MANAGER_L2_STATE_KEY",
         GLOBAL_EXIT_ROOT_ADDRESS_HASHED.to_fixed_bytes(),
     );
-    /// Taken from https://github.com/0xPolygonHermez/cdk-erigon/blob/dc3cbcc59a95769626056c7bc70aade501e7741d/core/state/intra_block_state_zkevm.go#L20.
+    /// Taken from <https://github.com/0xPolygonHermez/cdk-erigon/blob/dc3cbcc59a95769626056c7bc70aade501e7741d/core/state/intra_block_state_zkevm.go#L20>.
     pub const ADDRESS_SCALABLE_L2: Address = H160(hex!("000000000000000000000000000000005ca1ab1e"));
 
     pub const ADDRESS_SCALABLE_L2_ADDRESS_HASHED: H256 = H256(hex!(
@@ -497,16 +497,16 @@ pub mod global_exit_root {
         "ADDRESS_SCALABLE_L2_STATE_KEY",
         ADDRESS_SCALABLE_L2_ADDRESS_HASHED.to_fixed_bytes(),
     );
-    /// Taken from https://github.com/0xPolygonHermez/cdk-erigon/blob/61f0b6912055c73f6879ea7e9b5bac22ea5fc85c/zk/utils/global_exit_root.go#L17.
+    /// Taken from <https://github.com/0xPolygonHermez/cdk-erigon/blob/61f0b6912055c73f6879ea7e9b5bac22ea5fc85c/zk/utils/global_exit_root.go#L17>.
     pub const GLOBAL_EXIT_ROOT_STORAGE_POS: (&str, u64) = ("GLOBAL_EXIT_ROOT_STORAGE_POS", 0);
 
-    /// Taken from https://github.com/0xPolygonHermez/cdk-erigon/blob/dc3cbcc59a95769626056c7bc70aade501e7741d/core/state/intra_block_state_zkevm.go#L16.
+    /// Taken from <https://github.com/0xPolygonHermez/cdk-erigon/blob/dc3cbcc59a95769626056c7bc70aade501e7741d/core/state/intra_block_state_zkevm.go#L16>.
     pub const LAST_BLOCK_STORAGE_POS: (&str, u64) = ("LAST_BLOCK_STORAGE_POS", 0);
     pub const STATE_ROOT_STORAGE_POS: (&str, u64) = ("STATE_ROOT_STORAGE_POS", 1);
     pub const TIMESTAMP_STORAGE_POS: (&str, u64) = ("TIMESTAMP_STORAGE_POS", 2);
     pub const BLOCK_INFO_ROOT_STORAGE_POS: (&str, u64) = ("BLOCK_INFO_ROOT_STORAGE_POS", 3);
 
-    /// Taken from https://zkevm.polygonscan.com/address/0xa40D5f56745a118D0906a34E69aeC8C0Db1cB8fA#code.
+    /// Taken from <https://zkevm.polygonscan.com/address/0xa40D5f56745a118D0906a34E69aeC8C0Db1cB8fA#code>.
     pub const GLOBAL_EXIT_ROOT_CONTRACT_CODE: [u8; 2112] = hex!("60806040526004361061004e5760003560e01c80633659cfe6146100655780634f1ef286146100855780635c60da1b146100985780638f283970146100c9578063f851a440146100e95761005d565b3661005d5761005b6100fe565b005b61005b6100fe565b34801561007157600080fd5b5061005b6100803660046106ca565b610118565b61005b6100933660046106e5565b61015f565b3480156100a457600080fd5b506100ad6101d0565b6040516001600160a01b03909116815260200160405180910390f35b3480156100d557600080fd5b5061005b6100e43660046106ca565b61020b565b3480156100f557600080fd5b506100ad610235565b610106610292565b610116610111610331565b61033b565b565b61012061035f565b6001600160a01b0316336001600160a01b031614156101575761015481604051806020016040528060008152506000610392565b50565b6101546100fe565b61016761035f565b6001600160a01b0316336001600160a01b031614156101c8576101c38383838080601f01602080910402602001604051908101604052809392919081815260200183838082843760009201919091525060019250610392915050565b505050565b6101c36100fe565b60006101da61035f565b6001600160a01b0316336001600160a01b03161415610200576101fb610331565b905090565b6102086100fe565b90565b61021361035f565b6001600160a01b0316336001600160a01b0316141561015757610154816103f1565b600061023f61035f565b6001600160a01b0316336001600160a01b03161415610200576101fb61035f565b606061028583836040518060600160405280602781526020016107e460279139610445565b9392505050565b3b151590565b61029a61035f565b6001600160a01b0316336001600160a01b031614156101165760405162461bcd60e51b815260206004820152604260248201527f5472616e73706172656e745570677261646561626c6550726f78793a2061646d60448201527f696e2063616e6e6f742066616c6c6261636b20746f2070726f78792074617267606482015261195d60f21b608482015260a4015b60405180910390fd5b60006101fb610519565b3660008037600080366000845af43d6000803e80801561035a573d6000f35b3d6000fd5b60007fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61035b546001600160a01b0316919050565b61039b83610541565b6040516001600160a01b038416907fbc7cd75a20ee27fd9adebab32041f755214dbc6bffa90cc0225b39da2e5c2d3b90600090a26000825111806103dc5750805b156101c3576103eb8383610260565b50505050565b7f7e644d79422f17c01e4894b5f4f588d331ebfa28653d42ae832dc59e38c9798f61041a61035f565b604080516001600160a01b03928316815291841660208301520160405180910390a1610154816105e9565b6060833b6104a45760405162461bcd60e51b815260206004820152602660248201527f416464726573733a2064656c65676174652063616c6c20746f206e6f6e2d636f6044820152651b9d1c9858dd60d21b6064820152608401610328565b600080856001600160a01b0316856040516104bf9190610794565b600060405180830381855af49150503d80600081146104fa576040519150601f19603f3d011682016040523d82523d6000602084013e6104ff565b606091505b509150915061050f828286610675565b9695505050505050565b60007f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc610383565b803b6105a55760405162461bcd60e51b815260206004820152602d60248201527f455243313936373a206e657720696d706c656d656e746174696f6e206973206e60448201526c1bdd08184818dbdb9d1c9858dd609a1b6064820152608401610328565b807f360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc5b80546001600160a01b0319166001600160a01b039290921691909117905550565b6001600160a01b03811661064e5760405162461bcd60e51b815260206004820152602660248201527f455243313936373a206e65772061646d696e20697320746865207a65726f206160448201526564647265737360d01b6064820152608401610328565b807fb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d61036105c8565b60608315610684575081610285565b8251156106945782518084602001fd5b8160405162461bcd60e51b815260040161032891906107b0565b80356001600160a01b03811681146106c557600080fd5b919050565b60006020828
40312156106dc57600080fd5b610285826106ae565b6000806000604084860312156106fa57600080fd5b610703846106ae565b9250602084013567ffffffffffffffff8082111561072057600080fd5b818601915086601f83011261073457600080fd5b81358181111561074357600080fd5b87602082850101111561075557600080fd5b6020830194508093505050509250925092565b60005b8381101561078357818101518382015260200161076b565b838111156103eb5750506000910152565b600082516107a6818460208701610768565b9190910192915050565b60208152600082518060208401526107cf816040850160208701610768565b601f01601f1916919091016040019291505056fe416464726573733a206c6f772d6c6576656c2064656c65676174652063616c6c206661696c6564a26469706673582212204675187caf3a43285d9a2c1844a981e977bd52a85ff073e7fc649f73847d70a464736f6c63430008090033");
     pub const GLOBAL_EXIT_ROOT_CONTRACT_CODE_HASH: [u8; 32] =
         hex!("6bec2bf64f7e824109f6ed55f77dd7665801d6195e461666ad6a5342a9f6daf5");
diff --git a/evm_arithmetization/src/fixed_recursive_verifier.rs b/evm_arithmetization/src/fixed_recursive_verifier.rs
index 3c1f0b1e1..10a4f6f56 100644
--- a/evm_arithmetization/src/fixed_recursive_verifier.rs
+++ b/evm_arithmetization/src/fixed_recursive_verifier.rs
@@ -29,6 +29,7 @@ use plonky2::util::serialization::{
 };
 use plonky2::util::timing::TimingTree;
 use plonky2_util::log2_ceil;
+use serde::{Deserialize, Serialize};
 use starky::config::StarkConfig;
 use starky::cross_table_lookup::{verify_cross_table_lookups_circuit, CrossTableLookup};
 use starky::lookup::{get_grand_product_challenge_set_target, GrandProductChallengeSet};
@@ -60,6 +61,22 @@ use crate::verifier::initial_memory_merkle_cap;
 /// this size.
 const THRESHOLD_DEGREE_BITS: usize = 13;
 
+/// An inner recursive proof along with its public values, for proper
+/// connection with contiguous proofs.
+#[derive(Clone, Debug, Deserialize, Serialize)]
+#[serde(bound = "")]
+pub struct ProofWithPublicValues<F, C, const D: usize>
+where
+    F: RichField + Extendable<D>,
+    C: GenericConfig<D, F = F>,
+    C::Hasher: AlgebraicHasher<F>,
+{
+    /// Public values associated with this proof.
+    pub public_values: PublicValues<F>,
+    /// Underlying recursive proof.
+    pub intern: ProofWithPublicInputs<F, C, D>,
+}
+
 #[derive(Clone)]
 pub struct ProverOutputData<F, C, const D: usize>
 where
@@ -67,9 +84,14 @@ where
     C: GenericConfig<D, F = F>,
     C::Hasher: AlgebraicHasher<F>,
 {
+    /// Flag indicating whether this represents an individual
+    /// segment / batch or an aggregation of them.
+    pub is_agg: bool,
+    /// Flag indicating whether this represents a dummy run. This is specific to
+    /// the segment aggregation logic.
     pub is_dummy: bool,
-    pub proof_with_pis: ProofWithPublicInputs<F, C, D>,
-    pub public_values: PublicValues<F>,
+    /// The underlying recursive proof, along with its public values.
+    pub proof_with_pvs: ProofWithPublicValues<F, C, D>,
 }
 
 /// Contains all recursive circuits used in the system. For each STARK and each
@@ -90,10 +112,11 @@ where
     /// The segment aggregation circuit, which verifies that two segment proofs
     /// that can either be root or aggregation proofs.
     pub segment_aggregation: SegmentAggregationCircuitData<F, C, D>,
-    /// The transaction aggregation circuit, which verifies the aggregation of
-    /// two proofs that can either be a segment aggregation representing a
-    /// batch of transactions or an aggregation of those batches.
-    pub txn_aggregation: TxnAggregationCircuitData<F, C, D>,
+    /// The transaction batch aggregation circuit, which verifies the
+    /// aggregation of two proofs that can either be a segment aggregation
+    /// representing a batch of transactions or an aggregation of those
+    /// batches.
+    pub batch_aggregation: BatchAggregationCircuitData<F, C, D>,
     /// The block circuit, which verifies a transaction aggregation proof and an
     /// optional previous block proof.
     pub block: BlockCircuitData<F, C, D>,
@@ -282,7 +305,7 @@ impl<const D: usize> AggregationChildWithDummyTarget<D> {
 /// proofs into one. Each inner proof can be either a segment aggregation proof
 /// or another transaction aggregation proof.
 #[derive(Eq, PartialEq, Debug)]
-pub struct TxnAggregationCircuitData<F, C, const D: usize>
+pub struct BatchAggregationCircuitData<F, C, const D: usize>
 where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -294,7 +317,7 @@ where
     cyclic_vk: VerifierCircuitTarget,
 }
 
-impl<F, C, const D: usize> TxnAggregationCircuitData<F, C, D>
+impl<F, C, const D: usize> BatchAggregationCircuitData<F, C, D>
 where
     F: RichField + Extendable<D>,
     C: GenericConfig<D, F = F>,
@@ -569,7 +592,7 @@ where
             .to_buffer(&mut buffer, gate_serializer, generator_serializer)?;
         self.segment_aggregation
             .to_buffer(&mut buffer, gate_serializer, generator_serializer)?;
-        self.txn_aggregation
+        self.batch_aggregation
             .to_buffer(&mut buffer, gate_serializer, generator_serializer)?;
         self.block
             .to_buffer(&mut buffer, gate_serializer, generator_serializer)?;
@@ -612,7 +635,7 @@ where
             gate_serializer,
             generator_serializer,
         )?;
-        let txn_aggregation = TxnAggregationCircuitData::from_buffer(
+        let batch_aggregation = BatchAggregationCircuitData::from_buffer(
             &mut buffer,
             gate_serializer,
             generator_serializer,
@@ -663,7 +686,7 @@ where
         Ok(Self {
             root,
             segment_aggregation,
-            txn_aggregation,
+            batch_aggregation,
             block,
             block_wrapper,
             two_to_one_block,
@@ -738,16 +761,16 @@ where
 
         let root = Self::create_segment_circuit(&by_table, stark_config);
         let segment_aggregation = Self::create_segment_aggregation_circuit(&root);
-        let txn_aggregation =
-            Self::create_txn_aggregation_circuit(&segment_aggregation, stark_config);
-        let block = Self::create_block_circuit(&txn_aggregation);
+        let batch_aggregation =
+            Self::create_batch_aggregation_circuit(&segment_aggregation, stark_config);
+        let block = Self::create_block_circuit(&batch_aggregation);
         let block_wrapper = Self::create_block_wrapper_circuit(&block);
         let two_to_one_block = Self::create_two_to_one_block_circuit(&block_wrapper);
 
         Self {
             root,
             segment_aggregation,
-            txn_aggregation,
+            batch_aggregation,
             block,
             block_wrapper,
             two_to_one_block,
@@ -1092,21 +1115,21 @@ where
         }
     }
 
-    fn create_txn_aggregation_circuit(
+    fn create_batch_aggregation_circuit(
         agg: &SegmentAggregationCircuitData<F, C, D>,
         stark_config: &StarkConfig,
-    ) -> TxnAggregationCircuitData<F, C, D> {
+    ) -> BatchAggregationCircuitData<F, C, D> {
         // Create a circuit for the aggregation of two transactions.
 
         let mut builder = CircuitBuilder::<F, D>::new(agg.circuit.common.config.clone());
         let public_values = add_virtual_public_values_public_input(&mut builder);
         let cyclic_vk = builder.add_verifier_data_public_inputs();
 
-        let lhs_txn_proof = Self::add_txn_agg_child(&mut builder, agg);
-        let rhs_txn_proof = Self::add_txn_agg_child(&mut builder, agg);
+        let lhs_batch_proof = Self::add_batch_agg_child(&mut builder, agg);
+        let rhs_batch_proof = Self::add_batch_agg_child(&mut builder, agg);
 
-        let lhs_pv = lhs_txn_proof.public_values(&mut builder);
-        let rhs_pv = rhs_txn_proof.public_values(&mut builder);
+        let lhs_pv = lhs_batch_proof.public_values(&mut builder);
+        let rhs_pv = rhs_batch_proof.public_values(&mut builder);
 
         // Connect all block hash values
         BlockHashesTarget::connect(
@@ -1197,10 +1220,10 @@ where
         }
 
         let circuit = builder.build::<C>();
-        TxnAggregationCircuitData {
+        BatchAggregationCircuitData {
             circuit,
-            lhs: lhs_txn_proof,
-            rhs: rhs_txn_proof,
+            lhs: lhs_batch_proof,
+            rhs: rhs_batch_proof,
             public_values,
             cyclic_vk,
         }
@@ -1284,7 +1307,9 @@ where
         builder.connect(x.registers_before.program_counter, main_label);
     }
 
-    fn create_block_circuit(agg: &TxnAggregationCircuitData<F, C, D>) -> BlockCircuitData<F, C, D> {
+    fn create_block_circuit(
+        agg: &BatchAggregationCircuitData<F, C, D>,
+    ) -> BlockCircuitData<F, C, D> {
         // Here, we have two block proofs and we aggregate them together.
         // The block circuit is similar to the agg circuit; both verify two inner
         // proofs.
@@ -1486,7 +1511,7 @@ where
         }
     }
 
-    fn add_txn_agg_child(
+    fn add_batch_agg_child(
         builder: &mut CircuitBuilder<F, D>,
         segment_agg: &SegmentAggregationCircuitData<F, C, D>,
     ) -> AggregationChildTarget<D> {
@@ -1864,9 +1889,12 @@ where
         let root_proof = self.root.circuit.prove(root_inputs)?;
 
         Ok(ProverOutputData {
+            is_agg: false,
             is_dummy: false,
-            proof_with_pis: root_proof,
-            public_values: all_proof.public_values,
+            proof_with_pvs: ProofWithPublicValues {
+                public_values: all_proof.public_values,
+                intern: root_proof,
+            },
         })
     }
 
@@ -1967,7 +1995,7 @@ where
         all_proof: AllProof<F, C, D>,
         table_circuits: &[(RecursiveCircuitsForTableSize<F, C, D>, u8); NUM_TABLES],
         abort_signal: Option<Arc<AtomicBool>>,
-    ) -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, PublicValues<F>)> {
+    ) -> anyhow::Result<ProofWithPublicValues<F, C, D>> {
         let mut root_inputs = PartialWitness::new();
 
         for table in 0..NUM_TABLES {
@@ -2002,20 +2030,23 @@ where
 
         let root_proof = self.root.circuit.prove(root_inputs)?;
 
-        Ok((root_proof, all_proof.public_values))
+        Ok(ProofWithPublicValues {
+            public_values: all_proof.public_values,
+            intern: root_proof,
+        })
     }
 
     pub fn verify_root(&self, agg_proof: ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
         self.root.circuit.verify(agg_proof)
     }
 
-    /// Create an aggregation proof, combining two contiguous proofs into a
-    /// single one. The combined proofs are segment proofs: they are proofs
-    /// of some parts of one execution.
+    /// Create a segment aggregation proof, combining two contiguous proofs into
+    /// a single one. The combined proofs are segment proofs: they are
+    /// proofs of some parts of one execution.
     ///
     /// While regular root proofs can only assert validity of a
     /// single segment of a transaction, segment aggregation proofs
-    /// can cover an arbitrary range, up to an entire transaction.
+    /// can cover an arbitrary range, up to an entire batch of transactions.
     ///
     /// # Arguments
     ///
@@ -2023,30 +2054,28 @@ where
     ///   aggregation proof or a regular segment proof.
     /// - `lhs_proof`: the left child prover output data.
     /// - `rhs_is_agg`: a boolean indicating whether the right child proof is an
-    ///   aggregation proof or a regular transaction proof.
+    ///   aggregation proof or a regular segment proof.
     /// - `rhs_proof`: the right child prover output data.
     ///
     /// # Outputs
     ///
-    /// This method outputs a [`ProverOutputData<F, C, D>`]. Only the proof with
-    /// public inputs is necessary for a verifier to assert correctness of
-    /// the computation, but the public values and `is_dummy` are output for the
-    /// prover convenience, as these are necessary during proof aggregation.
+    /// This method outputs a [`ProverOutputData<F, C, D>`]. Only the inner
+    /// proof with public inputs is necessary for a verifier to assert
+    /// correctness of the computation, but the associated public values and
+    /// `is_dummy` flag are necessary during batch aggregation.
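+    ///
+    /// A minimal usage sketch, with `prover_state` an [`AllRecursiveCircuits`]
+    /// instance and `lhs_output` / `rhs_output` two [`ProverOutputData`] values
+    /// obtained from previous proving calls:
+    ///
+    /// ```ignore
+    /// let agg_output = prover_state.prove_segment_aggregation(&lhs_output, &rhs_output)?;
+    /// // The inner recursive proof and its public values are then available as:
+    /// let proof = &agg_output.proof_with_pvs.intern;
+    /// let pvs = &agg_output.proof_with_pvs.public_values;
+    /// ```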
     pub fn prove_segment_aggregation(
         &self,
-        lhs_is_agg: bool,
-        lhs_prover_output: &ProverOutputData<F, C, D>,
-        rhs_is_agg: bool,
-        rhs_prover_output: &ProverOutputData<F, C, D>,
+        lhs: &ProverOutputData<F, C, D>,
+        rhs: &ProverOutputData<F, C, D>,
     ) -> anyhow::Result<ProverOutputData<F, C, D>> {
         let mut agg_inputs = PartialWitness::new();
 
-        let lhs_proof = &lhs_prover_output.proof_with_pis;
-        let rhs_proof = &rhs_prover_output.proof_with_pis;
-        let rhs_is_dummy = rhs_prover_output.is_dummy;
+        let lhs_proof = &lhs.proof_with_pvs.intern;
+        let rhs_proof = &rhs.proof_with_pvs.intern;
+        let rhs_is_dummy = rhs.is_dummy;
         Self::set_dummy_if_necessary(
             &self.segment_aggregation.lhs,
-            lhs_is_agg,
+            lhs.is_agg,
             &self.segment_aggregation.circuit,
             &mut agg_inputs,
             lhs_proof,
@@ -2057,7 +2086,7 @@ where
 
         Self::set_dummy_if_necessary_with_dummy(
             &self.segment_aggregation.rhs,
-            rhs_is_agg,
+            rhs.is_agg,
             rhs_is_dummy,
             &self.segment_aggregation.circuit,
             &mut agg_inputs,
@@ -2070,9 +2099,8 @@ where
         );
 
         // Aggregates both `PublicValues` from the provided proofs into a single one.
-
-        let lhs_public_values = &lhs_prover_output.public_values;
-        let rhs_public_values = &rhs_prover_output.public_values;
+        let lhs_public_values = &lhs.proof_with_pvs.public_values;
+        let rhs_public_values = &rhs.proof_with_pvs.public_values;
 
         let real_public_values = if rhs_is_dummy {
             lhs_public_values.clone()
@@ -2115,9 +2143,12 @@ where
 
         let aggregation_proof = self.segment_aggregation.circuit.prove(agg_inputs)?;
         let agg_output = ProverOutputData {
+            is_agg: true,
             is_dummy: false,
-            proof_with_pis: aggregation_proof,
-            public_values: agg_public_values,
+            proof_with_pvs: ProofWithPublicValues {
+                public_values: agg_public_values,
+                intern: aggregation_proof,
+            },
         };
         Ok(agg_output)
     }
@@ -2134,92 +2165,94 @@ where
         )
     }
 
-    /// Creates a final transaction proof, once all segments of a given
-    /// transaction have been combined into a single aggregation proof.
+    /// Creates a final batch proof, once all segments of a given
+    /// transaction batch have been combined into a single aggregation proof.
     ///
-    /// Transaction proofs can either be generated as a standalone, or combined
-    /// with a previous transaction proof to assert validity of a range of
-    /// transactions.
+    /// Batch proofs can either be generated as standalone proofs, or combined
+    /// with a previous batch proof to assert the validity of a larger batch of
+    /// transactions, up to an entire block.
     ///
     /// # Arguments
     ///
-    /// - `opt_parent_txn_proof`: an optional parent transaction proof. Passing
-    ///   one will generate a proof of validity for both the transaction range
-    ///   covered by the previous proof and the current transaction.
-    /// - `agg_proof`: the final aggregation proof containing all segments
-    ///   within the current transaction.
-    /// - `public_values`: the public values associated to the aggregation
-    ///   proof.
+    /// - `lhs_is_agg`: a boolean indicating whether the left child proof is an
+    ///   aggregation proof or a regular batch proof.
+    /// - `lhs`: the left child proof along with its public values.
+    /// - `rhs_is_agg`: a boolean indicating whether the right child proof is an
+    ///   aggregation proof or a regular batch proof.
+    /// - `rhs`: the right child proof along with its public values.
     ///
     /// # Outputs
     ///
-    /// This method outputs a tuple of [`ProofWithPublicInputs<F, C, D>`] and
-    /// its [`PublicValues`]. Only the proof with public inputs is necessary
-    /// for a verifier to assert correctness of the computation.
+    /// This method outputs a [`ProofWithPublicValues<F, C, D>`].
+    /// Only the inner proof with public inputs is necessary for
+    /// a verifier to assert correctness of the computation.
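+    ///
+    /// A minimal usage sketch, with `prover_state` an [`AllRecursiveCircuits`]
+    /// instance and `lhs` / `rhs` two [`ProofWithPublicValues`] obtained from
+    /// segment aggregation or previous batch aggregations:
+    ///
+    /// ```ignore
+    /// let batch_proof =
+    ///     prover_state.prove_batch_aggregation(lhs_is_agg, &lhs, rhs_is_agg, &rhs)?;
+    /// prover_state.verify_batch_aggregation(&batch_proof.intern)?;
+    /// ```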
     pub fn prove_batch_aggregation(
         &self,
         lhs_is_agg: bool,
-        lhs_proof: &ProofWithPublicInputs<F, C, D>,
-        lhs_public_values: PublicValues<F>,
+        lhs: &ProofWithPublicValues<F, C, D>,
         rhs_is_agg: bool,
-        rhs_proof: &ProofWithPublicInputs<F, C, D>,
-        rhs_public_values: PublicValues<F>,
-    ) -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, PublicValues<F>)> {
-        let mut txn_inputs = PartialWitness::new();
+        rhs: &ProofWithPublicValues<F, C, D>,
+    ) -> anyhow::Result<ProofWithPublicValues<F, C, D>> {
+        let mut batch_inputs = PartialWitness::new();
 
         Self::set_dummy_if_necessary(
-            &self.txn_aggregation.lhs,
+            &self.batch_aggregation.lhs,
             lhs_is_agg,
-            &self.txn_aggregation.circuit,
-            &mut txn_inputs,
-            lhs_proof,
+            &self.batch_aggregation.circuit,
+            &mut batch_inputs,
+            &lhs.intern,
         );
 
         Self::set_dummy_if_necessary(
-            &self.txn_aggregation.rhs,
+            &self.batch_aggregation.rhs,
             rhs_is_agg,
-            &self.txn_aggregation.circuit,
-            &mut txn_inputs,
-            rhs_proof,
+            &self.batch_aggregation.circuit,
+            &mut batch_inputs,
+            &rhs.intern,
         );
 
-        txn_inputs.set_verifier_data_target(
-            &self.txn_aggregation.cyclic_vk,
-            &self.txn_aggregation.circuit.verifier_only,
+        batch_inputs.set_verifier_data_target(
+            &self.batch_aggregation.cyclic_vk,
+            &self.batch_aggregation.circuit.verifier_only,
         );
 
-        let txn_public_values = PublicValues {
-            trie_roots_before: lhs_public_values.trie_roots_before,
+        let lhs_pvs = &lhs.public_values;
+        let batch_public_values = PublicValues {
+            trie_roots_before: lhs_pvs.trie_roots_before.clone(),
             extra_block_data: ExtraBlockData {
-                txn_number_before: lhs_public_values.extra_block_data.txn_number_before,
-                gas_used_before: lhs_public_values.extra_block_data.gas_used_before,
-                ..rhs_public_values.extra_block_data
+                txn_number_before: lhs_pvs.extra_block_data.txn_number_before,
+                gas_used_before: lhs_pvs.extra_block_data.gas_used_before,
+                ..rhs.public_values.extra_block_data
             },
-            ..rhs_public_values
+            ..rhs.public_values.clone()
         };
 
         set_public_value_targets(
-            &mut txn_inputs,
-            &self.txn_aggregation.public_values,
-            &txn_public_values,
+            &mut batch_inputs,
+            &self.batch_aggregation.public_values,
+            &batch_public_values,
         )
         .map_err(|_| {
             anyhow::Error::msg("Invalid conversion when setting public values targets.")
         })?;
 
-        let txn_proof = self.txn_aggregation.circuit.prove(txn_inputs)?;
-        Ok((txn_proof, txn_public_values))
+        let batch_proof = self.batch_aggregation.circuit.prove(batch_inputs)?;
+
+        Ok(ProofWithPublicValues {
+            public_values: batch_public_values,
+            intern: batch_proof,
+        })
     }
 
-    pub fn verify_txn_aggregation(
+    pub fn verify_batch_aggregation(
         &self,
         txn_proof: &ProofWithPublicInputs<F, C, D>,
     ) -> anyhow::Result<()> {
-        self.txn_aggregation.circuit.verify(txn_proof.clone())?;
+        self.batch_aggregation.circuit.verify(txn_proof.clone())?;
         check_cyclic_proof_verifier_data(
             txn_proof,
-            &self.txn_aggregation.circuit.verifier_only,
-            &self.txn_aggregation.circuit.common,
+            &self.batch_aggregation.circuit.verifier_only,
+            &self.batch_aggregation.circuit.common,
         )
     }
 
@@ -2262,21 +2295,18 @@ where
     ///   will generate a proof of validity for both the block range covered by
     ///   the previous proof and the current block.
     /// - `agg_root_proof`: the final aggregation proof containing all
-    ///   transactions within the current block.
-    /// - `public_values`: the public values associated to the aggregation
-    ///   proof.
+    ///   transactions within the current block, along with its public values.
     ///
     /// # Outputs
     ///
-    /// This method outputs a tuple of [`ProofWithPublicInputs<F, C, D>`] and
-    /// its [`PublicValues`]. Only the proof with public inputs is necessary
-    /// for a verifier to assert correctness of the computation.
+    /// This method outputs a [`ProofWithPublicValues<F, C, D>`]. Only the inner
+    /// proof with public inputs is necessary for a verifier to assert
+    /// correctness of the computation.
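+    ///
+    /// A minimal usage sketch, with `prover_state` an [`AllRecursiveCircuits`]
+    /// instance and `agg_root_proof` the final batch aggregation
+    /// [`ProofWithPublicValues`] for this block:
+    ///
+    /// ```ignore
+    /// let block_proof = prover_state.prove_block(opt_parent_block_proof, &agg_root_proof)?;
+    /// prover_state.verify_block(&block_proof.intern)?;
+    /// ```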
     pub fn prove_block(
         &self,
         opt_parent_block_proof: Option<&ProofWithPublicInputs<F, C, D>>,
-        agg_root_proof: &ProofWithPublicInputs<F, C, D>,
-        public_values: PublicValues<F>,
-    ) -> anyhow::Result<(ProofWithPublicInputs<F, C, D>, PublicValues<F>)> {
+        agg_root_proof: &ProofWithPublicValues<F, C, D>,
+    ) -> anyhow::Result<ProofWithPublicValues<F, C, D>> {
         let mut block_inputs = PartialWitness::new();
 
         block_inputs.set_bool_target(
@@ -2287,13 +2317,19 @@ where
             block_inputs
                 .set_proof_with_pis_target(&self.block.parent_block_proof, parent_block_proof);
         } else {
-            if public_values.trie_roots_before.state_root
-                != public_values.extra_block_data.checkpoint_state_trie_root
+            if agg_root_proof.public_values.trie_roots_before.state_root
+                != agg_root_proof
+                    .public_values
+                    .extra_block_data
+                    .checkpoint_state_trie_root
             {
                 return Err(anyhow::Error::msg(format!(
                     "Inconsistent pre-state for first block {:?} with checkpoint state {:?}.",
-                    public_values.trie_roots_before.state_root,
-                    public_values.extra_block_data.checkpoint_state_trie_root,
+                    agg_root_proof.public_values.trie_roots_before.state_root,
+                    agg_root_proof
+                        .public_values
+                        .extra_block_data
+                        .checkpoint_state_trie_root,
                 )));
             }
 
@@ -2303,14 +2339,17 @@ where
 
             // Initialize the checkpoint block roots before, and state root after.
             let state_trie_root_before_keys = 0..TARGET_HASH_SIZE;
-            for (key, &value) in state_trie_root_before_keys
-                .zip_eq(&h256_limbs::<F>(public_values.trie_roots_before.state_root))
-            {
+            for (key, &value) in state_trie_root_before_keys.zip_eq(&h256_limbs::<F>(
+                agg_root_proof.public_values.trie_roots_before.state_root,
+            )) {
                 nonzero_pis.insert(key, value);
             }
             let txn_trie_root_before_keys = TARGET_HASH_SIZE..TARGET_HASH_SIZE * 2;
             for (key, &value) in txn_trie_root_before_keys.clone().zip_eq(&h256_limbs::<F>(
-                public_values.trie_roots_before.transactions_root,
+                agg_root_proof
+                    .public_values
+                    .trie_roots_before
+                    .transactions_root,
             )) {
                 nonzero_pis.insert(key, value);
             }
@@ -2318,16 +2357,16 @@ where
             for (key, &value) in receipts_trie_root_before_keys
                 .clone()
                 .zip_eq(&h256_limbs::<F>(
-                    public_values.trie_roots_before.receipts_root,
+                    agg_root_proof.public_values.trie_roots_before.receipts_root,
                 ))
             {
                 nonzero_pis.insert(key, value);
             }
             let state_trie_root_after_keys =
                 TrieRootsTarget::SIZE..TrieRootsTarget::SIZE + TARGET_HASH_SIZE;
-            for (key, &value) in state_trie_root_after_keys
-                .zip_eq(&h256_limbs::<F>(public_values.trie_roots_before.state_root))
-            {
+            for (key, &value) in state_trie_root_after_keys.zip_eq(&h256_limbs::<F>(
+                agg_root_proof.public_values.trie_roots_before.state_root,
+            )) {
                 nonzero_pis.insert(key, value);
             }
 
@@ -2341,7 +2380,8 @@ where
                 let burn_addr_keys =
                     TrieRootsTarget::SIZE * 2..TrieRootsTarget::SIZE * 2 + burn_addr_offset;
                 for (key, &value) in burn_addr_keys.zip_eq(&crate::util::u256_limbs(
-                    public_values
+                    agg_root_proof
+                        .public_values
                         .burn_addr
                         .expect("We should have a burn addr when cdk_erigon is activated"),
                 )) {
@@ -2359,7 +2399,10 @@ where
                     + BlockHashesTarget::SIZE
                     + 8;
             for (key, &value) in checkpoint_state_trie_keys.zip_eq(&h256_limbs::<F>(
-                public_values.extra_block_data.checkpoint_state_trie_root,
+                agg_root_proof
+                    .public_values
+                    .extra_block_data
+                    .checkpoint_state_trie_root,
             )) {
                 nonzero_pis.insert(key, value);
             }
@@ -2373,8 +2416,9 @@ where
                         + BlockMetadataTarget::SIZE
                         + BlockHashesTarget::SIZE
                         - 16;
-            for i in 0..public_values.block_hashes.prev_hashes.len() - 1 {
-                let targets = h256_limbs::<F>(public_values.block_hashes.prev_hashes[i]);
+            for i in 0..agg_root_proof.public_values.block_hashes.prev_hashes.len() - 1 {
+                let targets =
+                    h256_limbs::<F>(agg_root_proof.public_values.block_hashes.prev_hashes[i]);
                 for j in 0..8 {
                     nonzero_pis.insert(block_hashes_keys.start + 8 * (i + 1) + j, targets[j]);
                 }
@@ -2385,7 +2429,8 @@ where
                 + BlockMetadataTarget::SIZE
                 + BlockHashesTarget::SIZE
                 - 8;
-            let cur_targets = h256_limbs::<F>(public_values.block_hashes.prev_hashes[255]);
+            let cur_targets =
+                h256_limbs::<F>(agg_root_proof.public_values.block_hashes.prev_hashes[255]);
             for i in 0..8 {
                 nonzero_pis.insert(block_hashes_current_start + i, cur_targets[i]);
             }
@@ -2396,7 +2441,14 @@ where
             let block_number_key = burn_addr_offset + TrieRootsTarget::SIZE * 2 + 6;
             nonzero_pis.insert(
                 block_number_key,
-                F::from_canonical_u64(public_values.block_metadata.block_number.low_u64() - 1),
+                F::from_canonical_u64(
+                    agg_root_proof
+                        .public_values
+                        .block_metadata
+                        .block_number
+                        .low_u64()
+                        - 1,
+                ),
             );
 
             block_inputs.set_proof_with_pis_target(
@@ -2409,7 +2461,7 @@ where
             );
         }
 
-        block_inputs.set_proof_with_pis_target(&self.block.agg_root_proof, agg_root_proof);
+        block_inputs.set_proof_with_pis_target(&self.block.agg_root_proof, &agg_root_proof.intern);
 
         block_inputs
             .set_verifier_data_target(&self.block.cyclic_vk, &self.block.circuit.verifier_only);
@@ -2419,8 +2471,8 @@ where
         let block_public_values = PublicValues {
             trie_roots_before: opt_parent_block_proof
                 .map(|p| TrieRoots::from_public_inputs(&p.public_inputs[0..TrieRootsTarget::SIZE]))
-                .unwrap_or(public_values.trie_roots_before),
-            ..public_values
+                .unwrap_or(agg_root_proof.public_values.trie_roots_before.clone()),
+            ..agg_root_proof.public_values.clone()
         };
 
         set_public_value_targets(
@@ -2433,7 +2485,10 @@ where
         })?;
 
         let block_proof = self.block.circuit.prove(block_inputs)?;
-        Ok((block_proof, block_public_values))
+        Ok(ProofWithPublicValues {
+            public_values: block_public_values,
+            intern: block_proof,
+        })
     }
 
     pub fn verify_block(&self, block_proof: &ProofWithPublicInputs<F, C, D>) -> anyhow::Result<()> {
@@ -2450,9 +2505,8 @@ where
     ///
     /// # Arguments
     ///
-    /// - `block_proof`: the final block proof of a chain to be wrapped.
-    /// - `public_values`: the public values associated to the aggregation
-    ///   proof.
+    /// - `block_proof`: the final block proof of a chain to be wrapped, along
+    ///   with its public values.
     ///
     /// # Outputs
     ///
@@ -2462,8 +2516,7 @@ where
     #[allow(clippy::type_complexity)]
     pub fn prove_block_wrapper(
         &self,
-        block_proof: &ProofWithPublicInputs<F, C, D>,
-        public_values: PublicValues<F>,
+        block_proof: &ProofWithPublicValues<F, C, D>,
     ) -> anyhow::Result<(
         ProofWithPublicInputs<F, C, D>,
         FinalPublicValues<F, C::InnerHasher>,
@@ -2471,14 +2524,14 @@ where
         let mut block_wrapper_inputs = PartialWitness::new();
 
         block_wrapper_inputs
-            .set_proof_with_pis_target(&self.block_wrapper.parent_block_proof, block_proof);
+            .set_proof_with_pis_target(&self.block_wrapper.parent_block_proof, &block_proof.intern);
 
         block_wrapper_inputs.set_verifier_data_target(
             &self.block_wrapper.cyclic_vk, // dummy
             &self.block_wrapper.circuit.verifier_only,
         );
 
-        let final_pvs = public_values.into();
+        let final_pvs = block_proof.public_values.clone().into();
         set_final_public_value_targets(
             &mut block_wrapper_inputs,
             &self.block_wrapper.public_values,
@@ -2502,7 +2555,8 @@ where
             .verify(wrapped_block_proof.clone())
     }
 
-    /// Aggregates two proofs in manner similar to [`prove_aggregation`].
+    /// Aggregates two proofs in a manner similar to
+    /// [`AllRecursiveCircuits::prove_batch_aggregation`].
     ///
     /// # Arguments
     ///
diff --git a/evm_arithmetization/src/lib.rs b/evm_arithmetization/src/lib.rs
index 1a6a4a38e..41b7f093a 100644
--- a/evm_arithmetization/src/lib.rs
+++ b/evm_arithmetization/src/lib.rs
@@ -38,74 +38,123 @@
 //!
 //! Transactions need to be processed into an Intermediary Representation (IR)
 //! format for the prover to be able to generate proofs of valid state
-//! transition. This involves passing the encoded transaction, the header of the
-//! block in which it was included, some information on the state prior
-//! execution of this transaction, etc.
-//! This intermediary representation is called [`GenerationInputs`].
+//! transition. This involves passing the encoded transactions, the header of
+//! the block in which they were included, some information on the state prior
+//! to the execution of these transactions, etc.
+//! This intermediary representation is called [`GenerationInputs`], although
+//! the prover may sometimes rely on a trimmed version,
+//! [`TrimmedGenerationInputs`], if some initial data processing has already
+//! happened.
 //!
 //!
 //! # Generating succinct proofs
 //!
-//! ## Transaction proofs
+//! ## Segment proofs
 //!
-//! To generate a proof for a transaction, given its [`GenerationInputs`] and an
-//! [`AllRecursiveCircuits`] prover state, one can simply call the
-//! [prove_root](AllRecursiveCircuits::prove_root) method.
+//! To generate a proof for a batch of transactions,
+//! given their [`GenerationInputs`] and an [`AllRecursiveCircuits`] prover
+//! state, we first break the execution down into consecutive segments, each
+//! representing a partial run of the zkCPU over these inputs. For this step,
+//! one must first generate the data needed for each of these segments by
+//! initializing a [`SegmentDataIterator`] from the inputs and an optional
+//! maximum segment length, and running it until exhaustion. One can then call
+//! the [prove_segment](AllRecursiveCircuits::prove_segment) method on each of
+//! the obtained segment data independently to generate individual segment
+//! proofs:
 //!
 //! ```ignore
+//! type F = GoldilocksField;
+//!
 //! let mut timing = TimingTree::new("prove", log::Level::Debug);
 //! let kill_signal = None; // Useful only with distributed proving to kill hanging jobs.
-//! let (proof, public_values) =
-//!     prover_state.prove_root(all_stark, config, inputs, &mut timing, kill_signal);
+//!
+//! // Collect the segment data needed to prove this batch.
+//! let data_iterator =
+//!     SegmentDataIterator::<F>::new(inputs, Some(max_segment_log_length));
+//!
+//! // Prove all segments associated with this batch.
+//! let mut segment_proof_data = vec![];
+//! for segment_run in data_iterator {
+//!     let (_, mut segment_data) = segment_run?;
+//!     segment_proof_data.push(
+//!         prover_state.prove_segment(
+//!             all_stark,
+//!             config,
+//!             inputs,
+//!             segment_data,
+//!             &mut timing,
+//!             kill_signal
+//!         )?
+//!     );
+//! }
 //! ```
 //!
-//! This outputs a transaction proof and its associated public values. These are
-//! necessary during the aggregation levels (see below). If one were to miss the
-//! public values, they are also retrievable directly from the proof's encoded
-//! public inputs, as such:
+//! The [prove_segment](AllRecursiveCircuits::prove_segment) method outputs a
+//! segment proof and its associated public values. Public values are also
+//! directly retrievable from the proof's encoded public inputs, as follows:
 //!
 //! ```ignore
 //! let public_values = PublicValues::from_public_inputs(&proof.public_inputs);
 //! ```
 //!
-//! ## Aggregation proofs
+//! ## Segment aggregation proofs
 //!
-//! Because the plonky2 zkEVM generates proofs on a transaction basis, we then
-//! need to aggregate them for succinct verification. This is done in a binary
-//! tree fashion, where each inner node proof verifies two children proofs,
-//! through the [prove_aggregation](AllRecursiveCircuits::prove_aggregation)
+//! To improve parallelism and reduce overall proving costs, segments of
+//! execution can be proven independently once their associated data have been
+//! generated, and are then aggregated together in a binary tree fashion,
+//! where each inner node proof verifies two child proofs, through the
+//! [prove_segment_aggregation](AllRecursiveCircuits::prove_segment_aggregation)
 //! method. Note that the tree does *not* need to be complete, as this
-//! aggregation process can take as inputs both regular transaction proofs and
-//! aggregation proofs. We only need to specify for each child if it is an
-//! aggregation proof or a regular one.
+//! aggregation process can take as inputs both simple segment proofs and
+//! aggregated segment proofs. We only need to specify for each child which
+//! type of proof it corresponds to.
 //!
 //! ```ignore
 //! let (proof_1, pv_1) =
-//!     prover_state.prove_root(all_stark, config, inputs_1, &mut timing, None);
+//!     prover_state.prove_segment(all_stark, config, inputs_1, &mut timing, None);
 //! let (proof_2, pv_2) =
-//!     prover_state.prove_root(all_stark, config, inputs_2, &mut timing, None);
+//!     prover_state.prove_segment(all_stark, config, inputs_2, &mut timing, None);
 //! let (proof_3, pv_3) =
-//!     prover_state.prove_root(all_stark, config, inputs_3, &mut timing, None);
+//!     prover_state.prove_segment(all_stark, config, inputs_3, &mut timing, None);
 //!
-//! // Now aggregate proofs for txn 1 and 2.
-//! let (agg_proof_1_2, pv_1_2) =
-//!     prover_state.prove_aggregation(false, proof_1, pv_1, false, proof_2, pv_2);
+//! // Now aggregate proofs for segments 1 and 2.
+//! let agg_proof_1_2 =
+//!     prover_state.prove_segment_aggregation(proof_1, proof_2);
 //!
-//! // Now aggregate the newly generated aggregation proof with the last regular txn proof.
-//! let (agg_proof_1_3, pv_1_3) =
-//!     prover_state.prove_aggregation(true, agg_proof_1_2, pv_1_2, false, proof_3, pv_3);
+//! // Now aggregate the newly generated aggregation proof with the last regular segment proof.
+//! let agg_proof_1_3 =
+//!     prover_state.prove_segment_aggregation(agg_proof_1_2, proof_3);
 //! ```
 //!
 //! **Note**: The proofs provided to the
-//! [prove_aggregation](AllRecursiveCircuits::prove_aggregation) method *MUST*
-//! have contiguous states. Trying to combine `proof_1` and `proof_3` from the
-//! example above would fail.
+//! [prove_segment_aggregation](AllRecursiveCircuits::prove_segment_aggregation)
+//! method *MUST* have contiguous states. Trying to combine `proof_1` and
+//! `proof_3` from the example above, or reversing the order of `agg_proof_1_2`
+//! and `proof_3`, would fail.
+//!
+//! ## Batch aggregation proofs
+//!
+//! In a similar manner to the previous stage, once an entire batch of
+//! transactions has been proven and reduced to a single segment aggregation
+//! proof, it can then be combined with other batch proofs or aggregated batch
+//! proofs, through the
+//! [prove_batch_aggregation](AllRecursiveCircuits::prove_batch_aggregation)
+//! method.
+//!
+//! ```ignore
+//! let batch_agg_proof =
+//!     prover_state.prove_batch_aggregation(false, batch_proof_1, false, batch_proof_2);
+//!
+//! // Now aggregate the newly generated batch aggregation proof with the last regular batch proof.
+//! let batch_agg_proof =
+//!     prover_state.prove_batch_aggregation(true, batch_agg_proof, false, batch_proof_3);
+//! ```
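+//!
+//! The resulting batch aggregation proof can then be checked with
+//! [verify_batch_aggregation](AllRecursiveCircuits::verify_batch_aggregation),
+//! passing the inner recursive proof held in its `intern` field (a sketch):
+//!
+//! ```ignore
+//! prover_state.verify_batch_aggregation(&batch_agg_proof.intern)?;
+//! ```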
 //!
 //! ## Block proofs
 //!
 //! Once all transactions of a block have been proven and we are left with a
-//! single aggregation proof and its public values, we can then wrap it into a
-//! final block proof, attesting validity of the entire block.
+//! single aggregated batch proof and its public values, we can then wrap it
+//! into a final block proof, attesting validity of the entire block.
 //! This [prove_block](AllRecursiveCircuits::prove_block) method accepts an
 //! optional previous block proof as argument, which will then try combining the
 //! previously proven block with the current one, generating a validity proof
@@ -114,8 +163,8 @@
 //!
 //! ```ignore
 //! let previous_block_proof = { ... };
-//! let (block_proof, block_public_values) =
-//!     prover_state.prove_block(Some(&previous_block_proof), &agg_proof, agg_pv)?;
+//! let block_proof =
+//!     prover_state.prove_block(Some(&previous_block_proof), &agg_proof)?;
 //! ```
 //!
 //! ### Checkpoint heights
@@ -135,10 +184,31 @@
 //!
 //!
 //! ```ignore
-//! let (block_proof, block_public_values) =
-//!     prover_state.prove_block(None, &agg_proof, agg_pv)?;
+//! let block_proof =
+//!     prover_state.prove_block(None, &agg_proof)?;
 //! ```
 //!
+//! ## Wrapped block proofs
+//!
+//! Public values expose data useful for aggregating intermediate proofs
+//! together, but need not be disclosed in their entirety to verifiers outside
+//! of the chain. For this purpose, once a chain has aggregated sufficiently
+//! many blocks together and wants to ship the final generated proof, it may
+//! call the [prove_block_wrapper](AllRecursiveCircuits::prove_block_wrapper)
+//! method to obfuscate any non-required chain data. The remaining
+//! [FinalPublicValues](proof::FinalPublicValues) contain all the data
+//! needed to identify the chain and its claimed state transition between two
+//! checkpoint heights.
+//!
+//! ```ignore
+//! let (wrapped_block_proof, final_public_values) =
+//!     prover_state.prove_block_wrapper(&block_proof)?;
+//! ```
+//!
+//! **Note**: Despite its name, the method produces a [`plonky2`] proof, which
+//! may not be suitable for direct on-chain verification in a smart contract,
+//! unlike pairing-based SNARK proofs.
+//!
 //! # Prover state serialization
 //!
 //! Because the recursive circuits only need to be generated once, they can be
diff --git a/evm_arithmetization/src/public_types.rs b/evm_arithmetization/src/public_types.rs
index 0b917317d..dd38d55fa 100644
--- a/evm_arithmetization/src/public_types.rs
+++ b/evm_arithmetization/src/public_types.rs
@@ -35,6 +35,14 @@ pub use crate::proof::EMPTY_CONSOLIDATED_BLOCKHASH;
 pub type ProofWithPublicInputs =
     plonky2::plonk::proof::ProofWithPublicInputs<Field, RecursionConfig, EXTENSION_DEGREE>;
 
+/// A type alias for recursive proofs generated by the zkEVM along with their
+/// public values.
+pub type ProofWithPublicValues = crate::fixed_recursive_verifier::ProofWithPublicValues<
+    Field,
+    RecursionConfig,
+    EXTENSION_DEGREE,
+>;
+
 /// A type alias for EVM public values used to generate and verify intermediate
 /// proofs.
 pub type PublicValues = crate::proof::PublicValues<Field>;
diff --git a/evm_arithmetization/tests/two_to_one_block.rs b/evm_arithmetization/tests/two_to_one_block.rs
index db4363ae6..378aaf3f8 100644
--- a/evm_arithmetization/tests/two_to_one_block.rs
+++ b/evm_arithmetization/tests/two_to_one_block.rs
@@ -115,43 +115,41 @@ fn get_test_block_proof(
     let dummy1_proof =
         all_circuits.prove_all_segments(all_stark, config, dummy1, 20, timing, None)?;
 
-    let inputs0_proof = all_circuits.prove_segment_aggregation(
-        false,
-        &dummy0_proof0[0],
-        false,
-        &dummy0_proof0[1],
-    )?;
+    let inputs0_proof =
+        all_circuits.prove_segment_aggregation(&dummy0_proof0[0], &dummy0_proof0[1])?;
     let dummy0_proof =
-        all_circuits.prove_segment_aggregation(false, &dummy1_proof[0], false, &dummy1_proof[1])?;
+        all_circuits.prove_segment_aggregation(&dummy1_proof[0], &dummy1_proof[1])?;
 
-    let (agg_proof, pv) = all_circuits.prove_batch_aggregation(
+    let batch_proof = all_circuits.prove_batch_aggregation(
         false,
-        &inputs0_proof.proof_with_pis,
-        inputs0_proof.public_values,
+        &inputs0_proof.proof_with_pvs,
         false,
-        &dummy0_proof.proof_with_pis,
-        dummy0_proof.public_values,
+        &dummy0_proof.proof_with_pvs,
     )?;
 
-    all_circuits.verify_txn_aggregation(&agg_proof)?;
+    all_circuits.verify_batch_aggregation(&batch_proof.intern)?;
 
     // Test retrieved public values from the proof public inputs.
-    let retrieved_public_values = PublicValues::from_public_inputs(&agg_proof.public_inputs);
-    assert_eq!(retrieved_public_values, pv);
+    let retrieved_public_values =
+        PublicValues::from_public_inputs(&batch_proof.intern.public_inputs);
+    assert_eq!(retrieved_public_values, batch_proof.public_values);
     assert_eq!(
-        pv.trie_roots_before.state_root,
-        pv.extra_block_data.checkpoint_state_trie_root
+        batch_proof.public_values.trie_roots_before.state_root,
+        batch_proof
+            .public_values
+            .extra_block_data
+            .checkpoint_state_trie_root
     );
 
-    let (block_proof, block_public_values) = all_circuits.prove_block(
+    let block_proof = all_circuits.prove_block(
         None, // We don't specify a previous proof, considering block 1 as the new checkpoint.
-        &agg_proof, pv,
+        &batch_proof,
     )?;
 
-    all_circuits.verify_block(&block_proof)?;
+    all_circuits.verify_block(&block_proof.intern)?;
 
     let (wrapped_block_proof, block_final_public_values) =
-        all_circuits.prove_block_wrapper(&block_proof, block_public_values)?;
+        all_circuits.prove_block_wrapper(&block_proof)?;
 
     // Test retrieved final public values from the proof public inputs.
     let retrieved_final_public_values =
diff --git a/smt_trie/src/smt.rs b/smt_trie/src/smt.rs
index 356c8c3e9..f9ea73319 100644
--- a/smt_trie/src/smt.rs
+++ b/smt_trie/src/smt.rs
@@ -161,7 +161,7 @@ impl<D: Db> Smt<D> {
 
     /// Set the value associated with the key in the SMT.
     /// If the value is 0 and the key is in the SMT, the key is removed from the
-    /// SMT. Reference implementation in https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt.js.
+    /// SMT. Reference implementation in <https://github.com/0xPolygonHermez/zkevm-commonjs/blob/main/src/smt.js>.
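+    ///
+    /// A usage sketch, with `smt` an existing [`Smt`] and `key` an already
+    /// constructed `Key`:
+    ///
+    /// ```ignore
+    /// smt.set(key, U256::one());  // inserts or updates the value under `key`
+    /// smt.set(key, U256::zero()); // removes `key` from the SMT
+    /// ```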
     pub fn set(&mut self, key: Key, value: U256) {
         if value.is_zero() {
             self.kv_store.remove(&key);
diff --git a/trace_decoder/src/core.rs b/trace_decoder/src/core.rs
index d96778dec..31f065b3b 100644
--- a/trace_decoder/src/core.rs
+++ b/trace_decoder/src/core.rs
@@ -27,7 +27,7 @@ use crate::{
     TxnInfo, TxnMeta, TxnTrace,
 };
 
-/// TODO(0xaatif): document this after https://github.com/0xPolygonZero/zk_evm/issues/275
+/// TODO(0xaatif): document this after <https://github.com/0xPolygonZero/zk_evm/issues/275>
 pub fn entrypoint(
     trace: BlockTrace,
     other: OtherBlockData,
diff --git a/zero/src/ops.rs b/zero/src/ops.rs
index cc180c7e5..053855e60 100644
--- a/zero/src/ops.rs
+++ b/zero/src/ops.rs
@@ -5,7 +5,7 @@ use std::time::Instant;
 use anyhow::anyhow;
 use evm_arithmetization::fixed_recursive_verifier::ProverOutputData;
 use evm_arithmetization::{prover::testing::simulate_execution_all_segments, GenerationInputs};
-use evm_arithmetization::{Field, PublicValues, TrimmedGenerationInputs};
+use evm_arithmetization::{Field, ProofWithPublicValues, PublicValues, TrimmedGenerationInputs};
 use paladin::{
     operation::{FatalError, FatalStrategy, Monoid, Operation, Result},
     registry, RemoteExecute,
@@ -15,10 +15,7 @@ use tracing::error;
 use tracing::{event, info_span, Level};
 
 use crate::debug_utils::save_tries_to_disk;
-use crate::proof_types::{
-    BatchAggregatableProof, GeneratedBlockProof, GeneratedSegmentAggProof, GeneratedTxnAggProof,
-    SegmentAggregatableProof,
-};
+use crate::proof_types::{BatchAggregatableProof, GeneratedBlockProof, SegmentAggregatableProof};
 use crate::prover_state::ProverState;
 use crate::{debug_utils::save_inputs_to_disk, prover_state::p_state};
 
@@ -31,7 +28,7 @@ pub struct SegmentProof {
 
 impl Operation for SegmentProof {
     type Input = evm_arithmetization::AllData;
-    type Output = crate::proof_types::SegmentAggregatableProof;
+    type Output = SegmentAggregatableProof;
 
     fn execute(&self, all_data: Self::Input) -> Result<Self::Output> {
         let all_data =
@@ -65,7 +62,7 @@ impl Operation for SegmentProof {
                 .map_err(|e| FatalError::from_str(&e.to_string(), FatalStrategy::Terminate))?
         };
 
-        Ok(proof.into())
+        Ok(SegmentAggregatableProof::Segment(proof))
     }
 }
 
@@ -217,8 +214,8 @@ pub struct SegmentAggProof {
 
 fn get_seg_agg_proof_public_values(elem: SegmentAggregatableProof) -> PublicValues {
     match elem {
-        SegmentAggregatableProof::Seg(info) => info.p_vals,
-        SegmentAggregatableProof::Agg(info) => info.p_vals,
+        SegmentAggregatableProof::Segment(info) => info.public_values,
+        SegmentAggregatableProof::Agg(info) => info.public_values,
     }
 }
 
@@ -227,14 +224,14 @@ fn get_seg_agg_proof_public_values(elem: SegmentAggregatableProof) -> PublicValu
 /// Note that the child proofs may be either transaction or aggregation proofs.
 ///
 /// If a transaction only contains a single segment, this function must still be
-/// called to generate a `GeneratedSegmentAggProof`. In that case, you can set
+/// called to generate a `ProofWithPublicValues`. In that case, you can set
 /// `has_dummy` to `true`, and provide an arbitrary proof for the right child.
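+///
+/// A usage sketch for that single-segment case, assuming `segment` is a
+/// `SegmentAggregatableProof::Segment` obtained from the segment prover:
+///
+/// ```ignore
+/// let agg = generate_segment_agg_proof(
+///     p_state(),
+///     &SegmentAggregatableProof::Segment(segment.clone()),
+///     &SegmentAggregatableProof::Segment(segment), // arbitrary right child, replaced by a dummy
+///     true,
+/// )?;
+/// ```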
 pub fn generate_segment_agg_proof(
     p_state: &ProverState,
     lhs_child: &SegmentAggregatableProof,
     rhs_child: &SegmentAggregatableProof,
     has_dummy: bool,
-) -> anyhow::Result<GeneratedSegmentAggProof> {
+) -> anyhow::Result<ProofWithPublicValues> {
     if has_dummy {
         assert!(
             !lhs_child.is_agg(),
@@ -243,33 +240,27 @@ pub fn generate_segment_agg_proof(
     }
 
     let lhs_prover_output_data = ProverOutputData {
+        is_agg: lhs_child.is_agg(),
         is_dummy: false,
-        proof_with_pis: lhs_child.intern().clone(),
-        public_values: lhs_child.public_values(),
+        proof_with_pvs: lhs_child.proof_with_pvs(),
     };
     let rhs_prover_output_data = ProverOutputData {
+        is_agg: rhs_child.is_agg(),
         is_dummy: has_dummy,
-        proof_with_pis: rhs_child.intern().clone(),
-        public_values: rhs_child.public_values(),
+        proof_with_pvs: rhs_child.proof_with_pvs(),
     };
-    let agg_output_data = p_state.state.prove_segment_aggregation(
-        lhs_child.is_agg(),
-        &lhs_prover_output_data,
-        rhs_child.is_agg(),
-        &rhs_prover_output_data,
-    )?;
-
-    let p_vals = agg_output_data.public_values;
-    let intern = agg_output_data.proof_with_pis;
+    let agg_output_data = p_state
+        .state
+        .prove_segment_aggregation(&lhs_prover_output_data, &rhs_prover_output_data)?;
 
-    Ok(GeneratedSegmentAggProof { p_vals, intern })
+    Ok(agg_output_data.proof_with_pvs)
 }
 
 impl Monoid for SegmentAggProof {
     type Elem = SegmentAggregatableProof;
 
     fn combine(&self, a: Self::Elem, b: Self::Elem) -> Result<Self::Elem> {
-        let result = generate_segment_agg_proof(p_state(), &a, &b, false).map_err(|e| {
+        let proof = generate_segment_agg_proof(p_state(), &a, &b, false).map_err(|e| {
             if self.save_inputs_on_error {
                 let pv = vec![
                     get_seg_agg_proof_public_values(a),
@@ -289,7 +280,7 @@ impl Monoid for SegmentAggProof {
             FatalError::from_str(&e.to_string(), FatalStrategy::Terminate)
         })?;
 
-        Ok(result.into())
+        Ok(SegmentAggregatableProof::Agg(proof))
     }
 
     fn empty(&self) -> Self::Elem {
@@ -304,9 +295,9 @@ pub struct BatchAggProof {
 }
 fn get_agg_proof_public_values(elem: BatchAggregatableProof) -> PublicValues {
     match elem {
-        BatchAggregatableProof::Segment(info) => info.p_vals,
-        BatchAggregatableProof::Txn(info) => info.p_vals,
-        BatchAggregatableProof::Agg(info) => info.p_vals,
+        BatchAggregatableProof::Segment(info) => info.public_values,
+        BatchAggregatableProof::SegmentAgg(info) => info.public_values,
+        BatchAggregatableProof::BatchAgg(info) => info.public_values,
     }
 }
 
@@ -315,11 +306,11 @@ impl Monoid for BatchAggProof {
 
     fn combine(&self, a: Self::Elem, b: Self::Elem) -> Result<Self::Elem> {
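+        // A bare segment proof is first lifted to a segment aggregation by pairing it with a dummy right child.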
         let lhs = match a {
-            BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::from(
+            BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::SegmentAgg(
                 generate_segment_agg_proof(
                     p_state(),
-                    &SegmentAggregatableProof::from(segment.clone()),
-                    &SegmentAggregatableProof::from(segment),
+                    &SegmentAggregatableProof::Segment(segment.clone()),
+                    &SegmentAggregatableProof::Segment(segment),
                     true,
                 )
                 .map_err(|e| FatalError::from_str(&e.to_string(), FatalStrategy::Terminate))?,
@@ -328,11 +319,11 @@ impl Monoid for BatchAggProof {
         };
 
         let rhs = match b {
-            BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::from(
+            BatchAggregatableProof::Segment(segment) => BatchAggregatableProof::SegmentAgg(
                 generate_segment_agg_proof(
                     p_state(),
-                    &SegmentAggregatableProof::from(segment.clone()),
-                    &SegmentAggregatableProof::from(segment),
+                    &SegmentAggregatableProof::Segment(segment.clone()),
+                    &SegmentAggregatableProof::Segment(segment),
                     true,
                 )
                 .map_err(|e| FatalError::from_str(&e.to_string(), FatalStrategy::Terminate))?,
@@ -340,15 +331,13 @@ impl Monoid for BatchAggProof {
             _ => b,
         };
 
-        let (proof, p_vals) = p_state()
+        let proof = p_state()
             .state
             .prove_batch_aggregation(
                 lhs.is_agg(),
-                lhs.intern(),
-                lhs.public_values(),
+                lhs.proof_with_pvs(),
                 rhs.is_agg(),
-                rhs.intern(),
-                rhs.public_values(),
+                rhs.proof_with_pvs(),
             )
             .map_err(|e| {
                 if self.save_inputs_on_error {
@@ -370,11 +359,7 @@ impl Monoid for BatchAggProof {
                 FatalError::from_str(&e.to_string(), FatalStrategy::Terminate)
             })?;
 
-        Ok(GeneratedTxnAggProof {
-            p_vals,
-            intern: proof,
-        }
-        .into())
+        Ok(BatchAggregatableProof::BatchAgg(proof))
     }
 
     fn empty(&self) -> Self::Elem {
@@ -390,24 +375,24 @@ pub struct BlockProof {
 }
 
 impl Operation for BlockProof {
-    type Input = GeneratedTxnAggProof;
+    type Input = ProofWithPublicValues;
     type Output = GeneratedBlockProof;
 
     fn execute(&self, input: Self::Input) -> Result<Self::Output> {
-        let b_height = input.p_vals.block_metadata.block_number.low_u64();
+        let b_height = input.public_values.block_metadata.block_number.low_u64();
         let parent_intern = self.prev.as_ref().map(|p| &p.intern);
 
-        let (b_proof_intern, _) = p_state()
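+        // Prove the block against the parent block proof, if any; only the inner plonky2 proof is kept below.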
+        let block_proof = p_state()
             .state
-            .prove_block(parent_intern, &input.intern, input.p_vals.clone())
+            .prove_block(parent_intern, &input)
             .map_err(|e| {
                 if self.save_inputs_on_error {
                     if let Err(write_err) = save_inputs_to_disk(
                         format!(
                             "b{}_block_input.json",
-                            input.p_vals.block_metadata.block_number
+                            input.public_values.block_metadata.block_number
                         ),
-                        input.p_vals,
+                        input.public_values,
                     ) {
                         error!("Failed to save block proof input to disk: {:?}", write_err);
                     }
@@ -418,7 +403,7 @@ impl Operation for BlockProof {
 
         Ok(GeneratedBlockProof {
             b_height,
-            intern: b_proof_intern,
+            intern: block_proof.intern,
         })
     }
 }
diff --git a/zero/src/proof_types.rs b/zero/src/proof_types.rs
index 55ab492d0..2cf16b716 100644
--- a/zero/src/proof_types.rs
+++ b/zero/src/proof_types.rs
@@ -3,47 +3,11 @@
 
 use evm_arithmetization::{
     fixed_recursive_verifier::{extract_block_final_public_values, extract_two_to_one_block_hash},
-    BlockHeight, Hash, Hasher, ProofWithPublicInputs, PublicValues,
+    BlockHeight, Hash, Hasher, ProofWithPublicInputs, ProofWithPublicValues,
 };
 use plonky2::plonk::config::Hasher as _;
 use serde::{Deserialize, Serialize};
 
-/// A transaction proof along with its public values, for proper connection with
-/// contiguous proofs.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct GeneratedSegmentProof {
-    /// Public values of this transaction proof.
-    pub p_vals: PublicValues,
-    /// Underlying plonky2 proof.
-    pub intern: ProofWithPublicInputs,
-}
-
-/// A segment aggregation proof along with its public values, for proper
-/// connection with contiguous proofs.
-///
-/// Aggregation proofs can represent any contiguous range of two or more
-/// segments, up to an entire transaction.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct GeneratedSegmentAggProof {
-    /// Public values of this aggregation proof.
-    pub p_vals: PublicValues,
-    /// Underlying plonky2 proof.
-    pub intern: ProofWithPublicInputs,
-}
-
-/// A transaction aggregation proof along with its public values, for proper
-/// connection with contiguous proofs.
-///
-/// Transaction agregation proofs can represent any contiguous range of two or
-/// more transactions, up to an entire block.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct GeneratedTxnAggProof {
-    /// Public values of this transaction aggregation proof.
-    pub p_vals: PublicValues,
-    /// Underlying plonky2 proof.
-    pub intern: ProofWithPublicInputs,
-}
-
 /// A block proof along with the block height against which this proof ensures
 /// the validity since the last proof checkpoint.
 #[derive(Clone, Debug, Deserialize, Serialize)]
@@ -71,9 +35,9 @@ pub struct GeneratedAggBlockProof {
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub enum SegmentAggregatableProof {
     /// The underlying proof is a segment proof.
-    Seg(GeneratedSegmentProof),
-    /// The underlying proof is an aggregation proof.
-    Agg(GeneratedSegmentAggProof),
+    Segment(ProofWithPublicValues),
+    /// The underlying proof is an aggregated segment proof.
+    Agg(ProofWithPublicValues),
 }
 
 /// Sometimes we don't care about the underlying proof type and instead only if
@@ -83,91 +47,52 @@ pub enum SegmentAggregatableProof {
 pub enum BatchAggregatableProof {
     /// The underlying proof is a segment proof. It first needs to be aggregated
     /// with another segment proof, or a dummy one.
-    Segment(GeneratedSegmentProof),
-    /// The underlying proof is a transaction proof.
-    Txn(GeneratedSegmentAggProof),
-    /// The underlying proof is an aggregation proof.
-    Agg(GeneratedTxnAggProof),
+    Segment(ProofWithPublicValues),
+    /// The underlying proof is an aggregated segment proof.
+    SegmentAgg(ProofWithPublicValues),
+    /// The underlying proof is an aggregated batch proof.
+    BatchAgg(ProofWithPublicValues),
 }
 
 impl SegmentAggregatableProof {
-    pub(crate) fn public_values(&self) -> PublicValues {
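+    /// Returns a clone of the underlying proof along with its public values.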
+    pub(crate) fn proof_with_pvs(&self) -> ProofWithPublicValues {
         match self {
-            SegmentAggregatableProof::Seg(info) => info.p_vals.clone(),
-            SegmentAggregatableProof::Agg(info) => info.p_vals.clone(),
+            SegmentAggregatableProof::Segment(info) => info.clone(),
+            SegmentAggregatableProof::Agg(info) => info.clone(),
         }
     }
 
     pub(crate) const fn is_agg(&self) -> bool {
         match self {
-            SegmentAggregatableProof::Seg(_) => false,
+            SegmentAggregatableProof::Segment(_) => false,
             SegmentAggregatableProof::Agg(_) => true,
         }
     }
-
-    pub(crate) const fn intern(&self) -> &ProofWithPublicInputs {
-        match self {
-            SegmentAggregatableProof::Seg(info) => &info.intern,
-            SegmentAggregatableProof::Agg(info) => &info.intern,
-        }
-    }
 }
 
 impl BatchAggregatableProof {
-    pub(crate) fn public_values(&self) -> PublicValues {
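+    /// Returns a reference to the underlying proof along with its public values.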
+    pub(crate) fn proof_with_pvs(&self) -> &ProofWithPublicValues {
         match self {
-            BatchAggregatableProof::Segment(info) => info.p_vals.clone(),
-            BatchAggregatableProof::Txn(info) => info.p_vals.clone(),
-            BatchAggregatableProof::Agg(info) => info.p_vals.clone(),
+            BatchAggregatableProof::Segment(info) => info,
+            BatchAggregatableProof::SegmentAgg(info) => info,
+            BatchAggregatableProof::BatchAgg(info) => info,
         }
     }
 
     pub(crate) const fn is_agg(&self) -> bool {
         match self {
             BatchAggregatableProof::Segment(_) => false,
-            BatchAggregatableProof::Txn(_) => false,
-            BatchAggregatableProof::Agg(_) => true,
-        }
-    }
-
-    pub(crate) const fn intern(&self) -> &ProofWithPublicInputs {
-        match self {
-            BatchAggregatableProof::Segment(info) => &info.intern,
-            BatchAggregatableProof::Txn(info) => &info.intern,
-            BatchAggregatableProof::Agg(info) => &info.intern,
+            BatchAggregatableProof::SegmentAgg(_) => false,
+            BatchAggregatableProof::BatchAgg(_) => true,
         }
     }
 }
 
-impl From<GeneratedSegmentProof> for SegmentAggregatableProof {
-    fn from(v: GeneratedSegmentProof) -> Self {
-        Self::Seg(v)
-    }
-}
-
-impl From<GeneratedSegmentAggProof> for SegmentAggregatableProof {
-    fn from(v: GeneratedSegmentAggProof) -> Self {
-        Self::Agg(v)
-    }
-}
-
-impl From<GeneratedSegmentAggProof> for BatchAggregatableProof {
-    fn from(v: GeneratedSegmentAggProof) -> Self {
-        Self::Txn(v)
-    }
-}
-
-impl From<GeneratedTxnAggProof> for BatchAggregatableProof {
-    fn from(v: GeneratedTxnAggProof) -> Self {
-        Self::Agg(v)
-    }
-}
-
 impl From<SegmentAggregatableProof> for BatchAggregatableProof {
     fn from(v: SegmentAggregatableProof) -> Self {
         match v {
-            SegmentAggregatableProof::Agg(agg) => BatchAggregatableProof::Txn(agg),
-            SegmentAggregatableProof::Seg(seg) => BatchAggregatableProof::Segment(seg),
+            SegmentAggregatableProof::Agg(agg) => BatchAggregatableProof::SegmentAgg(agg),
+            SegmentAggregatableProof::Segment(seg) => BatchAggregatableProof::Segment(seg),
         }
     }
 }
diff --git a/zero/src/prover.rs b/zero/src/prover.rs
index 665d4f828..6a194ddf1 100644
--- a/zero/src/prover.rs
+++ b/zero/src/prover.rs
@@ -129,7 +129,7 @@ impl BlockProverInput {
                 .run(&runtime)
                 .await?;
 
-        if let crate::proof_types::BatchAggregatableProof::Agg(proof) = final_batch_proof {
+        if let crate::proof_types::BatchAggregatableProof::BatchAgg(proof) = final_batch_proof {
             let block_number = block_number
                 .to_u64()
                 .context("block number overflows u64")?;
diff --git a/zero/src/prover_state/mod.rs b/zero/src/prover_state/mod.rs
index 5dc34a53d..edbad02ba 100644
--- a/zero/src/prover_state/mod.rs
+++ b/zero/src/prover_state/mod.rs
@@ -1,16 +1,16 @@
 //! Global prover state management and utilities.
 //!
 //! This module provides the following:
-//! - [`Circuit`] and [`CircuitConfig`] which can be used to dynamically
-//!   construct [`evm_arithmetization::fixed_recursive_verifier::AllRecursiveCircuits`]
-//!   from the specified circuit sizes.
+//! - [`ProverState`] and [`CircuitConfig`] which can be used to dynamically
+//!   construct [`evm_arithmetization::AllRecursiveCircuits`] from the specified
+//!   circuit sizes.
 //! - Command line arguments for constructing a [`CircuitConfig`].
 //!     - Provides default values for the circuit sizes.
 //!     - Allows the circuit sizes to be specified via environment variables.
 //! - Persistence utilities for saving and loading
-//!   [`evm_arithmetization::fixed_recursive_verifier::AllRecursiveCircuits`].
-//! - Global prover state management via the [`P_STATE`] static and the
-//!   [`set_prover_state_from_config`] function.
+//!   [`evm_arithmetization::AllRecursiveCircuits`].
+//! - Global prover state management via the `P_STATE` static and the
+//!   [`p_state`] function.
 use std::borrow::Borrow;
 use std::{fmt::Display, sync::OnceLock};
 
@@ -20,13 +20,12 @@ use evm_arithmetization::{
     AllStark, GenerationSegmentData, RecursiveCircuitsForTableSize, StarkConfig,
     TrimmedGenerationInputs,
 };
-use evm_arithmetization::{ProofWithPublicInputs, VerifierData};
+use evm_arithmetization::{ProofWithPublicInputs, ProofWithPublicValues, VerifierData};
 use plonky2::recursion::cyclic_recursion::check_cyclic_proof_verifier_data;
 use plonky2::util::timing::TimingTree;
 use tracing::info;
 
 use self::circuit::{CircuitConfig, NUM_TABLES};
-use crate::proof_types::GeneratedSegmentProof;
 use crate::prover_state::persistence::{
     BaseProverResource, DiskResource, MonolithicProverResource, RecursiveCircuitResource,
     VerifierResource,
@@ -223,7 +222,7 @@ impl ProverStateManager {
         &self,
         input: TrimmedGenerationInputs,
         segment_data: &mut GenerationSegmentData,
-    ) -> anyhow::Result<GeneratedSegmentProof> {
+    ) -> anyhow::Result<ProofWithPublicValues> {
         let config = StarkConfig::standard_fast_config();
         let all_stark = AllStark::default();
 
@@ -238,12 +237,12 @@ impl ProverStateManager {
 
         let table_circuits = self.load_table_circuits(&config, &all_proof)?;
 
-        let (intern, p_vals) =
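+        // Produce the recursive segment proof, together with its public values, from the initial STARK proof.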
+        let proof_with_pvs =
             p_state()
                 .state
                 .prove_segment_after_initial_stark(all_proof, &table_circuits, None)?;
 
-        Ok(GeneratedSegmentProof { p_vals, intern })
+        Ok(proof_with_pvs)
     }
 
     /// Generate a segment proof using the specified input on the monolithic
@@ -252,7 +251,7 @@ impl ProverStateManager {
         &self,
         input: TrimmedGenerationInputs,
         segment_data: &mut GenerationSegmentData,
-    ) -> anyhow::Result<GeneratedSegmentProof> {
+    ) -> anyhow::Result<ProofWithPublicValues> {
         let p_out = p_state().state.prove_segment(
             &AllStark::default(),
             &StarkConfig::standard_fast_config(),
@@ -263,12 +262,12 @@ impl ProverStateManager {
         )?;
 
         let ProverOutputData {
+            is_agg: _,
             is_dummy: _,
-            proof_with_pis: intern,
-            public_values: p_vals,
+            proof_with_pvs,
         } = p_out;
 
-        Ok(GeneratedSegmentProof { p_vals, intern })
+        Ok(proof_with_pvs)
     }
 
     /// Generate a segment proof using the specified input.
@@ -283,7 +282,7 @@ impl ProverStateManager {
     pub fn generate_segment_proof(
         &self,
         input: (TrimmedGenerationInputs, GenerationSegmentData),
-    ) -> anyhow::Result<GeneratedSegmentProof> {
+    ) -> anyhow::Result<ProofWithPublicValues> {
         let (generation_inputs, mut segment_data) = input;
 
         match self.persistence {