Skip to content
Merged
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Added

- Add disaster recovery for sequencer
- Catch up possible DA-only blocks when restarting. [#3057](https://github.com/evstack/ev-node/pull/3057)
- Verify DA and P2P state on restart (prevent double-signing). [#3061](https://github.com/evstack/ev-node/pull/3061)
- Node pruning support. [#2984](https://github.com/evstack/ev-node/pull/2984)
- Two different sorts of pruning implemented:
_Classic pruning_ (`all`): prunes given `HEAD-n` blocks from the databases, including store metadata.
Expand All @@ -21,6 +24,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changes

- Store pending blocks separately from executed blocks key. [#3073](https://github.com/evstack/ev-node/pull/3073)
- Fixes issues with force inclusion verification on sync nodes. [#3057](https://github.com/evstack/ev-node/pull/3057)
- Add flag to `local-da` to produce empty DA blocks (closer to the real system). [#3057](https://github.com/evstack/ev-node/pull/3057)

## v1.0.0-rc.4

Expand Down
4 changes: 4 additions & 0 deletions apps/evm/server/force_inclusion_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,10 @@ func (m *mockDA) HasForcedInclusionNamespace() bool {
return true
}

// GetLatestDAHeight satisfies the DA client interface for the mock.
// It always reports height zero and never fails.
func (m *mockDA) GetLatestDAHeight(_ context.Context) (uint64, error) {
	var height uint64
	return height, nil
}

func TestForceInclusionServer_handleSendRawTransaction_Success(t *testing.T) {
testHeight := uint64(100)

Expand Down
4 changes: 2 additions & 2 deletions block/internal/da/async_block_retriever.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ func (f *asyncBlockRetriever) GetCachedBlock(ctx context.Context, daHeight uint6

block := &BlockData{
Height: pbBlock.Height,
Timestamp: time.Unix(pbBlock.Timestamp, 0).UTC(),
Timestamp: time.Unix(0, pbBlock.Timestamp).UTC(),
Blobs: pbBlock.Blobs,
}

Expand Down Expand Up @@ -261,7 +261,7 @@ func (f *asyncBlockRetriever) fetchAndCacheBlock(height uint64) {
// Serialize and cache the block
pbBlock := &pb.BlockData{
Height: block.Height,
Timestamp: block.Timestamp.Unix(),
Timestamp: block.Timestamp.UnixNano(),
Blobs: block.Blobs,
}
data, err := proto.Marshal(pbBlock)
Expand Down
12 changes: 6 additions & 6 deletions block/internal/da/async_block_retriever_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ func TestAsyncBlockRetriever_StopGracefully(t *testing.T) {
func TestBlockData_Serialization(t *testing.T) {
block := &BlockData{
Height: 100,
Timestamp: time.Unix(12345, 0).UTC(),
Timestamp: time.Unix(12345, 123456789).UTC(),
Blobs: [][]byte{
[]byte("blob1"),
[]byte("blob2"),
Expand All @@ -212,7 +212,7 @@ func TestBlockData_Serialization(t *testing.T) {
// Serialize using protobuf
pbBlock := &pb.BlockData{
Height: block.Height,
Timestamp: block.Timestamp.Unix(),
Timestamp: block.Timestamp.UnixNano(),
Blobs: block.Blobs,
}
data, err := proto.Marshal(pbBlock)
Expand All @@ -226,11 +226,11 @@ func TestBlockData_Serialization(t *testing.T) {

decoded := &BlockData{
Height: decodedPb.Height,
Timestamp: time.Unix(decodedPb.Timestamp, 0).UTC(),
Timestamp: time.Unix(0, decodedPb.Timestamp).UTC(),
Blobs: decodedPb.Blobs,
}

assert.Equal(t, block.Timestamp.Unix(), decoded.Timestamp.Unix())
assert.Equal(t, block.Timestamp.UnixNano(), decoded.Timestamp.UnixNano())
assert.Equal(t, block.Height, decoded.Height)
assert.Equal(t, len(block.Blobs), len(decoded.Blobs))
for i := range block.Blobs {
Expand All @@ -248,7 +248,7 @@ func TestBlockData_SerializationEmpty(t *testing.T) {
// Serialize using protobuf
pbBlock := &pb.BlockData{
Height: block.Height,
Timestamp: block.Timestamp.Unix(),
Timestamp: block.Timestamp.UnixNano(),
Blobs: block.Blobs,
}
data, err := proto.Marshal(pbBlock)
Expand All @@ -261,7 +261,7 @@ func TestBlockData_SerializationEmpty(t *testing.T) {

decoded := &BlockData{
Height: decodedPb.Height,
Timestamp: time.Unix(decodedPb.Timestamp, 0).UTC(),
Timestamp: time.Unix(0, decodedPb.Timestamp).UTC(),
Blobs: decodedPb.Blobs,
}

Expand Down
17 changes: 17 additions & 0 deletions block/internal/da/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,23 @@ func (c *client) Retrieve(ctx context.Context, height uint64, namespace []byte)
}
}

// GetLatestDAHeight returns the latest height available on the DA layer by
// querying the network head.
func (c *client) GetLatestDAHeight(ctx context.Context) (uint64, error) {
headCtx, cancel := context.WithTimeout(ctx, c.defaultTimeout)
defer cancel()

header, err := c.headerAPI.NetworkHead(headCtx)
if err != nil {
return 0, fmt.Errorf("failed to get DA network head: %w", err)
}
if header == nil {
return 0, fmt.Errorf("DA network head returned nil header")
}

return header.Height, nil
}

// RetrieveForcedInclusion retrieves blobs from the forced inclusion namespace at the specified height.
func (c *client) RetrieveForcedInclusion(ctx context.Context, height uint64) datypes.ResultRetrieve {
if !c.hasForcedNamespace {
Expand Down
4 changes: 4 additions & 0 deletions block/internal/da/forced_inclusion_retriever.go
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,9 @@ func (r *forcedInclusionRetriever) RetrieveForcedIncludedTxs(ctx context.Context

if result.Code == datypes.StatusNotFound {
r.logger.Debug().Uint64("height", h).Msg("no forced inclusion blobs at height")
syncFetchedBlocks[h] = &BlockData{
Timestamp: result.Timestamp,
}
continue
}

Expand Down Expand Up @@ -229,6 +232,7 @@ func (r *forcedInclusionRetriever) RetrieveForcedIncludedTxs(ctx context.Context
Msg("Failed to retrieve DA epoch.. retrying next iteration")

return &ForcedInclusionEvent{
Timestamp: event.Timestamp,
StartDaHeight: daHeight,
EndDaHeight: daHeight,
Txs: [][]byte{},
Expand Down
3 changes: 3 additions & 0 deletions block/internal/da/interface.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ type Client interface {
// Get retrieves blobs by their IDs. Used for visualization and fetching specific blobs.
Get(ctx context.Context, ids []datypes.ID, namespace []byte) ([]datypes.Blob, error)

// GetLatestDAHeight returns the latest height available on the DA layer.
GetLatestDAHeight(ctx context.Context) (uint64, error)

// Namespace accessors.
GetHeaderNamespace() []byte
GetDataNamespace() []byte
Expand Down
14 changes: 14 additions & 0 deletions block/internal/da/tracing.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,20 @@ func (t *tracedClient) Validate(ctx context.Context, ids []datypes.ID, proofs []
return res, nil
}

// GetLatestDAHeight delegates to the wrapped client, recording the call in a
// trace span. On success the resulting height is attached as a span
// attribute; on failure the error is recorded and the span status is set.
func (t *tracedClient) GetLatestDAHeight(ctx context.Context) (uint64, error) {
	ctx, span := t.tracer.Start(ctx, "DA.GetLatestDAHeight")
	defer span.End()

	height, err := t.inner.GetLatestDAHeight(ctx)
	if err == nil {
		span.SetAttributes(attribute.Int64("da.latest_height", int64(height)))
		return height, nil
	}

	span.RecordError(err)
	span.SetStatus(codes.Error, err.Error())
	return 0, err
}

func (t *tracedClient) GetHeaderNamespace() []byte { return t.inner.GetHeaderNamespace() }
func (t *tracedClient) GetDataNamespace() []byte { return t.inner.GetDataNamespace() }
func (t *tracedClient) GetForcedInclusionNamespace() []byte {
Expand Down
9 changes: 5 additions & 4 deletions block/internal/da/tracing_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,10 +54,11 @@ func (m *mockFullClient) Validate(ctx context.Context, ids []datypes.ID, proofs
}
return nil, nil
}
func (m *mockFullClient) GetHeaderNamespace() []byte { return []byte{0x01} }
func (m *mockFullClient) GetDataNamespace() []byte { return []byte{0x02} }
func (m *mockFullClient) GetForcedInclusionNamespace() []byte { return []byte{0x03} }
func (m *mockFullClient) HasForcedInclusionNamespace() bool { return true }
// GetLatestDAHeight reports a fixed DA height of zero for the mock.
func (m *mockFullClient) GetLatestDAHeight(_ context.Context) (uint64, error) {
	return 0, nil
}

// Namespace accessors return fixed sentinel bytes so tests can tell the
// three namespaces apart.
func (m *mockFullClient) GetHeaderNamespace() []byte          { return []byte{0x01} }
func (m *mockFullClient) GetDataNamespace() []byte            { return []byte{0x02} }
func (m *mockFullClient) GetForcedInclusionNamespace() []byte { return []byte{0x03} }

// HasForcedInclusionNamespace always reports true for the mock.
func (m *mockFullClient) HasForcedInclusionNamespace() bool { return true }

// setup a tracer provider + span recorder
func setupDATrace(t *testing.T, inner FullClient) (FullClient, *tracetest.SpanRecorder) {
Expand Down
16 changes: 16 additions & 0 deletions block/internal/executing/executor.go
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,14 @@ func (e *Executor) initializeState() error {
return fmt.Errorf("failed to sync execution layer: %w", err)
}

// For based sequencer, advance safe/finalized since it comes from DA.
if e.config.Node.BasedSequencer && syncTargetHeight > 0 {
if err := e.exec.SetFinal(e.ctx, syncTargetHeight); err != nil {
e.sendCriticalError(fmt.Errorf("failed to set final height in based sequencer mode: %w", err))
return fmt.Errorf("failed to set final height in based sequencer mode: %w", err)
}
}

// Double-check state against Raft after replay
if e.raftNode != nil {
raftState := e.raftNode.GetState()
Expand Down Expand Up @@ -627,6 +635,14 @@ func (e *Executor) ProduceBlock(ctx context.Context) error {
Int("txs", len(data.Txs)).
Msg("produced block")

// For based sequencer, advance safe/finalized since it comes from DA.
if e.config.Node.BasedSequencer {
if err := e.exec.SetFinal(e.ctx, newHeight); err != nil {
e.sendCriticalError(fmt.Errorf("failed to set final height in based sequencer mode: %w", err))
return fmt.Errorf("failed to set final height in based sequencer mode: %w", err)
}
}

return nil
}

Expand Down
2 changes: 1 addition & 1 deletion block/internal/syncing/block_syncer.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,5 +21,5 @@ type BlockSyncer interface {
ValidateBlock(ctx context.Context, currState types.State, data *types.Data, header *types.SignedHeader) error

// VerifyForcedInclusionTxs verifies that forced inclusion transactions are properly handled.
VerifyForcedInclusionTxs(ctx context.Context, currentState types.State, data *types.Data) error
VerifyForcedInclusionTxs(ctx context.Context, daHeight uint64, data *types.Data) error
}
Loading
Loading