From bf4cc9a27868da214f6af2df3bbea22440c39c87 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Fri, 8 Nov 2024 09:28:17 +0200 Subject: [PATCH 01/36] Add new websocket handler and skeleton for its deps * Added websocket controller * Added mock block provider * Added data provider factory * Added websocket handler * Added websocket config * Added a tiny POC test for websocket handler --- cmd/observer/node_builder/observer_builder.go | 3 + cmd/util/cmd/run-script/cmd.go | 2 + .../access/handle_irrecoverable_state_test.go | 2 + .../integration_unsecure_grpc_server_test.go | 2 + engine/access/rest/router/router.go | 26 ++- .../access/rest/router/router_test_helpers.go | 4 +- engine/access/rest/server.go | 6 +- engine/access/rest/websockets/config.go | 19 ++ engine/access/rest/websockets/controller.go | 166 ++++++++++++++++++ .../rest/websockets/data_provider/blocks.go | 61 +++++++ .../rest/websockets/data_provider/factory.go | 33 ++++ .../rest/websockets/data_provider/provider.go | 12 ++ engine/access/rest/websockets/handler.go | 63 +++++++ engine/access/rest/websockets/handler_test.go | 85 +++++++++ .../legacy/routes/subscribe_events_test.go | 10 +- engine/access/rest/websockets/models.go | 59 +++++++ .../access/rest/websockets/threadsafe_map.go | 55 ++++++ engine/access/rest_api_test.go | 2 + engine/access/rpc/engine.go | 25 ++- engine/access/rpc/rate_limit_test.go | 2 + engine/access/secure_grpcr_test.go | 2 + 21 files changed, 620 insertions(+), 19 deletions(-) create mode 100644 engine/access/rest/websockets/config.go create mode 100644 engine/access/rest/websockets/controller.go create mode 100644 engine/access/rest/websockets/data_provider/blocks.go create mode 100644 engine/access/rest/websockets/data_provider/factory.go create mode 100644 engine/access/rest/websockets/data_provider/provider.go create mode 100644 engine/access/rest/websockets/handler.go create mode 100644 engine/access/rest/websockets/handler_test.go create mode 100644 engine/access/rest/websockets/models.go create mode 100644 engine/access/rest/websockets/threadsafe_map.go diff --git a/cmd/observer/node_builder/observer_builder.go b/cmd/observer/node_builder/observer_builder.go index 63721725711..4033e18830b 100644 --- a/cmd/observer/node_builder/observer_builder.go +++ b/cmd/observer/node_builder/observer_builder.go @@ -44,6 +44,7 @@ import ( "github.com/onflow/flow-go/engine/access/rest" restapiproxy "github.com/onflow/flow-go/engine/access/rest/apiproxy" "github.com/onflow/flow-go/engine/access/rest/router" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" rpcConnection "github.com/onflow/flow-go/engine/access/rpc/connection" @@ -167,6 +168,7 @@ type ObserverServiceConfig struct { registerCacheSize uint programCacheSize uint registerDBPruneThreshold uint64 + websocketConfig websockets.Config } // DefaultObserverServiceConfig defines all the default values for the ObserverServiceConfig @@ -250,6 +252,7 @@ func DefaultObserverServiceConfig() *ObserverServiceConfig { registerCacheSize: 0, programCacheSize: 0, registerDBPruneThreshold: pruner.DefaultThreshold, + websocketConfig: *websockets.NewDefaultWebsocketConfig(), } } diff --git a/cmd/util/cmd/run-script/cmd.go b/cmd/util/cmd/run-script/cmd.go index 1f24d2599c2..dc4d6e381a0 100644 --- a/cmd/util/cmd/run-script/cmd.go +++ b/cmd/util/cmd/run-script/cmd.go @@ -16,6 +16,7 @@ import ( "github.com/onflow/flow-go/cmd/util/ledger/util" 
"github.com/onflow/flow-go/cmd/util/ledger/util/registers" "github.com/onflow/flow-go/engine/access/rest" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/state_stream/backend" "github.com/onflow/flow-go/engine/access/subscription" "github.com/onflow/flow-go/engine/execution/computation" @@ -169,6 +170,7 @@ func run(*cobra.Command, []string) { metrics.NewNoopCollector(), nil, backend.Config{}, + *websockets.NewDefaultWebsocketConfig(), ) if err != nil { log.Fatal().Err(err).Msg("failed to create server") diff --git a/engine/access/handle_irrecoverable_state_test.go b/engine/access/handle_irrecoverable_state_test.go index 466e94090aa..e9db308e86c 100644 --- a/engine/access/handle_irrecoverable_state_test.go +++ b/engine/access/handle_irrecoverable_state_test.go @@ -22,6 +22,7 @@ import ( accessmock "github.com/onflow/flow-go/engine/access/mock" "github.com/onflow/flow-go/engine/access/rest" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" @@ -108,6 +109,7 @@ func (suite *IrrecoverableStateTestSuite) SetupTest() { RestConfig: rest.Config{ ListenAddress: unittest.DefaultAddress, }, + WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/integration_unsecure_grpc_server_test.go b/engine/access/integration_unsecure_grpc_server_test.go index f99805687ba..98de205ad66 100644 --- a/engine/access/integration_unsecure_grpc_server_test.go +++ b/engine/access/integration_unsecure_grpc_server_test.go @@ -21,6 +21,7 @@ import ( "github.com/onflow/flow-go/engine" "github.com/onflow/flow-go/engine/access/index" accessmock "github.com/onflow/flow-go/engine/access/mock" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" "github.com/onflow/flow-go/engine/access/state_stream" @@ -138,6 +139,7 @@ func (suite *SameGRPCPortTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, + WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), } blockCount := 5 diff --git a/engine/access/rest/router/router.go b/engine/access/rest/router/router.go index c623669d916..74f34f8ff7f 100644 --- a/engine/access/rest/router/router.go +++ b/engine/access/rest/router/router.go @@ -2,6 +2,7 @@ package router import ( "fmt" + "net/http" "regexp" "strings" @@ -10,8 +11,9 @@ import ( "github.com/onflow/flow-go/access" "github.com/onflow/flow-go/engine/access/rest/common/middleware" - "github.com/onflow/flow-go/engine/access/rest/http" + flowhttp "github.com/onflow/flow-go/engine/access/rest/http" "github.com/onflow/flow-go/engine/access/rest/http/models" + "github.com/onflow/flow-go/engine/access/rest/websockets" legacyws "github.com/onflow/flow-go/engine/access/rest/websockets/legacy" "github.com/onflow/flow-go/engine/access/state_stream" "github.com/onflow/flow-go/engine/access/state_stream/backend" @@ -50,7 +52,7 @@ func NewRouterBuilder( func (b *RouterBuilder) AddRestRoutes(backend access.API, chain flow.Chain) *RouterBuilder { linkGenerator := models.NewLinkGeneratorImpl(b.v1SubRouter) for _, r := range Routes { - h := http.NewHandler(b.logger, backend, r.Handler, 
linkGenerator, chain) + h := flowhttp.NewHandler(b.logger, backend, r.Handler, linkGenerator, chain) b.v1SubRouter. Methods(r.Method). Path(r.Pattern). @@ -60,8 +62,8 @@ func (b *RouterBuilder) AddRestRoutes(backend access.API, chain flow.Chain) *Rou return b } -// AddWsRoutes adds WebSocket routes to the router. -func (b *RouterBuilder) AddWsRoutes( +// AddLegacyWebsocketsRoutes adds WebSocket routes to the router. +func (b *RouterBuilder) AddLegacyWebsocketsRoutes( stateStreamApi state_stream.API, chain flow.Chain, stateStreamConfig backend.Config, @@ -79,6 +81,22 @@ func (b *RouterBuilder) AddWsRoutes( return b } +func (b *RouterBuilder) AddWebsocketsRoute( + chain flow.Chain, + config *websockets.Config, + streamApi state_stream.API, + streamConfig backend.Config, +) *RouterBuilder { + handler := websockets.NewWebSocketHandler(b.logger, config, chain, streamApi, streamConfig) + b.v1SubRouter. + Methods(http.MethodGet). + Path("/ws"). + Name("ws"). + Handler(handler) + + return b +} + func (b *RouterBuilder) Build() *mux.Router { return b.router } diff --git a/engine/access/rest/router/router_test_helpers.go b/engine/access/rest/router/router_test_helpers.go index 0256e529457..68c46df34f3 100644 --- a/engine/access/rest/router/router_test_helpers.go +++ b/engine/access/rest/router/router_test_helpers.go @@ -133,7 +133,7 @@ func ExecuteRequest(req *http.Request, backend access.API) *httptest.ResponseRec return rr } -func ExecuteWsRequest(req *http.Request, stateStreamApi state_stream.API, responseRecorder *TestHijackResponseRecorder, chain flow.Chain) { +func ExecuteLegacyWsRequest(req *http.Request, stateStreamApi state_stream.API, responseRecorder *TestHijackResponseRecorder, chain flow.Chain) { restCollector := metrics.NewNoopCollector() config := backend.Config{ @@ -145,7 +145,7 @@ func ExecuteWsRequest(req *http.Request, stateStreamApi state_stream.API, respon router := NewRouterBuilder( unittest.Logger(), restCollector, - ).AddWsRoutes( + ).AddLegacyWebsocketsRoutes( stateStreamApi, chain, config, ).Build() diff --git a/engine/access/rest/server.go b/engine/access/rest/server.go index caed80a27c3..d74c8e361ef 100644 --- a/engine/access/rest/server.go +++ b/engine/access/rest/server.go @@ -9,6 +9,7 @@ import ( "github.com/onflow/flow-go/access" "github.com/onflow/flow-go/engine/access/rest/router" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/state_stream" "github.com/onflow/flow-go/engine/access/state_stream/backend" "github.com/onflow/flow-go/model/flow" @@ -41,12 +42,15 @@ func NewServer(serverAPI access.API, restCollector module.RestMetrics, stateStreamApi state_stream.API, stateStreamConfig backend.Config, + wsConfig websockets.Config, ) (*http.Server, error) { builder := router.NewRouterBuilder(logger, restCollector).AddRestRoutes(serverAPI, chain) if stateStreamApi != nil { - builder.AddWsRoutes(stateStreamApi, chain, stateStreamConfig) + builder.AddLegacyWebsocketsRoutes(stateStreamApi, chain, stateStreamConfig) } + builder.AddWebsocketsRoute(chain, &wsConfig, stateStreamApi, stateStreamConfig) + c := cors.New(cors.Options{ AllowedOrigins: []string{"*"}, AllowedHeaders: []string{"*"}, diff --git a/engine/access/rest/websockets/config.go b/engine/access/rest/websockets/config.go new file mode 100644 index 00000000000..8354ca4c11d --- /dev/null +++ b/engine/access/rest/websockets/config.go @@ -0,0 +1,19 @@ +package websockets + +import ( + "time" +) + +type Config struct { + MaxSubscriptionsPerConnection uint64 + 
MaxResponsesPerSecond uint64 + SendMessageTimeout time.Duration +} + +func NewDefaultWebsocketConfig() *Config { + return &Config{ + MaxSubscriptionsPerConnection: 1000, + MaxResponsesPerSecond: 1000, + SendMessageTimeout: 10 * time.Second, + } +} diff --git a/engine/access/rest/websockets/controller.go b/engine/access/rest/websockets/controller.go new file mode 100644 index 00000000000..6d1c6f6416b --- /dev/null +++ b/engine/access/rest/websockets/controller.go @@ -0,0 +1,166 @@ +package websockets + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/google/uuid" + "github.com/gorilla/websocket" + + "github.com/rs/zerolog" + + dp "github.com/onflow/flow-go/engine/access/rest/websockets/data_provider" + "github.com/onflow/flow-go/engine/access/state_stream" + "github.com/onflow/flow-go/engine/access/state_stream/backend" +) + +type Controller struct { + ctx context.Context + logger zerolog.Logger + config *Config + conn *websocket.Conn + communicationChannel chan interface{} + dataProviders *ThreadSafeMap[uuid.UUID, dp.DataProvider] + dataProvidersFactory *dp.Factory +} + +func NewWebSocketController( + ctx context.Context, + logger zerolog.Logger, + config *Config, + streamApi state_stream.API, + streamConfig backend.Config, + conn *websocket.Conn, +) *Controller { + return &Controller{ + ctx: ctx, + logger: logger.With().Str("component", "websocket-controller").Logger(), + config: config, + conn: conn, + communicationChannel: make(chan interface{}), //TODO: should it be buffered chan? + dataProviders: NewThreadSafeMap[uuid.UUID, dp.DataProvider](), + dataProvidersFactory: dp.NewDataProviderFactory(logger, streamApi, streamConfig), + } +} + +// HandleConnection manages the WebSocket connection, adding context and error handling. +func (c *Controller) HandleConnection() { + //TODO: configure the connection with ping-pong and deadlines + + go c.readMessagesFromClient(c.ctx) + go c.writeMessagesToClient(c.ctx) +} + +func (c *Controller) writeMessagesToClient(ctx context.Context) { + for { + select { + case <-ctx.Done(): + return + case msg := <-c.communicationChannel: + // TODO: handle 'response per second' limits + c.conn.WriteJSON(msg) + } + } +} + +func (c *Controller) readMessagesFromClient(ctx context.Context) { + defer close(c.communicationChannel) + defer c.conn.Close() + + for { + select { + case <-ctx.Done(): + c.logger.Info().Msg("context canceled, stopping read message loop") + return + default: + msg, err := c.readMessage() + if err != nil { + if websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseAbnormalClosure) { + return + } + c.logger.Warn().Err(err).Msg("error reading message from client") + return + } + + baseMsg, err := c.parseMessage(msg) + if err != nil { + c.logger.Warn().Err(err).Msg("error parsing base message") + return + } + + if err := c.dispatchAction(baseMsg.Action, msg); err != nil { + c.logger.Warn().Err(err).Str("action", baseMsg.Action).Msg("error handling action") + } + } + } +} + +func (c *Controller) readMessage() (json.RawMessage, error) { + var message json.RawMessage + if err := c.conn.ReadJSON(&message); err != nil { + return nil, fmt.Errorf("error reading JSON from client: %w", err) + } + return message, nil +} + +func (c *Controller) parseMessage(message json.RawMessage) (BaseMessageRequest, error) { + var baseMsg BaseMessageRequest + if err := json.Unmarshal(message, &baseMsg); err != nil { + return BaseMessageRequest{}, fmt.Errorf("error unmarshalling base message: %w", err) + } + return baseMsg, nil +} + +// 
dispatchAction routes the action to the appropriate handler based on the action type. +func (c *Controller) dispatchAction(action string, message json.RawMessage) error { + switch action { + case "subscribe": + var subscribeMsg SubscribeMessageRequest + if err := json.Unmarshal(message, &subscribeMsg); err != nil { + return fmt.Errorf("error unmarshalling subscribe message: %w", err) + } + c.handleSubscribe(subscribeMsg) + + case "unsubscribe": + var unsubscribeMsg UnsubscribeMessageRequest + if err := json.Unmarshal(message, &unsubscribeMsg); err != nil { + return fmt.Errorf("error unmarshalling unsubscribe message: %w", err) + } + c.handleUnsubscribe(unsubscribeMsg) + + case "list_subscriptions": + var listMsg ListSubscriptionsMessageRequest + if err := json.Unmarshal(message, &listMsg); err != nil { + return fmt.Errorf("error unmarshalling list subscriptions message: %w", err) + } + c.handleListSubscriptions(listMsg) + + default: + c.logger.Warn().Str("action", action).Msg("unknown action type") + return fmt.Errorf("unknown action type: %s", action) + } + return nil +} + +func (c *Controller) handleSubscribe(msg SubscribeMessageRequest) { + dp := c.dataProvidersFactory.NewDataProvider(c.ctx, c.communicationChannel, msg.Topic) + c.dataProviders.Insert(dp.ID(), dp) + dp.Run() +} + +func (c *Controller) handleUnsubscribe(msg UnsubscribeMessageRequest) { + id, err := uuid.Parse(msg.ID) + if err != nil { + c.logger.Warn().Err(err).Str("topic", msg.Topic).Msg("error parsing message ID") + return + } + + dp, ok := c.dataProviders.Get(id) + if ok { + dp.Close() + c.dataProviders.Remove(id) + } +} + +func (c *Controller) handleListSubscriptions(msg ListSubscriptionsMessageRequest) {} diff --git a/engine/access/rest/websockets/data_provider/blocks.go b/engine/access/rest/websockets/data_provider/blocks.go new file mode 100644 index 00000000000..4c23bd4b587 --- /dev/null +++ b/engine/access/rest/websockets/data_provider/blocks.go @@ -0,0 +1,61 @@ +package data_provider + +import ( + "context" + + "github.com/google/uuid" + "github.com/rs/zerolog" + + "github.com/onflow/flow-go/engine/access/state_stream" +) + +type MockBlockProvider struct { + id uuid.UUID + ch chan<- interface{} + topic string + logger zerolog.Logger + ctx context.Context + stopProviderFunc context.CancelFunc + streamApi state_stream.API +} + +func NewMockBlockProvider( + ctx context.Context, + ch chan<- interface{}, + topic string, + logger zerolog.Logger, + streamApi state_stream.API, +) *MockBlockProvider { + ctx, cancel := context.WithCancel(ctx) + return &MockBlockProvider{ + id: uuid.New(), + ch: ch, + topic: topic, + logger: logger.With().Str("component", "block-provider").Logger(), + ctx: ctx, + stopProviderFunc: cancel, + streamApi: streamApi, + } +} + +func (p *MockBlockProvider) Run() { + select { + case <-p.ctx.Done(): + return + default: + p.ch <- "hello" + p.ch <- "world" + } +} + +func (p *MockBlockProvider) ID() uuid.UUID { + return p.id +} + +func (p *MockBlockProvider) Topic() string { + return p.topic +} + +func (p *MockBlockProvider) Close() { + p.stopProviderFunc() +} diff --git a/engine/access/rest/websockets/data_provider/factory.go b/engine/access/rest/websockets/data_provider/factory.go new file mode 100644 index 00000000000..86d69475377 --- /dev/null +++ b/engine/access/rest/websockets/data_provider/factory.go @@ -0,0 +1,33 @@ +package data_provider + +import ( + "context" + + "github.com/rs/zerolog" + + "github.com/onflow/flow-go/engine/access/state_stream" + 
"github.com/onflow/flow-go/engine/access/state_stream/backend" +) + +type Factory struct { + logger zerolog.Logger + streamApi state_stream.API + streamConfig backend.Config +} + +func NewDataProviderFactory(logger zerolog.Logger, streamApi state_stream.API, streamConfig backend.Config) *Factory { + return &Factory{ + logger: logger, + streamApi: streamApi, + streamConfig: streamConfig, + } +} + +func (f *Factory) NewDataProvider(ctx context.Context, ch chan<- interface{}, topic string) DataProvider { + switch topic { + case "blocks": + return NewMockBlockProvider(ctx, ch, topic, f.logger, f.streamApi) + default: + return nil + } +} diff --git a/engine/access/rest/websockets/data_provider/provider.go b/engine/access/rest/websockets/data_provider/provider.go new file mode 100644 index 00000000000..e919af590b6 --- /dev/null +++ b/engine/access/rest/websockets/data_provider/provider.go @@ -0,0 +1,12 @@ +package data_provider + +import ( + "github.com/google/uuid" +) + +type DataProvider interface { + Run() + ID() uuid.UUID + Topic() string + Close() +} diff --git a/engine/access/rest/websockets/handler.go b/engine/access/rest/websockets/handler.go new file mode 100644 index 00000000000..09fd537bb02 --- /dev/null +++ b/engine/access/rest/websockets/handler.go @@ -0,0 +1,63 @@ +package websockets + +import ( + "context" + "net/http" + + "github.com/gorilla/websocket" + "github.com/rs/zerolog" + + "github.com/onflow/flow-go/engine/access/rest/common" + "github.com/onflow/flow-go/engine/access/state_stream" + "github.com/onflow/flow-go/engine/access/state_stream/backend" + "github.com/onflow/flow-go/model/flow" +) + +type Handler struct { + *common.BaseHttpHandler + + logger zerolog.Logger + websocketConfig *Config + streamApi state_stream.API + streamConfig backend.Config +} + +var _ http.Handler = (*Handler)(nil) + +func NewWebSocketHandler(logger zerolog.Logger, config *Config, chain flow.Chain, streamApi state_stream.API, streamConfig backend.Config) *Handler { + return &Handler{ + BaseHttpHandler: common.NewHttpHandler(logger, chain), + websocketConfig: config, + logger: logger, + streamApi: streamApi, + streamConfig: streamConfig, + } +} +func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + //TODO: change to accept topic instead of URL + logger := h.BaseHttpHandler.Logger.With().Str("websocket_subscribe_url", r.URL.String()).Logger() + + err := h.BaseHttpHandler.VerifyRequest(w, r) + if err != nil { + // VerifyRequest sets the response error before returning + logger.Warn().Err(err).Msg("error validating websocket request") + return + } + + upgrader := websocket.Upgrader{ + // allow all origins by default, operators can override using a proxy + CheckOrigin: func(r *http.Request) bool { + return true + }, + } + + conn, err := upgrader.Upgrade(w, r, nil) + if err != nil { + h.BaseHttpHandler.ErrorHandler(w, common.NewRestError(http.StatusInternalServerError, "webSocket upgrade error: ", err), logger) + return + } + + ctx := context.Background() + controller := NewWebSocketController(ctx, logger, h.websocketConfig, h.streamApi, h.streamConfig, conn) + controller.HandleConnection() +} diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go new file mode 100644 index 00000000000..124fd2f8a00 --- /dev/null +++ b/engine/access/rest/websockets/handler_test.go @@ -0,0 +1,85 @@ +package websockets_test + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/gorilla/websocket" + 
"github.com/rs/zerolog" + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + + "github.com/onflow/flow-go/engine/access/rest/websockets" + "github.com/onflow/flow-go/engine/access/state_stream/backend" + streammock "github.com/onflow/flow-go/engine/access/state_stream/mock" + + "github.com/onflow/flow-go/model/flow" + "github.com/onflow/flow-go/utils/unittest" +) + +var ( + chainID = flow.Testnet +) + +type WsHandlerSuite struct { + suite.Suite + + logger zerolog.Logger + handler *websockets.Handler +} + +func (s *WsHandlerSuite) SetupTest() { + s.logger = unittest.Logger() + wsConfig := websockets.NewDefaultWebsocketConfig() + streamApi := streammock.NewAPI(s.T()) + streamConfig := backend.Config{} + s.handler = websockets.NewWebSocketHandler(s.logger, wsConfig, chainID.Chain(), streamApi, streamConfig) +} + +func TestWsHandlerSuite(t *testing.T) { + suite.Run(t, new(WsHandlerSuite)) +} + +func ClientConnection(url string) (*websocket.Conn, *http.Response, error) { + wsURL := "ws" + strings.TrimPrefix(url, "http") + return websocket.DefaultDialer.Dial(wsURL, nil) +} + +func (s *WsHandlerSuite) TestSubscribeRequest() { + s.Run("Happy path", func() { + server := httptest.NewServer(s.handler) + defer server.Close() + + conn, _, err := ClientConnection(server.URL) + require.NoError(s.T(), err) + + args := map[string]interface{}{ + "start_block_height": 10, + } + body := websockets.SubscribeMessageRequest{ + BaseMessageRequest: websockets.BaseMessageRequest{Action: "subscribe"}, + Topic: "blocks", + Arguments: args, + } + bodyJSON, err := json.Marshal(body) + require.NoError(s.T(), err) + + err = conn.WriteMessage(websocket.TextMessage, bodyJSON) + require.NoError(s.T(), err) + + _, msg, err := conn.ReadMessage() + require.NoError(s.T(), err) + + actualMsg := strings.Trim(string(msg), "\n\"\\ ") + require.Equal(s.T(), "hello", actualMsg) + + _, msg, err = conn.ReadMessage() + require.NoError(s.T(), err) + + actualMsg = strings.Trim(string(msg), "\n\"\\ ") + require.Equal(s.T(), "world", actualMsg) + }) +} diff --git a/engine/access/rest/websockets/legacy/routes/subscribe_events_test.go b/engine/access/rest/websockets/legacy/routes/subscribe_events_test.go index c4353cecae2..a423bd4622f 100644 --- a/engine/access/rest/websockets/legacy/routes/subscribe_events_test.go +++ b/engine/access/rest/websockets/legacy/routes/subscribe_events_test.go @@ -252,7 +252,7 @@ func (s *SubscribeEventsSuite) TestSubscribeEvents() { time.Sleep(1 * time.Second) respRecorder.Close() }() - router.ExecuteWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) + router.ExecuteLegacyWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) requireResponse(s.T(), respRecorder, expectedEventsResponses) }) } @@ -264,7 +264,7 @@ func (s *SubscribeEventsSuite) TestSubscribeEventsHandlesErrors() { req, err := getSubscribeEventsRequest(s.T(), s.blocks[0].ID(), s.blocks[0].Header.Height, nil, nil, nil, 1, nil) require.NoError(s.T(), err) respRecorder := router.NewTestHijackResponseRecorder() - router.ExecuteWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) + router.ExecuteLegacyWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) requireError(s.T(), respRecorder, "can only provide either block ID or start height") }) @@ -289,7 +289,7 @@ func (s *SubscribeEventsSuite) TestSubscribeEventsHandlesErrors() { req, err := getSubscribeEventsRequest(s.T(), invalidBlock.ID(), request.EmptyHeight, nil, nil, nil, 1, nil) require.NoError(s.T(), err) respRecorder := 
router.NewTestHijackResponseRecorder() - router.ExecuteWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) + router.ExecuteLegacyWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) requireError(s.T(), respRecorder, "stream encountered an error: subscription error") }) @@ -298,7 +298,7 @@ func (s *SubscribeEventsSuite) TestSubscribeEventsHandlesErrors() { req, err := getSubscribeEventsRequest(s.T(), s.blocks[0].ID(), request.EmptyHeight, []string{"foo"}, nil, nil, 1, nil) require.NoError(s.T(), err) respRecorder := router.NewTestHijackResponseRecorder() - router.ExecuteWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) + router.ExecuteLegacyWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) requireError(s.T(), respRecorder, "invalid event type format") }) @@ -323,7 +323,7 @@ func (s *SubscribeEventsSuite) TestSubscribeEventsHandlesErrors() { req, err := getSubscribeEventsRequest(s.T(), s.blocks[0].ID(), request.EmptyHeight, nil, nil, nil, 1, nil) require.NoError(s.T(), err) respRecorder := router.NewTestHijackResponseRecorder() - router.ExecuteWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) + router.ExecuteLegacyWsRequest(req, stateStreamBackend, respRecorder, chainID.Chain()) requireError(s.T(), respRecorder, "subscription channel closed") }) } diff --git a/engine/access/rest/websockets/models.go b/engine/access/rest/websockets/models.go new file mode 100644 index 00000000000..42abb8b7241 --- /dev/null +++ b/engine/access/rest/websockets/models.go @@ -0,0 +1,59 @@ +package websockets + +// BaseMessageRequest represents a base structure for incoming messages. +type BaseMessageRequest struct { + Action string `json:"action"` // Action type of the request +} + +// BaseMessageResponse represents a base structure for outgoing messages. +type BaseMessageResponse struct { + Action string `json:"action,omitempty"` // Action type of the response + Success bool `json:"success"` // Indicates success or failure + ErrorMessage string `json:"error_message,omitempty"` // Error message, if any +} + +// SubscribeMessageRequest represents a request to subscribe to a topic. +type SubscribeMessageRequest struct { + BaseMessageRequest + Topic string `json:"topic"` // Topic to subscribe to + Arguments map[string]interface{} `json:"arguments"` // Additional arguments for subscription +} + +// SubscribeMessageResponse represents the response to a subscription request. +type SubscribeMessageResponse struct { + BaseMessageResponse + Topic string `json:"topic"` // Topic of the subscription + ID string `json:"id"` // Unique subscription ID +} + +// UnsubscribeMessageRequest represents a request to unsubscribe from a topic. +type UnsubscribeMessageRequest struct { + BaseMessageRequest + Topic string `json:"topic"` // Topic to unsubscribe from + ID string `json:"id"` // Unique subscription ID +} + +// UnsubscribeMessageResponse represents the response to an unsubscription request. +type UnsubscribeMessageResponse struct { + BaseMessageResponse + Topic string `json:"topic"` // Topic of the unsubscription + ID string `json:"id"` // Unique subscription ID +} + +// ListSubscriptionsMessageRequest represents a request to list active subscriptions. +type ListSubscriptionsMessageRequest struct { + BaseMessageRequest +} + +// SubscriptionEntry represents an active subscription entry. 
+type SubscriptionEntry struct { + Topic string `json:"topic,omitempty"` // Topic of the subscription + ID string `json:"id,omitempty"` // Unique subscription ID +} + +// ListSubscriptionsMessageResponse is the structure used to respond to list_subscriptions requests. +// It contains a list of active subscriptions for the current WebSocket connection. +type ListSubscriptionsMessageResponse struct { + BaseMessageResponse + Subscriptions []*SubscriptionEntry `json:"subscriptions,omitempty"` +} diff --git a/engine/access/rest/websockets/threadsafe_map.go b/engine/access/rest/websockets/threadsafe_map.go new file mode 100644 index 00000000000..3ab265a40fb --- /dev/null +++ b/engine/access/rest/websockets/threadsafe_map.go @@ -0,0 +1,55 @@ +package websockets + +import ( + "sync" +) + +// ThreadSafeMap is a thread-safe map with read-write locking. +type ThreadSafeMap[K comparable, V any] struct { + mu sync.RWMutex + m map[K]V +} + +// NewThreadSafeMap initializes a new ThreadSafeMap. +func NewThreadSafeMap[K comparable, V any]() *ThreadSafeMap[K, V] { + return &ThreadSafeMap[K, V]{ + m: make(map[K]V), + } +} + +// Get retrieves a value for a key, returning the value and a boolean indicating if the key exists. +func (s *ThreadSafeMap[K, V]) Get(key K) (V, bool) { + s.mu.RLock() + defer s.mu.RUnlock() + value, ok := s.m[key] + return value, ok +} + +// Insert inserts or updates a value for a key. +func (s *ThreadSafeMap[K, V]) Insert(key K, value V) { + s.mu.Lock() + defer s.mu.Unlock() + s.m[key] = value +} + +// Remove removes a key and its value from the map. +func (s *ThreadSafeMap[K, V]) Remove(key K) { + s.mu.Lock() + defer s.mu.Unlock() + delete(s.m, key) +} + +// Exists checks if a key exists in the map. +func (s *ThreadSafeMap[K, V]) Exists(key K) bool { + s.mu.RLock() + defer s.mu.RUnlock() + _, ok := s.m[key] + return ok +} + +// Len returns the number of elements in the map. 
+func (s *ThreadSafeMap[K, V]) Len() int { + s.mu.RLock() + defer s.mu.RUnlock() + return len(s.m) +} diff --git a/engine/access/rest_api_test.go b/engine/access/rest_api_test.go index 5d48f6091e4..6c68d3c0553 100644 --- a/engine/access/rest_api_test.go +++ b/engine/access/rest_api_test.go @@ -23,6 +23,7 @@ import ( "github.com/onflow/flow-go/engine/access/rest" "github.com/onflow/flow-go/engine/access/rest/common" "github.com/onflow/flow-go/engine/access/rest/http/request" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" @@ -136,6 +137,7 @@ func (suite *RestAPITestSuite) SetupTest() { RestConfig: rest.Config{ ListenAddress: unittest.DefaultAddress, }, + WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/rpc/engine.go b/engine/access/rpc/engine.go index 145e3d62143..37b60b1a4d3 100644 --- a/engine/access/rpc/engine.go +++ b/engine/access/rpc/engine.go @@ -14,6 +14,7 @@ import ( "github.com/onflow/flow-go/access" "github.com/onflow/flow-go/consensus/hotstuff/model" "github.com/onflow/flow-go/engine/access/rest" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc/backend" "github.com/onflow/flow-go/engine/access/state_stream" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" @@ -38,10 +39,11 @@ type Config struct { CollectionAddr string // the address of the upstream collection node HistoricalAccessAddrs string // the list of all access nodes from previous spork - BackendConfig backend.Config // configurable options for creating Backend - RestConfig rest.Config // the REST server configuration - MaxMsgSize uint // GRPC max message size - CompressorName string // GRPC compressor name + BackendConfig backend.Config // configurable options for creating Backend + RestConfig rest.Config // the REST server configuration + MaxMsgSize uint // GRPC max message size + CompressorName string // GRPC compressor name + WebSocketConfig websockets.Config } // Engine exposes the server with a simplified version of the Access API. @@ -75,7 +77,8 @@ type Engine struct { type Option func(*RPCEngineBuilder) // NewBuilder returns a new RPC engine builder. 
-func NewBuilder(log zerolog.Logger, +func NewBuilder( + log zerolog.Logger, state protocol.State, config Config, chainID flow.ChainID, @@ -240,8 +243,16 @@ func (e *Engine) serveREST(ctx irrecoverable.SignalerContext, ready component.Re e.log.Info().Str("rest_api_address", e.config.RestConfig.ListenAddress).Msg("starting REST server on address") - r, err := rest.NewServer(e.restHandler, e.config.RestConfig, e.log, e.chain, e.restCollector, e.stateStreamBackend, - e.stateStreamConfig) + r, err := rest.NewServer( + e.restHandler, + e.config.RestConfig, + e.log, + e.chain, + e.restCollector, + e.stateStreamBackend, + e.stateStreamConfig, + e.config.WebSocketConfig, + ) if err != nil { e.log.Err(err).Msg("failed to initialize the REST server") ctx.Throw(err) diff --git a/engine/access/rpc/rate_limit_test.go b/engine/access/rpc/rate_limit_test.go index 622b06e3f54..e4f923e98fb 100644 --- a/engine/access/rpc/rate_limit_test.go +++ b/engine/access/rpc/rate_limit_test.go @@ -21,6 +21,7 @@ import ( "google.golang.org/grpc/status" accessmock "github.com/onflow/flow-go/engine/access/mock" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" "github.com/onflow/flow-go/model/flow" @@ -115,6 +116,7 @@ func (suite *RateLimitTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, + WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/secure_grpcr_test.go b/engine/access/secure_grpcr_test.go index cc1d1a75cc8..aa92c5db052 100644 --- a/engine/access/secure_grpcr_test.go +++ b/engine/access/secure_grpcr_test.go @@ -19,6 +19,7 @@ import ( "github.com/onflow/crypto" accessmock "github.com/onflow/flow-go/engine/access/mock" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" @@ -110,6 +111,7 @@ func (suite *SecureGRPCTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, + WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server From b76c811c6def23c11700085b1bddb650ab1c4838 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Fri, 8 Nov 2024 10:39:33 +0200 Subject: [PATCH 02/36] fix issue after merge --- engine/access/rest/server.go | 2 +- engine/access/rest/websockets/config.go | 2 ++ engine/access/rest/websockets/handler_test.go | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/engine/access/rest/server.go b/engine/access/rest/server.go index 9fa07e63ff4..f23a683f39d 100644 --- a/engine/access/rest/server.go +++ b/engine/access/rest/server.go @@ -50,7 +50,7 @@ func NewServer(serverAPI access.API, builder.AddLegacyWebsocketsRoutes(stateStreamApi, chain, stateStreamConfig, config.MaxRequestSize) } - builder.AddWebsocketsRoute(chain, &wsConfig, stateStreamApi, stateStreamConfig) + builder.AddWebsocketsRoute(chain, &wsConfig, stateStreamApi, stateStreamConfig, config.MaxRequestSize) c := cors.New(cors.Options{ AllowedOrigins: []string{"*"}, diff --git 
a/engine/access/rest/websockets/config.go b/engine/access/rest/websockets/config.go index 8354ca4c11d..13138e54539 100644 --- a/engine/access/rest/websockets/config.go +++ b/engine/access/rest/websockets/config.go @@ -8,6 +8,7 @@ type Config struct { MaxSubscriptionsPerConnection uint64 MaxResponsesPerSecond uint64 SendMessageTimeout time.Duration + MaxRequestSize int64 } func NewDefaultWebsocketConfig() *Config { @@ -15,5 +16,6 @@ func NewDefaultWebsocketConfig() *Config { MaxSubscriptionsPerConnection: 1000, MaxResponsesPerSecond: 1000, SendMessageTimeout: 10 * time.Second, + MaxRequestSize: 1024, } } diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go index 41079159e40..396cfd00dac 100644 --- a/engine/access/rest/websockets/handler_test.go +++ b/engine/access/rest/websockets/handler_test.go @@ -15,7 +15,6 @@ import ( "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/state_stream/backend" streammock "github.com/onflow/flow-go/engine/access/state_stream/mock" - "github.com/onflow/flow-go/model/flow" "github.com/onflow/flow-go/utils/unittest" ) From f88cf9bb827b2d8da8de3525294ab6f040be1ad9 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Fri, 8 Nov 2024 11:11:33 +0200 Subject: [PATCH 03/36] generate mocks. add graceful shutdown for controller --- Makefile | 1 + engine/access/rest/websockets/controller.go | 20 ++++- .../data_provider/mock/data_provider.go | 75 +++++++++++++++++++ engine/access/rest/websockets/handler_test.go | 15 ++-- .../access/rest/websockets/threadsafe_map.go | 15 ++++ 5 files changed, 117 insertions(+), 9 deletions(-) create mode 100644 engine/access/rest/websockets/data_provider/mock/data_provider.go diff --git a/Makefile b/Makefile index d0557991462..bc14ed50f9f 100644 --- a/Makefile +++ b/Makefile @@ -203,6 +203,7 @@ generate-mocks: install-mock-generators mockery --name 'API' --dir="./engine/protocol" --case=underscore --output="./engine/protocol/mock" --outpkg="mock" mockery --name '.*' --dir="./engine/access/state_stream" --case=underscore --output="./engine/access/state_stream/mock" --outpkg="mock" mockery --name 'BlockTracker' --dir="./engine/access/subscription" --case=underscore --output="./engine/access/subscription/mock" --outpkg="mock" + mockery --name 'DataProvider' --dir="./engine/access/rest/websockets/data_provider" --case=underscore --output="./engine/access/rest/websockets/data_provider/mock" --outpkg="mock" mockery --name 'ExecutionDataTracker' --dir="./engine/access/subscription" --case=underscore --output="./engine/access/subscription/mock" --outpkg="mock" mockery --name 'ConnectionFactory' --dir="./engine/access/rpc/connection" --case=underscore --output="./engine/access/rpc/connection/mock" --outpkg="mock" mockery --name 'Communicator' --dir="./engine/access/rpc/backend" --case=underscore --output="./engine/access/rpc/backend/mock" --outpkg="mock" diff --git a/engine/access/rest/websockets/controller.go b/engine/access/rest/websockets/controller.go index 6d1c6f6416b..751e4ec82a1 100644 --- a/engine/access/rest/websockets/controller.go +++ b/engine/access/rest/websockets/controller.go @@ -59,14 +59,17 @@ func (c *Controller) writeMessagesToClient(ctx context.Context) { return case msg := <-c.communicationChannel: // TODO: handle 'response per second' limits - c.conn.WriteJSON(msg) + + err := c.conn.WriteJSON(msg) + if err != nil { + c.logger.Error().Err(err).Msg("error writing to connection") + } } } } func (c *Controller) 
readMessagesFromClient(ctx context.Context) { - defer close(c.communicationChannel) - defer c.conn.Close() + defer c.shutdownConnection() for { select { @@ -164,3 +167,14 @@ func (c *Controller) handleUnsubscribe(msg UnsubscribeMessageRequest) { } func (c *Controller) handleListSubscriptions(msg ListSubscriptionsMessageRequest) {} + +func (c *Controller) shutdownConnection() { + defer c.conn.Close() + defer close(c.communicationChannel) + + c.dataProviders.ForEach(func(_ uuid.UUID, dp dp.DataProvider) { + dp.Close() + }) + + c.dataProviders.Clear() +} diff --git a/engine/access/rest/websockets/data_provider/mock/data_provider.go b/engine/access/rest/websockets/data_provider/mock/data_provider.go new file mode 100644 index 00000000000..6a4aab6e130 --- /dev/null +++ b/engine/access/rest/websockets/data_provider/mock/data_provider.go @@ -0,0 +1,75 @@ +// Code generated by mockery v2.43.2. DO NOT EDIT. + +package mock + +import ( + uuid "github.com/google/uuid" + mock "github.com/stretchr/testify/mock" +) + +// DataProvider is an autogenerated mock type for the DataProvider type +type DataProvider struct { + mock.Mock +} + +// Close provides a mock function with given fields: +func (_m *DataProvider) Close() { + _m.Called() +} + +// ID provides a mock function with given fields: +func (_m *DataProvider) ID() uuid.UUID { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for ID") + } + + var r0 uuid.UUID + if rf, ok := ret.Get(0).(func() uuid.UUID); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(uuid.UUID) + } + } + + return r0 +} + +// Run provides a mock function with given fields: +func (_m *DataProvider) Run() { + _m.Called() +} + +// Topic provides a mock function with given fields: +func (_m *DataProvider) Topic() string { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Topic") + } + + var r0 string + if rf, ok := ret.Get(0).(func() string); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// NewDataProvider creates a new instance of DataProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewDataProvider(t interface { + mock.TestingT + Cleanup(func()) +}) *DataProvider { + mock := &DataProvider{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go index 396cfd00dac..80fd0cdb377 100644 --- a/engine/access/rest/websockets/handler_test.go +++ b/engine/access/rest/websockets/handler_test.go @@ -26,16 +26,19 @@ var ( type WsHandlerSuite struct { suite.Suite - logger zerolog.Logger - handler *websockets.Handler + logger zerolog.Logger + handler *websockets.Handler + wsConfig *websockets.Config + streamApi *streammock.API + streamConfig *backend.Config } func (s *WsHandlerSuite) SetupTest() { s.logger = unittest.Logger() - wsConfig := websockets.NewDefaultWebsocketConfig() - streamApi := streammock.NewAPI(s.T()) - streamConfig := backend.Config{} - s.handler = websockets.NewWebSocketHandler(s.logger, wsConfig, chainID.Chain(), streamApi, streamConfig, 1024) + s.wsConfig = websockets.NewDefaultWebsocketConfig() + s.streamApi = streammock.NewAPI(s.T()) + s.streamConfig = &backend.Config{} + s.handler = websockets.NewWebSocketHandler(s.logger, s.wsConfig, chainID.Chain(), s.streamApi, *s.streamConfig, 1024) } func TestWsHandlerSuite(t *testing.T) { diff --git a/engine/access/rest/websockets/threadsafe_map.go b/engine/access/rest/websockets/threadsafe_map.go index 3ab265a40fb..2c3f3438e40 100644 --- a/engine/access/rest/websockets/threadsafe_map.go +++ b/engine/access/rest/websockets/threadsafe_map.go @@ -53,3 +53,18 @@ func (s *ThreadSafeMap[K, V]) Len() int { defer s.mu.RUnlock() return len(s.m) } + +// ForEach applies a function to each key-value pair in the map. +func (s *ThreadSafeMap[K, V]) ForEach(f func(K, V)) { + s.mu.RLock() + defer s.mu.RUnlock() + for k, v := range s.m { + f(k, v) + } +} + +func (s *ThreadSafeMap[K, V]) Clear() { + s.mu.Lock() + defer s.mu.Unlock() + s.m = make(map[K]V) +} From 29380d0ab1466620e0a7f56f149d9a0c7aff3135 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Fri, 8 Nov 2024 11:29:11 +0200 Subject: [PATCH 04/36] check err when closing conn --- Makefile | 1 - engine/access/handle_irrecoverable_state_test.go | 2 +- engine/access/rest/websockets/controller.go | 8 +++++--- engine/access/rest/websockets/handler_test.go | 5 ++++- engine/access/rest_api_test.go | 2 +- 5 files changed, 11 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index bc14ed50f9f..e79dfc571f6 100644 --- a/Makefile +++ b/Makefile @@ -207,7 +207,6 @@ generate-mocks: install-mock-generators mockery --name 'ExecutionDataTracker' --dir="./engine/access/subscription" --case=underscore --output="./engine/access/subscription/mock" --outpkg="mock" mockery --name 'ConnectionFactory' --dir="./engine/access/rpc/connection" --case=underscore --output="./engine/access/rpc/connection/mock" --outpkg="mock" mockery --name 'Communicator' --dir="./engine/access/rpc/backend" --case=underscore --output="./engine/access/rpc/backend/mock" --outpkg="mock" - mockery --name '.*' --dir=model/fingerprint --case=underscore --output="./model/fingerprint/mock" --outpkg="mock" mockery --name 'ExecForkActor' --structname 'ExecForkActorMock' --dir=module/mempool/consensus/mock/ --case=underscore --output="./module/mempool/consensus/mock/" --outpkg="mock" mockery --name '.*' --dir=engine/verification/fetcher/ --case=underscore --output="./engine/verification/fetcher/mock" --outpkg="mockfetcher" diff --git 
a/engine/access/handle_irrecoverable_state_test.go b/engine/access/handle_irrecoverable_state_test.go index 78a5cd33931..9ab1e58870b 100644 --- a/engine/access/handle_irrecoverable_state_test.go +++ b/engine/access/handle_irrecoverable_state_test.go @@ -22,8 +22,8 @@ import ( accessmock "github.com/onflow/flow-go/engine/access/mock" "github.com/onflow/flow-go/engine/access/rest" - "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rest/router" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" diff --git a/engine/access/rest/websockets/controller.go b/engine/access/rest/websockets/controller.go index 751e4ec82a1..a8c5e1275fc 100644 --- a/engine/access/rest/websockets/controller.go +++ b/engine/access/rest/websockets/controller.go @@ -7,7 +7,6 @@ import ( "github.com/google/uuid" "github.com/gorilla/websocket" - "github.com/rs/zerolog" dp "github.com/onflow/flow-go/engine/access/rest/websockets/data_provider" @@ -169,12 +168,15 @@ func (c *Controller) handleUnsubscribe(msg UnsubscribeMessageRequest) { func (c *Controller) handleListSubscriptions(msg ListSubscriptionsMessageRequest) {} func (c *Controller) shutdownConnection() { - defer c.conn.Close() defer close(c.communicationChannel) + defer func(conn *websocket.Conn) { + if err := c.conn.Close(); err != nil { + c.logger.Error().Err(err).Msg("error closing connection") + } + }(c.conn) c.dataProviders.ForEach(func(_ uuid.UUID, dp dp.DataProvider) { dp.Close() }) - c.dataProviders.Clear() } diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go index 80fd0cdb377..a7fb4f109ff 100644 --- a/engine/access/rest/websockets/handler_test.go +++ b/engine/access/rest/websockets/handler_test.go @@ -56,7 +56,10 @@ func (s *WsHandlerSuite) TestSubscribeRequest() { defer server.Close() conn, _, err := ClientConnection(server.URL) - defer conn.Close() + defer func(conn *websocket.Conn) { + err := conn.Close() + require.NoError(s.T(), err) + }(conn) require.NoError(s.T(), err) args := map[string]interface{}{ diff --git a/engine/access/rest_api_test.go b/engine/access/rest_api_test.go index 56a8dde971d..c0f50e9a2b1 100644 --- a/engine/access/rest_api_test.go +++ b/engine/access/rest_api_test.go @@ -23,8 +23,8 @@ import ( "github.com/onflow/flow-go/engine/access/rest" "github.com/onflow/flow-go/engine/access/rest/common" "github.com/onflow/flow-go/engine/access/rest/http/request" - "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rest/router" + "github.com/onflow/flow-go/engine/access/rest/websockets" "github.com/onflow/flow-go/engine/access/rpc" "github.com/onflow/flow-go/engine/access/rpc/backend" statestreambackend "github.com/onflow/flow-go/engine/access/state_stream/backend" From b08370d2f86adcbfdb79b6a2f19595a57773d4b5 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Mon, 11 Nov 2024 17:51:12 +0200 Subject: [PATCH 05/36] Fixed comments * Use contexts as function arguments * Move models to folder for consistency * Change parse msg function * Simplify mock block provider to remove dedlock --- cmd/observer/node_builder/observer_builder.go | 2 +- cmd/util/cmd/run-script/cmd.go | 2 +- .../access/handle_irrecoverable_state_test.go | 2 +- .../integration_unsecure_grpc_server_test.go | 2 +- 
engine/access/rest/router/router.go | 2 +- engine/access/rest/server.go | 2 +- engine/access/rest/websockets/config.go | 4 +- engine/access/rest/websockets/controller.go | 98 +++++++++++-------- .../rest/websockets/data_provider/blocks.go | 5 +- .../rest/websockets/data_provider/provider.go | 4 +- engine/access/rest/websockets/handler.go | 9 +- engine/access/rest/websockets/handler_test.go | 21 ++-- engine/access/rest/websockets/models.go | 59 ----------- .../rest/websockets/models/base_message.go | 13 +++ .../websockets/models/list_subscriptions.go | 13 +++ .../rest/websockets/models/subscribe.go | 15 +++ .../websockets/models/subscription_entry.go | 7 ++ .../rest/websockets/models/unsubscribe.go | 13 +++ .../access/rest/websockets/threadsafe_map.go | 70 ------------- engine/access/rest_api_test.go | 2 +- engine/access/rpc/rate_limit_test.go | 2 +- engine/access/secure_grpcr_test.go | 2 +- engine/common/worker/worker_builder_test.go | 3 +- .../test/gossipsub/scoring/ihave_spam_test.go | 7 +- .../p2p/connection/connection_gater_test.go | 9 +- network/p2p/node/libp2pNode_test.go | 5 +- network/test/cohort1/network_test.go | 3 +- .../concurrent_map.go} | 30 +++--- 28 files changed, 181 insertions(+), 225 deletions(-) delete mode 100644 engine/access/rest/websockets/models.go create mode 100644 engine/access/rest/websockets/models/base_message.go create mode 100644 engine/access/rest/websockets/models/list_subscriptions.go create mode 100644 engine/access/rest/websockets/models/subscribe.go create mode 100644 engine/access/rest/websockets/models/subscription_entry.go create mode 100644 engine/access/rest/websockets/models/unsubscribe.go delete mode 100644 engine/access/rest/websockets/threadsafe_map.go rename utils/{unittest/protected_map.go => concurrentmap/concurrent_map.go} (58%) diff --git a/cmd/observer/node_builder/observer_builder.go b/cmd/observer/node_builder/observer_builder.go index 9ee3b1bd124..1bb6a8c04bb 100644 --- a/cmd/observer/node_builder/observer_builder.go +++ b/cmd/observer/node_builder/observer_builder.go @@ -254,7 +254,7 @@ func DefaultObserverServiceConfig() *ObserverServiceConfig { registerCacheSize: 0, programCacheSize: 0, registerDBPruneThreshold: pruner.DefaultThreshold, - websocketConfig: *websockets.NewDefaultWebsocketConfig(), + websocketConfig: websockets.NewDefaultWebsocketConfig(), } } diff --git a/cmd/util/cmd/run-script/cmd.go b/cmd/util/cmd/run-script/cmd.go index dc4d6e381a0..171f97e76b7 100644 --- a/cmd/util/cmd/run-script/cmd.go +++ b/cmd/util/cmd/run-script/cmd.go @@ -170,7 +170,7 @@ func run(*cobra.Command, []string) { metrics.NewNoopCollector(), nil, backend.Config{}, - *websockets.NewDefaultWebsocketConfig(), + websockets.NewDefaultWebsocketConfig(), ) if err != nil { log.Fatal().Err(err).Msg("failed to create server") diff --git a/engine/access/handle_irrecoverable_state_test.go b/engine/access/handle_irrecoverable_state_test.go index 9ab1e58870b..456c5cd97fd 100644 --- a/engine/access/handle_irrecoverable_state_test.go +++ b/engine/access/handle_irrecoverable_state_test.go @@ -110,7 +110,7 @@ func (suite *IrrecoverableStateTestSuite) SetupTest() { RestConfig: rest.Config{ ListenAddress: unittest.DefaultAddress, }, - WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), + WebSocketConfig: websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/integration_unsecure_grpc_server_test.go b/engine/access/integration_unsecure_grpc_server_test.go index 
98de205ad66..3c4aeca97d4 100644 --- a/engine/access/integration_unsecure_grpc_server_test.go +++ b/engine/access/integration_unsecure_grpc_server_test.go @@ -139,7 +139,7 @@ func (suite *SameGRPCPortTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, - WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), + WebSocketConfig: websockets.NewDefaultWebsocketConfig(), } blockCount := 5 diff --git a/engine/access/rest/router/router.go b/engine/access/rest/router/router.go index d5b37781f7c..a2d81cb0a58 100644 --- a/engine/access/rest/router/router.go +++ b/engine/access/rest/router/router.go @@ -88,7 +88,7 @@ func (b *RouterBuilder) AddLegacyWebsocketsRoutes( func (b *RouterBuilder) AddWebsocketsRoute( chain flow.Chain, - config *websockets.Config, + config websockets.Config, streamApi state_stream.API, streamConfig backend.Config, maxRequestSize int64, diff --git a/engine/access/rest/server.go b/engine/access/rest/server.go index f23a683f39d..0e582d0bee4 100644 --- a/engine/access/rest/server.go +++ b/engine/access/rest/server.go @@ -50,7 +50,7 @@ func NewServer(serverAPI access.API, builder.AddLegacyWebsocketsRoutes(stateStreamApi, chain, stateStreamConfig, config.MaxRequestSize) } - builder.AddWebsocketsRoute(chain, &wsConfig, stateStreamApi, stateStreamConfig, config.MaxRequestSize) + builder.AddWebsocketsRoute(chain, wsConfig, stateStreamApi, stateStreamConfig, config.MaxRequestSize) c := cors.New(cors.Options{ AllowedOrigins: []string{"*"}, diff --git a/engine/access/rest/websockets/config.go b/engine/access/rest/websockets/config.go index 13138e54539..7f563ba94b9 100644 --- a/engine/access/rest/websockets/config.go +++ b/engine/access/rest/websockets/config.go @@ -11,8 +11,8 @@ type Config struct { MaxRequestSize int64 } -func NewDefaultWebsocketConfig() *Config { - return &Config{ +func NewDefaultWebsocketConfig() Config { + return Config{ MaxSubscriptionsPerConnection: 1000, MaxResponsesPerSecond: 1000, SendMessageTimeout: 10 * time.Second, diff --git a/engine/access/rest/websockets/controller.go b/engine/access/rest/websockets/controller.go index a8c5e1275fc..87ceae35b7a 100644 --- a/engine/access/rest/websockets/controller.go +++ b/engine/access/rest/websockets/controller.go @@ -10,45 +10,44 @@ import ( "github.com/rs/zerolog" dp "github.com/onflow/flow-go/engine/access/rest/websockets/data_provider" + "github.com/onflow/flow-go/engine/access/rest/websockets/models" "github.com/onflow/flow-go/engine/access/state_stream" "github.com/onflow/flow-go/engine/access/state_stream/backend" + "github.com/onflow/flow-go/utils/concurrentmap" ) type Controller struct { - ctx context.Context logger zerolog.Logger - config *Config + config Config conn *websocket.Conn communicationChannel chan interface{} - dataProviders *ThreadSafeMap[uuid.UUID, dp.DataProvider] + dataProviders *concurrentmap.ConcurrentMap[uuid.UUID, dp.DataProvider] dataProvidersFactory *dp.Factory } func NewWebSocketController( - ctx context.Context, logger zerolog.Logger, - config *Config, + config Config, streamApi state_stream.API, streamConfig backend.Config, conn *websocket.Conn, ) *Controller { return &Controller{ - ctx: ctx, logger: logger.With().Str("component", "websocket-controller").Logger(), config: config, conn: conn, communicationChannel: make(chan interface{}), //TODO: should it be buffered chan? 
- dataProviders: NewThreadSafeMap[uuid.UUID, dp.DataProvider](), + dataProviders: concurrentmap.NewConcurrentMap[uuid.UUID, dp.DataProvider](), dataProvidersFactory: dp.NewDataProviderFactory(logger, streamApi, streamConfig), } } // HandleConnection manages the WebSocket connection, adding context and error handling. -func (c *Controller) HandleConnection() { +func (c *Controller) HandleConnection(ctx context.Context) { //TODO: configure the connection with ping-pong and deadlines - - go c.readMessagesFromClient(c.ctx) - go c.writeMessagesToClient(c.ctx) + //TODO: spin up a response limit tracker routine + go c.readMessagesFromClient(ctx) + go c.writeMessagesToClient(ctx) } func (c *Controller) writeMessagesToClient(ctx context.Context) { @@ -85,13 +84,13 @@ func (c *Controller) readMessagesFromClient(ctx context.Context) { return } - baseMsg, err := c.parseMessage(msg) + baseMsg, validatedMsg, err := c.parseAndValidateMessage(msg) if err != nil { - c.logger.Warn().Err(err).Msg("error parsing base message") + c.logger.Debug().Err(err).Msg("error parsing and validating client message") return } - if err := c.dispatchAction(baseMsg.Action, msg); err != nil { + if err := c.handleAction(ctx, baseMsg.Action, validatedMsg); err != nil { c.logger.Warn().Err(err).Str("action", baseMsg.Action).Msg("error handling action") } } @@ -106,55 +105,68 @@ func (c *Controller) readMessage() (json.RawMessage, error) { return message, nil } -func (c *Controller) parseMessage(message json.RawMessage) (BaseMessageRequest, error) { - var baseMsg BaseMessageRequest +func (c *Controller) parseAndValidateMessage(message json.RawMessage) (models.BaseMessageRequest, interface{}, error) { + var baseMsg models.BaseMessageRequest if err := json.Unmarshal(message, &baseMsg); err != nil { - return BaseMessageRequest{}, fmt.Errorf("error unmarshalling base message: %w", err) + return models.BaseMessageRequest{}, nil, fmt.Errorf("error unmarshalling base message: %w", err) } - return baseMsg, nil -} -// dispatchAction routes the action to the appropriate handler based on the action type. 
-func (c *Controller) dispatchAction(action string, message json.RawMessage) error { - switch action { + var validatedMsg interface{} + switch baseMsg.Action { case "subscribe": - var subscribeMsg SubscribeMessageRequest + var subscribeMsg models.SubscribeMessageRequest if err := json.Unmarshal(message, &subscribeMsg); err != nil { - return fmt.Errorf("error unmarshalling subscribe message: %w", err) + return baseMsg, nil, fmt.Errorf("error unmarshalling subscribe message: %w", err) } - c.handleSubscribe(subscribeMsg) + //TODO: add validation logic for `topic` field + validatedMsg = subscribeMsg case "unsubscribe": - var unsubscribeMsg UnsubscribeMessageRequest + var unsubscribeMsg models.UnsubscribeMessageRequest if err := json.Unmarshal(message, &unsubscribeMsg); err != nil { - return fmt.Errorf("error unmarshalling unsubscribe message: %w", err) + return baseMsg, nil, fmt.Errorf("error unmarshalling unsubscribe message: %w", err) } - c.handleUnsubscribe(unsubscribeMsg) + validatedMsg = unsubscribeMsg case "list_subscriptions": - var listMsg ListSubscriptionsMessageRequest + var listMsg models.ListSubscriptionsMessageRequest if err := json.Unmarshal(message, &listMsg); err != nil { - return fmt.Errorf("error unmarshalling list subscriptions message: %w", err) + return baseMsg, nil, fmt.Errorf("error unmarshalling list subscriptions message: %w", err) } - c.handleListSubscriptions(listMsg) + validatedMsg = listMsg + + default: + c.logger.Debug().Str("action", baseMsg.Action).Msg("unknown action type") + return baseMsg, nil, fmt.Errorf("unknown action type: %s", baseMsg.Action) + } + + return baseMsg, validatedMsg, nil +} +func (c *Controller) handleAction(ctx context.Context, action string, message interface{}) error { + switch action { + case "subscribe": + c.handleSubscribe(ctx, message.(models.SubscribeMessageRequest)) + case "unsubscribe": + c.handleUnsubscribe(ctx, message.(models.UnsubscribeMessageRequest)) + case "list_subscriptions": + c.handleListSubscriptions(ctx, message.(models.ListSubscriptionsMessageRequest)) default: - c.logger.Warn().Str("action", action).Msg("unknown action type") return fmt.Errorf("unknown action type: %s", action) } return nil } -func (c *Controller) handleSubscribe(msg SubscribeMessageRequest) { - dp := c.dataProvidersFactory.NewDataProvider(c.ctx, c.communicationChannel, msg.Topic) - c.dataProviders.Insert(dp.ID(), dp) - dp.Run() +func (c *Controller) handleSubscribe(ctx context.Context, msg models.SubscribeMessageRequest) { + dp := c.dataProvidersFactory.NewDataProvider(ctx, c.communicationChannel, msg.Topic) + c.dataProviders.Add(dp.ID(), dp) + dp.Run(ctx) } -func (c *Controller) handleUnsubscribe(msg UnsubscribeMessageRequest) { +func (c *Controller) handleUnsubscribe(ctx context.Context, msg models.UnsubscribeMessageRequest) { id, err := uuid.Parse(msg.ID) if err != nil { - c.logger.Warn().Err(err).Str("topic", msg.Topic).Msg("error parsing message ID") + c.logger.Debug().Err(err).Msg("error parsing message ID") return } @@ -165,7 +177,8 @@ func (c *Controller) handleUnsubscribe(msg UnsubscribeMessageRequest) { } } -func (c *Controller) handleListSubscriptions(msg ListSubscriptionsMessageRequest) {} +func (c *Controller) handleListSubscriptions(ctx context.Context, msg models.ListSubscriptionsMessageRequest) { +} func (c *Controller) shutdownConnection() { defer close(c.communicationChannel) @@ -175,8 +188,13 @@ func (c *Controller) shutdownConnection() { } }(c.conn) - c.dataProviders.ForEach(func(_ uuid.UUID, dp dp.DataProvider) { + err := 
c.dataProviders.ForEach(func(_ uuid.UUID, dp dp.DataProvider) error { dp.Close() + return nil }) + if err != nil { + c.logger.Error().Err(err).Msg("error closing data provider") + } + c.dataProviders.Clear() } diff --git a/engine/access/rest/websockets/data_provider/blocks.go b/engine/access/rest/websockets/data_provider/blocks.go index 4c23bd4b587..7ec83e30fcd 100644 --- a/engine/access/rest/websockets/data_provider/blocks.go +++ b/engine/access/rest/websockets/data_provider/blocks.go @@ -38,13 +38,12 @@ func NewMockBlockProvider( } } -func (p *MockBlockProvider) Run() { +func (p *MockBlockProvider) Run(_ context.Context) { select { case <-p.ctx.Done(): return default: - p.ch <- "hello" - p.ch <- "world" + p.ch <- "hello world" } } diff --git a/engine/access/rest/websockets/data_provider/provider.go b/engine/access/rest/websockets/data_provider/provider.go index e919af590b6..ce2914140ba 100644 --- a/engine/access/rest/websockets/data_provider/provider.go +++ b/engine/access/rest/websockets/data_provider/provider.go @@ -1,11 +1,13 @@ package data_provider import ( + "context" + "github.com/google/uuid" ) type DataProvider interface { - Run() + Run(ctx context.Context) ID() uuid.UUID Topic() string Close() diff --git a/engine/access/rest/websockets/handler.go b/engine/access/rest/websockets/handler.go index 7bc381349f9..ff385f826ef 100644 --- a/engine/access/rest/websockets/handler.go +++ b/engine/access/rest/websockets/handler.go @@ -17,7 +17,7 @@ type Handler struct { *common.HttpHandler logger zerolog.Logger - websocketConfig *Config + websocketConfig Config streamApi state_stream.API streamConfig backend.Config } @@ -26,7 +26,7 @@ var _ http.Handler = (*Handler)(nil) func NewWebSocketHandler( logger zerolog.Logger, - config *Config, + config Config, chain flow.Chain, streamApi state_stream.API, streamConfig backend.Config, @@ -64,7 +64,6 @@ func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { return } - ctx := context.Background() - controller := NewWebSocketController(ctx, logger, h.websocketConfig, h.streamApi, h.streamConfig, conn) - controller.HandleConnection() + controller := NewWebSocketController(logger, h.websocketConfig, h.streamApi, h.streamConfig, conn) + controller.HandleConnection(context.TODO()) } diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go index a7fb4f109ff..ebc83b00bdd 100644 --- a/engine/access/rest/websockets/handler_test.go +++ b/engine/access/rest/websockets/handler_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/suite" "github.com/onflow/flow-go/engine/access/rest/websockets" + "github.com/onflow/flow-go/engine/access/rest/websockets/models" "github.com/onflow/flow-go/engine/access/state_stream/backend" streammock "github.com/onflow/flow-go/engine/access/state_stream/mock" "github.com/onflow/flow-go/model/flow" @@ -28,17 +29,17 @@ type WsHandlerSuite struct { logger zerolog.Logger handler *websockets.Handler - wsConfig *websockets.Config + wsConfig websockets.Config streamApi *streammock.API - streamConfig *backend.Config + streamConfig backend.Config } func (s *WsHandlerSuite) SetupTest() { s.logger = unittest.Logger() s.wsConfig = websockets.NewDefaultWebsocketConfig() s.streamApi = streammock.NewAPI(s.T()) - s.streamConfig = &backend.Config{} - s.handler = websockets.NewWebSocketHandler(s.logger, s.wsConfig, chainID.Chain(), s.streamApi, *s.streamConfig, 1024) + s.streamConfig = backend.Config{} + s.handler = websockets.NewWebSocketHandler(s.logger, s.wsConfig, 
chainID.Chain(), s.streamApi, s.streamConfig, 1024) } func TestWsHandlerSuite(t *testing.T) { @@ -65,8 +66,8 @@ func (s *WsHandlerSuite) TestSubscribeRequest() { args := map[string]interface{}{ "start_block_height": 10, } - body := websockets.SubscribeMessageRequest{ - BaseMessageRequest: websockets.BaseMessageRequest{Action: "subscribe"}, + body := models.SubscribeMessageRequest{ + BaseMessageRequest: models.BaseMessageRequest{Action: "subscribe"}, Topic: "blocks", Arguments: args, } @@ -80,12 +81,6 @@ func (s *WsHandlerSuite) TestSubscribeRequest() { require.NoError(s.T(), err) actualMsg := strings.Trim(string(msg), "\n\"\\ ") - require.Equal(s.T(), "hello", actualMsg) - - _, msg, err = conn.ReadMessage() - require.NoError(s.T(), err) - - actualMsg = strings.Trim(string(msg), "\n\"\\ ") - require.Equal(s.T(), "world", actualMsg) + require.Equal(s.T(), "hello world", actualMsg) }) } diff --git a/engine/access/rest/websockets/models.go b/engine/access/rest/websockets/models.go deleted file mode 100644 index 42abb8b7241..00000000000 --- a/engine/access/rest/websockets/models.go +++ /dev/null @@ -1,59 +0,0 @@ -package websockets - -// BaseMessageRequest represents a base structure for incoming messages. -type BaseMessageRequest struct { - Action string `json:"action"` // Action type of the request -} - -// BaseMessageResponse represents a base structure for outgoing messages. -type BaseMessageResponse struct { - Action string `json:"action,omitempty"` // Action type of the response - Success bool `json:"success"` // Indicates success or failure - ErrorMessage string `json:"error_message,omitempty"` // Error message, if any -} - -// SubscribeMessageRequest represents a request to subscribe to a topic. -type SubscribeMessageRequest struct { - BaseMessageRequest - Topic string `json:"topic"` // Topic to subscribe to - Arguments map[string]interface{} `json:"arguments"` // Additional arguments for subscription -} - -// SubscribeMessageResponse represents the response to a subscription request. -type SubscribeMessageResponse struct { - BaseMessageResponse - Topic string `json:"topic"` // Topic of the subscription - ID string `json:"id"` // Unique subscription ID -} - -// UnsubscribeMessageRequest represents a request to unsubscribe from a topic. -type UnsubscribeMessageRequest struct { - BaseMessageRequest - Topic string `json:"topic"` // Topic to unsubscribe from - ID string `json:"id"` // Unique subscription ID -} - -// UnsubscribeMessageResponse represents the response to an unsubscription request. -type UnsubscribeMessageResponse struct { - BaseMessageResponse - Topic string `json:"topic"` // Topic of the unsubscription - ID string `json:"id"` // Unique subscription ID -} - -// ListSubscriptionsMessageRequest represents a request to list active subscriptions. -type ListSubscriptionsMessageRequest struct { - BaseMessageRequest -} - -// SubscriptionEntry represents an active subscription entry. -type SubscriptionEntry struct { - Topic string `json:"topic,omitempty"` // Topic of the subscription - ID string `json:"id,omitempty"` // Unique subscription ID -} - -// ListSubscriptionsMessageResponse is the structure used to respond to list_subscriptions requests. -// It contains a list of active subscriptions for the current WebSocket connection. 
-type ListSubscriptionsMessageResponse struct { - BaseMessageResponse - Subscriptions []*SubscriptionEntry `json:"subscriptions,omitempty"` -} diff --git a/engine/access/rest/websockets/models/base_message.go b/engine/access/rest/websockets/models/base_message.go new file mode 100644 index 00000000000..f56d62fda8f --- /dev/null +++ b/engine/access/rest/websockets/models/base_message.go @@ -0,0 +1,13 @@ +package models + +// BaseMessageRequest represents a base structure for incoming messages. +type BaseMessageRequest struct { + Action string `json:"action"` // Action type of the request +} + +// BaseMessageResponse represents a base structure for outgoing messages. +type BaseMessageResponse struct { + Action string `json:"action,omitempty"` // Action type of the response + Success bool `json:"success"` // Indicates success or failure + ErrorMessage string `json:"error_message,omitempty"` // Error message, if any +} diff --git a/engine/access/rest/websockets/models/list_subscriptions.go b/engine/access/rest/websockets/models/list_subscriptions.go new file mode 100644 index 00000000000..26174869585 --- /dev/null +++ b/engine/access/rest/websockets/models/list_subscriptions.go @@ -0,0 +1,13 @@ +package models + +// ListSubscriptionsMessageRequest represents a request to list active subscriptions. +type ListSubscriptionsMessageRequest struct { + BaseMessageRequest +} + +// ListSubscriptionsMessageResponse is the structure used to respond to list_subscriptions requests. +// It contains a list of active subscriptions for the current WebSocket connection. +type ListSubscriptionsMessageResponse struct { + BaseMessageResponse + Subscriptions []*SubscriptionEntry `json:"subscriptions,omitempty"` +} diff --git a/engine/access/rest/websockets/models/subscribe.go b/engine/access/rest/websockets/models/subscribe.go new file mode 100644 index 00000000000..993bd63b811 --- /dev/null +++ b/engine/access/rest/websockets/models/subscribe.go @@ -0,0 +1,15 @@ +package models + +// SubscribeMessageRequest represents a request to subscribe to a topic. +type SubscribeMessageRequest struct { + BaseMessageRequest + Topic string `json:"topic"` // Topic to subscribe to + Arguments map[string]interface{} `json:"arguments"` // Additional arguments for subscription +} + +// SubscribeMessageResponse represents the response to a subscription request. +type SubscribeMessageResponse struct { + BaseMessageResponse + Topic string `json:"topic"` // Topic of the subscription + ID string `json:"id"` // Unique subscription ID +} diff --git a/engine/access/rest/websockets/models/subscription_entry.go b/engine/access/rest/websockets/models/subscription_entry.go new file mode 100644 index 00000000000..d3f2b352bb7 --- /dev/null +++ b/engine/access/rest/websockets/models/subscription_entry.go @@ -0,0 +1,7 @@ +package models + +// SubscriptionEntry represents an active subscription entry. +type SubscriptionEntry struct { + Topic string `json:"topic,omitempty"` // Topic of the subscription + ID string `json:"id,omitempty"` // Unique subscription ID +} diff --git a/engine/access/rest/websockets/models/unsubscribe.go b/engine/access/rest/websockets/models/unsubscribe.go new file mode 100644 index 00000000000..2024bb922e0 --- /dev/null +++ b/engine/access/rest/websockets/models/unsubscribe.go @@ -0,0 +1,13 @@ +package models + +// UnsubscribeMessageRequest represents a request to unsubscribe from a topic. 
+type UnsubscribeMessageRequest struct { + BaseMessageRequest + ID string `json:"id"` // Unique subscription ID +} + +// UnsubscribeMessageResponse represents the response to an unsubscription request. +type UnsubscribeMessageResponse struct { + BaseMessageResponse + ID string `json:"id"` // Unique subscription ID +} diff --git a/engine/access/rest/websockets/threadsafe_map.go b/engine/access/rest/websockets/threadsafe_map.go deleted file mode 100644 index 2c3f3438e40..00000000000 --- a/engine/access/rest/websockets/threadsafe_map.go +++ /dev/null @@ -1,70 +0,0 @@ -package websockets - -import ( - "sync" -) - -// ThreadSafeMap is a thread-safe map with read-write locking. -type ThreadSafeMap[K comparable, V any] struct { - mu sync.RWMutex - m map[K]V -} - -// NewThreadSafeMap initializes a new ThreadSafeMap. -func NewThreadSafeMap[K comparable, V any]() *ThreadSafeMap[K, V] { - return &ThreadSafeMap[K, V]{ - m: make(map[K]V), - } -} - -// Get retrieves a value for a key, returning the value and a boolean indicating if the key exists. -func (s *ThreadSafeMap[K, V]) Get(key K) (V, bool) { - s.mu.RLock() - defer s.mu.RUnlock() - value, ok := s.m[key] - return value, ok -} - -// Insert inserts or updates a value for a key. -func (s *ThreadSafeMap[K, V]) Insert(key K, value V) { - s.mu.Lock() - defer s.mu.Unlock() - s.m[key] = value -} - -// Remove removes a key and its value from the map. -func (s *ThreadSafeMap[K, V]) Remove(key K) { - s.mu.Lock() - defer s.mu.Unlock() - delete(s.m, key) -} - -// Exists checks if a key exists in the map. -func (s *ThreadSafeMap[K, V]) Exists(key K) bool { - s.mu.RLock() - defer s.mu.RUnlock() - _, ok := s.m[key] - return ok -} - -// Len returns the number of elements in the map. -func (s *ThreadSafeMap[K, V]) Len() int { - s.mu.RLock() - defer s.mu.RUnlock() - return len(s.m) -} - -// ForEach applies a function to each key-value pair in the map. 
-func (s *ThreadSafeMap[K, V]) ForEach(f func(K, V)) { - s.mu.RLock() - defer s.mu.RUnlock() - for k, v := range s.m { - f(k, v) - } -} - -func (s *ThreadSafeMap[K, V]) Clear() { - s.mu.Lock() - defer s.mu.Unlock() - s.m = make(map[K]V) -} diff --git a/engine/access/rest_api_test.go b/engine/access/rest_api_test.go index c0f50e9a2b1..651adb41a63 100644 --- a/engine/access/rest_api_test.go +++ b/engine/access/rest_api_test.go @@ -138,7 +138,7 @@ func (suite *RestAPITestSuite) SetupTest() { RestConfig: rest.Config{ ListenAddress: unittest.DefaultAddress, }, - WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), + WebSocketConfig: websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/rpc/rate_limit_test.go b/engine/access/rpc/rate_limit_test.go index e4f923e98fb..7148cdfefad 100644 --- a/engine/access/rpc/rate_limit_test.go +++ b/engine/access/rpc/rate_limit_test.go @@ -116,7 +116,7 @@ func (suite *RateLimitTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, - WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), + WebSocketConfig: websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/access/secure_grpcr_test.go b/engine/access/secure_grpcr_test.go index aa92c5db052..6ffa8f8d324 100644 --- a/engine/access/secure_grpcr_test.go +++ b/engine/access/secure_grpcr_test.go @@ -111,7 +111,7 @@ func (suite *SecureGRPCTestSuite) SetupTest() { UnsecureGRPCListenAddr: unittest.DefaultAddress, SecureGRPCListenAddr: unittest.DefaultAddress, HTTPListenAddr: unittest.DefaultAddress, - WebSocketConfig: *websockets.NewDefaultWebsocketConfig(), + WebSocketConfig: websockets.NewDefaultWebsocketConfig(), } // generate a server certificate that will be served by the GRPC server diff --git a/engine/common/worker/worker_builder_test.go b/engine/common/worker/worker_builder_test.go index c08da0769c3..160f23844f5 100644 --- a/engine/common/worker/worker_builder_test.go +++ b/engine/common/worker/worker_builder_test.go @@ -14,6 +14,7 @@ import ( "github.com/onflow/flow-go/module/irrecoverable" "github.com/onflow/flow-go/module/mempool/queue" "github.com/onflow/flow-go/module/metrics" + "github.com/onflow/flow-go/utils/concurrentmap" "github.com/onflow/flow-go/utils/unittest" ) @@ -115,7 +116,7 @@ func TestWorkerPool_TwoWorkers_ConcurrentEvents(t *testing.T) { } q := queue.NewHeroStore(uint32(size), unittest.Logger(), metrics.NewNoopCollector()) - distributedEvents := unittest.NewProtectedMap[string, struct{}]() + distributedEvents := concurrentmap.NewConcurrentMap[string, struct{}]() allEventsDistributed := sync.WaitGroup{} allEventsDistributed.Add(size) diff --git a/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go b/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go index c43b7435f55..5f2ff0f0e6d 100644 --- a/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go +++ b/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go @@ -19,6 +19,7 @@ import ( "github.com/onflow/flow-go/network/channels" "github.com/onflow/flow-go/network/p2p" p2ptest "github.com/onflow/flow-go/network/p2p/test" + "github.com/onflow/flow-go/utils/concurrentmap" "github.com/onflow/flow-go/utils/unittest" ) @@ -36,7 +37,7 @@ func TestGossipSubIHaveBrokenPromises_Below_Threshold(t 
*testing.T) { sporkId := unittest.IdentifierFixture() blockTopic := channels.TopicFromChannel(channels.PushBlocks, sporkId) - receivedIWants := unittest.NewProtectedMap[string, struct{}]() + receivedIWants := concurrentmap.NewConcurrentMap[string, struct{}]() idProvider := unittest.NewUpdatableIDProvider(flow.IdentityList{}) spammer := corruptlibp2p.NewGossipSubRouterSpammerWithRpcInspector(t, sporkId, role, idProvider, func(id peer.ID, rpc *corrupt.RPC) error { // override rpc inspector of the spammer node to keep track of the iwants it has received. @@ -188,7 +189,7 @@ func TestGossipSubIHaveBrokenPromises_Above_Threshold(t *testing.T) { sporkId := unittest.IdentifierFixture() blockTopic := channels.TopicFromChannel(channels.PushBlocks, sporkId) - receivedIWants := unittest.NewProtectedMap[string, struct{}]() + receivedIWants := concurrentmap.NewConcurrentMap[string, struct{}]() idProvider := unittest.NewUpdatableIDProvider(flow.IdentityList{}) spammer := corruptlibp2p.NewGossipSubRouterSpammerWithRpcInspector(t, sporkId, role, idProvider, func(id peer.ID, rpc *corrupt.RPC) error { // override rpc inspector of the spammer node to keep track of the iwants it has received. @@ -437,7 +438,7 @@ func TestGossipSubIHaveBrokenPromises_Above_Threshold(t *testing.T) { func spamIHaveBrokenPromise(t *testing.T, spammer *corruptlibp2p.GossipSubRouterSpammer, topic string, - receivedIWants *unittest.ProtectedMap[string, struct{}], + receivedIWants *concurrentmap.ConcurrentMap[string, struct{}], victimNode p2p.LibP2PNode) { rpcCount := 10 // we can't send more than one iHave per RPC in this test, as each iHave should have a distinct topic, and we only have one subscribed topic. diff --git a/network/p2p/connection/connection_gater_test.go b/network/p2p/connection/connection_gater_test.go index ed8777d3f90..7794caa8110 100644 --- a/network/p2p/connection/connection_gater_test.go +++ b/network/p2p/connection/connection_gater_test.go @@ -24,6 +24,7 @@ import ( mockp2p "github.com/onflow/flow-go/network/p2p/mock" p2ptest "github.com/onflow/flow-go/network/p2p/test" "github.com/onflow/flow-go/network/p2p/unicast/stream" + "github.com/onflow/flow-go/utils/concurrentmap" "github.com/onflow/flow-go/utils/unittest" ) @@ -35,7 +36,7 @@ func TestConnectionGating(t *testing.T) { sporkID := unittest.IdentifierFixture() idProvider := mockmodule.NewIdentityProvider(t) // create 2 nodes - node1Peers := unittest.NewProtectedMap[peer.ID, struct{}]() + node1Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() node1, node1Id := p2ptest.NodeFixture( t, sporkID, @@ -49,7 +50,7 @@ func TestConnectionGating(t *testing.T) { }))) idProvider.On("ByPeerID", node1.ID()).Return(&node1Id, true).Maybe() - node2Peers := unittest.NewProtectedMap[peer.ID, struct{}]() + node2Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() node2, node2Id := p2ptest.NodeFixture( t, sporkID, @@ -246,7 +247,7 @@ func TestConnectionGater_InterceptUpgrade(t *testing.T) { inbounds := make([]chan string, 0, count) identities := make(flow.IdentityList, 0, count) - disallowedPeerIds := unittest.NewProtectedMap[peer.ID, struct{}]() + disallowedPeerIds := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() allPeerIds := make(peer.IDSlice, 0, count) idProvider := mockmodule.NewIdentityProvider(t) connectionGater := mockp2p.NewConnectionGater(t) @@ -331,7 +332,7 @@ func TestConnectionGater_Disallow_Integration(t *testing.T) { ids := flow.IdentityList{} inbounds := make([]chan string, 0, 5) - disallowedList := 
unittest.NewProtectedMap[*flow.Identity, struct{}]() + disallowedList := concurrentmap.NewConcurrentMap[*flow.Identity, struct{}]() for i := 0; i < count; i++ { handler, inbound := p2ptest.StreamHandlerFixture(t) diff --git a/network/p2p/node/libp2pNode_test.go b/network/p2p/node/libp2pNode_test.go index 9a538bd269b..b0c08560e43 100644 --- a/network/p2p/node/libp2pNode_test.go +++ b/network/p2p/node/libp2pNode_test.go @@ -24,6 +24,7 @@ import ( p2ptest "github.com/onflow/flow-go/network/p2p/test" "github.com/onflow/flow-go/network/p2p/utils" validator "github.com/onflow/flow-go/network/validator/pubsub" + "github.com/onflow/flow-go/utils/concurrentmap" "github.com/onflow/flow-go/utils/unittest" ) @@ -158,7 +159,7 @@ func TestConnGater(t *testing.T) { sporkID := unittest.IdentifierFixture() idProvider := mockmodule.NewIdentityProvider(t) - node1Peers := unittest.NewProtectedMap[peer.ID, struct{}]() + node1Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() node1, identity1 := p2ptest.NodeFixture(t, sporkID, t.Name(), idProvider, p2ptest.WithConnectionGater(p2ptest.NewConnectionGater(idProvider, func(pid peer.ID) error { if !node1Peers.Has(pid) { return fmt.Errorf("peer id not found: %s", p2plogging.PeerId(pid)) @@ -173,7 +174,7 @@ func TestConnGater(t *testing.T) { node1Info, err := utils.PeerAddressInfo(identity1.IdentitySkeleton) assert.NoError(t, err) - node2Peers := unittest.NewProtectedMap[peer.ID, struct{}]() + node2Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() node2, identity2 := p2ptest.NodeFixture(t, sporkID, t.Name(), idProvider, p2ptest.WithConnectionGater(p2ptest.NewConnectionGater(idProvider, func(pid peer.ID) error { if !node2Peers.Has(pid) { return fmt.Errorf("id not found: %s", p2plogging.PeerId(pid)) diff --git a/network/test/cohort1/network_test.go b/network/test/cohort1/network_test.go index bffd3ac52b7..f546dcfa54d 100644 --- a/network/test/cohort1/network_test.go +++ b/network/test/cohort1/network_test.go @@ -40,6 +40,7 @@ import ( "github.com/onflow/flow-go/network/p2p/unicast/ratelimit" "github.com/onflow/flow-go/network/p2p/utils/ratelimiter" "github.com/onflow/flow-go/network/underlay" + "github.com/onflow/flow-go/utils/concurrentmap" "github.com/onflow/flow-go/utils/unittest" ) @@ -617,7 +618,7 @@ func (suite *NetworkTestSuite) MultiPing(count int) { senderNodeIndex := 0 targetNodeIndex := suite.size - 1 - receivedPayloads := unittest.NewProtectedMap[string, struct{}]() // keep track of unique payloads received. + receivedPayloads := concurrentmap.NewConcurrentMap[string, struct{}]() // keep track of unique payloads received. // regex to extract the payload from the message regex := regexp.MustCompile(`^hello from: \d`) diff --git a/utils/unittest/protected_map.go b/utils/concurrentmap/concurrent_map.go similarity index 58% rename from utils/unittest/protected_map.go rename to utils/concurrentmap/concurrent_map.go index a2af2f5f513..fb946733a24 100644 --- a/utils/unittest/protected_map.go +++ b/utils/concurrentmap/concurrent_map.go @@ -1,36 +1,36 @@ -package unittest +package concurrentmap import "sync" -// ProtectedMap is a thread-safe map. -type ProtectedMap[K comparable, V any] struct { +// ConcurrentMap is a thread-safe map. 
+type ConcurrentMap[K comparable, V any] struct { mu sync.RWMutex m map[K]V } -// NewProtectedMap returns a new ProtectedMap with the given types -func NewProtectedMap[K comparable, V any]() *ProtectedMap[K, V] { - return &ProtectedMap[K, V]{ +// NewConcurrentMap returns a new ConcurrentMap with the given types +func NewConcurrentMap[K comparable, V any]() *ConcurrentMap[K, V] { + return &ConcurrentMap[K, V]{ m: make(map[K]V), } } // Add adds a key-value pair to the map -func (p *ProtectedMap[K, V]) Add(key K, value V) { +func (p *ConcurrentMap[K, V]) Add(key K, value V) { p.mu.Lock() defer p.mu.Unlock() p.m[key] = value } // Remove removes a key-value pair from the map -func (p *ProtectedMap[K, V]) Remove(key K) { +func (p *ConcurrentMap[K, V]) Remove(key K) { p.mu.Lock() defer p.mu.Unlock() delete(p.m, key) } // Has returns true if the map contains the given key -func (p *ProtectedMap[K, V]) Has(key K) bool { +func (p *ConcurrentMap[K, V]) Has(key K) bool { p.mu.RLock() defer p.mu.RUnlock() _, ok := p.m[key] @@ -38,7 +38,7 @@ func (p *ProtectedMap[K, V]) Has(key K) bool { } // Get returns the value for the given key and a boolean indicating if the key was found -func (p *ProtectedMap[K, V]) Get(key K) (V, bool) { +func (p *ConcurrentMap[K, V]) Get(key K) (V, bool) { p.mu.RLock() defer p.mu.RUnlock() value, ok := p.m[key] @@ -47,7 +47,7 @@ func (p *ProtectedMap[K, V]) Get(key K) (V, bool) { // ForEach iterates over the map and calls the given function for each key-value pair. // If the function returns an error, the iteration is stopped and the error is returned. -func (p *ProtectedMap[K, V]) ForEach(fn func(k K, v V) error) error { +func (p *ConcurrentMap[K, V]) ForEach(fn func(k K, v V) error) error { p.mu.RLock() defer p.mu.RUnlock() for k, v := range p.m { @@ -59,8 +59,14 @@ func (p *ProtectedMap[K, V]) ForEach(fn func(k K, v V) error) error { } // Size returns the size of the map. 
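// Editor's note (illustration only, not part of the patch): a minimal usage
// sketch of the ConcurrentMap API as it stands after this rename, assuming the
// utils/concurrentmap import and the fmt package; the keys and values are
// illustrative.
//
//	m := concurrentmap.NewConcurrentMap[string, int]()
//	m.Add("node-1", 42)
//	if v, ok := m.Get("node-1"); ok {
//		fmt.Println(v) // 42
//	}
//	fmt.Println(m.Has("node-2")) // false
//	fmt.Println(m.Size())        // 1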
-func (p *ProtectedMap[K, V]) Size() int { +func (p *ConcurrentMap[K, V]) Size() int { p.mu.RLock() defer p.mu.RUnlock() return len(p.m) } + +func (p *ConcurrentMap[K, V]) Clear() { + p.mu.Lock() + defer p.mu.Unlock() + p.m = make(map[K]V) +} From dbaa54524d350ce74f890f397c6c20b8bfd06f09 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Mon, 11 Nov 2024 18:06:47 +0200 Subject: [PATCH 06/36] add additional space --- engine/access/rest/websockets/handler.go | 1 + 1 file changed, 1 insertion(+) diff --git a/engine/access/rest/websockets/handler.go b/engine/access/rest/websockets/handler.go index ff385f826ef..911c8fc55b4 100644 --- a/engine/access/rest/websockets/handler.go +++ b/engine/access/rest/websockets/handler.go @@ -40,6 +40,7 @@ func NewWebSocketHandler( streamConfig: streamConfig, } } + func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { //TODO: change to accept topic instead of URL logger := h.HttpHandler.Logger.With().Str("websocket_subscribe_url", r.URL.String()).Logger() From b30d63d4494c341ccc0d38fa89ab7a7ebc40f68e Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Mon, 11 Nov 2024 18:41:41 +0200 Subject: [PATCH 07/36] regen data provider mock --- .../websockets/data_provider/mock/data_provider.go | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/engine/access/rest/websockets/data_provider/mock/data_provider.go b/engine/access/rest/websockets/data_provider/mock/data_provider.go index 6a4aab6e130..4a2a22a44a0 100644 --- a/engine/access/rest/websockets/data_provider/mock/data_provider.go +++ b/engine/access/rest/websockets/data_provider/mock/data_provider.go @@ -3,8 +3,11 @@ package mock import ( - uuid "github.com/google/uuid" + context "context" + mock "github.com/stretchr/testify/mock" + + uuid "github.com/google/uuid" ) // DataProvider is an autogenerated mock type for the DataProvider type @@ -37,9 +40,9 @@ func (_m *DataProvider) ID() uuid.UUID { return r0 } -// Run provides a mock function with given fields: -func (_m *DataProvider) Run() { - _m.Called() +// Run provides a mock function with given fields: ctx +func (_m *DataProvider) Run(ctx context.Context) { + _m.Called(ctx) } // Topic provides a mock function with given fields: From 839c35c26d462a22793d50d6ede39a52602bec10 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Thu, 14 Nov 2024 17:24:48 +0200 Subject: [PATCH 08/36] rename concurrent map. 
add more todos for error handling --- .../rest/websockets/data_provider/blocks.go | 27 ++++++++++--------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/engine/access/rest/websockets/data_provider/blocks.go b/engine/access/rest/websockets/data_provider/blocks.go index 7ec83e30fcd..43d551539bc 100644 --- a/engine/access/rest/websockets/data_provider/blocks.go +++ b/engine/access/rest/websockets/data_provider/blocks.go @@ -11,39 +11,40 @@ import ( type MockBlockProvider struct { id uuid.UUID - ch chan<- interface{} + topicChan chan<- interface{} // provider is not the one who is responsible to close this channel topic string logger zerolog.Logger - ctx context.Context stopProviderFunc context.CancelFunc streamApi state_stream.API } func NewMockBlockProvider( - ctx context.Context, ch chan<- interface{}, topic string, logger zerolog.Logger, streamApi state_stream.API, ) *MockBlockProvider { - ctx, cancel := context.WithCancel(ctx) return &MockBlockProvider{ id: uuid.New(), - ch: ch, + topicChan: ch, topic: topic, logger: logger.With().Str("component", "block-provider").Logger(), - ctx: ctx, - stopProviderFunc: cancel, + stopProviderFunc: nil, streamApi: streamApi, } } -func (p *MockBlockProvider) Run(_ context.Context) { - select { - case <-p.ctx.Done(): - return - default: - p.ch <- "hello world" +func (p *MockBlockProvider) Run(ctx context.Context) { + ctx, cancel := context.WithCancel(ctx) + p.stopProviderFunc = cancel + + for { + select { + case <-ctx.Done(): + return + case p.topicChan <- "hello world": + return + } } } From 48aaa566123d4759f7ea4d83ff01af4b2d3aece3 Mon Sep 17 00:00:00 2001 From: Illia Malachyn Date: Tue, 19 Nov 2024 15:15:19 +0200 Subject: [PATCH 09/36] Fix comments * make handle_connection blocking * rename concurrent_map * use type switch instead of switch * add todos for error handling --- engine/access/rest/websockets/controller.go | 42 ++++++++++++------- .../rest/websockets/data_provider/blocks.go | 2 +- .../rest/websockets/data_provider/factory.go | 6 +-- engine/access/rest/websockets/handler_test.go | 2 +- engine/common/worker/worker_builder_test.go | 2 +- .../test/gossipsub/scoring/ihave_spam_test.go | 6 +-- .../p2p/connection/connection_gater_test.go | 8 ++-- network/p2p/node/libp2pNode_test.go | 4 +- network/test/cohort1/network_test.go | 2 +- utils/concurrentmap/concurrent_map.go | 26 ++++++------ 10 files changed, 55 insertions(+), 45 deletions(-) diff --git a/engine/access/rest/websockets/controller.go b/engine/access/rest/websockets/controller.go index 87ceae35b7a..fe873f5f61c 100644 --- a/engine/access/rest/websockets/controller.go +++ b/engine/access/rest/websockets/controller.go @@ -21,7 +21,7 @@ type Controller struct { config Config conn *websocket.Conn communicationChannel chan interface{} - dataProviders *concurrentmap.ConcurrentMap[uuid.UUID, dp.DataProvider] + dataProviders *concurrentmap.Map[uuid.UUID, dp.DataProvider] dataProvidersFactory *dp.Factory } @@ -37,7 +37,7 @@ func NewWebSocketController( config: config, conn: conn, communicationChannel: make(chan interface{}), //TODO: should it be buffered chan? 
- dataProviders: concurrentmap.NewConcurrentMap[uuid.UUID, dp.DataProvider](), + dataProviders: concurrentmap.New[uuid.UUID, dp.DataProvider](), dataProvidersFactory: dp.NewDataProviderFactory(logger, streamApi, streamConfig), } } @@ -47,10 +47,14 @@ func (c *Controller) HandleConnection(ctx context.Context) { //TODO: configure the connection with ping-pong and deadlines //TODO: spin up a response limit tracker routine go c.readMessagesFromClient(ctx) - go c.writeMessagesToClient(ctx) + c.writeMessagesToClient(ctx) } +// writeMessagesToClient reads a messages from communication channel and passes them on to a client WebSocket connection. +// The communication channel is filled by data providers. Besides, the response limit tracker is involved in +// write message regulation func (c *Controller) writeMessagesToClient(ctx context.Context) { + //TODO: can it run forever? maybe we should cancel the ctx in the reader routine for { select { case <-ctx.Done(): @@ -66,6 +70,8 @@ func (c *Controller) writeMessagesToClient(ctx context.Context) { } } +// readMessagesFromClient continuously reads messages from a client WebSocket connection, +// processes each message, and handles actions based on the message type. func (c *Controller) readMessagesFromClient(ctx context.Context) { defer c.shutdownConnection() @@ -90,7 +96,7 @@ func (c *Controller) readMessagesFromClient(ctx context.Context) { return } - if err := c.handleAction(ctx, baseMsg.Action, validatedMsg); err != nil { + if err := c.handleAction(ctx, validatedMsg); err != nil { c.logger.Warn().Err(err).Str("action", baseMsg.Action).Msg("error handling action") } } @@ -143,30 +149,35 @@ func (c *Controller) parseAndValidateMessage(message json.RawMessage) (models.Ba return baseMsg, validatedMsg, nil } -func (c *Controller) handleAction(ctx context.Context, action string, message interface{}) error { - switch action { - case "subscribe": - c.handleSubscribe(ctx, message.(models.SubscribeMessageRequest)) - case "unsubscribe": - c.handleUnsubscribe(ctx, message.(models.UnsubscribeMessageRequest)) - case "list_subscriptions": - c.handleListSubscriptions(ctx, message.(models.ListSubscriptionsMessageRequest)) +func (c *Controller) handleAction(ctx context.Context, message interface{}) error { + switch msg := message.(type) { + case models.SubscribeMessageRequest: + c.handleSubscribe(ctx, msg) + case models.UnsubscribeMessageRequest: + c.handleUnsubscribe(ctx, msg) + case models.ListSubscriptionsMessageRequest: + c.handleListSubscriptions(ctx, msg) default: - return fmt.Errorf("unknown action type: %s", action) + return fmt.Errorf("unknown message type: %T", msg) } return nil } func (c *Controller) handleSubscribe(ctx context.Context, msg models.SubscribeMessageRequest) { - dp := c.dataProvidersFactory.NewDataProvider(ctx, c.communicationChannel, msg.Topic) + dp := c.dataProvidersFactory.NewDataProvider(c.communicationChannel, msg.Topic) c.dataProviders.Add(dp.ID(), dp) dp.Run(ctx) + + //TODO: return OK response to client + c.communicationChannel <- msg } -func (c *Controller) handleUnsubscribe(ctx context.Context, msg models.UnsubscribeMessageRequest) { +func (c *Controller) handleUnsubscribe(_ context.Context, msg models.UnsubscribeMessageRequest) { id, err := uuid.Parse(msg.ID) if err != nil { c.logger.Debug().Err(err).Msg("error parsing message ID") + //TODO: return an error response to client + c.communicationChannel <- err return } @@ -178,6 +189,7 @@ func (c *Controller) handleUnsubscribe(ctx context.Context, msg models.Unsubscri } func (c 
*Controller) handleListSubscriptions(ctx context.Context, msg models.ListSubscriptionsMessageRequest) { + //TODO: return a response to client } func (c *Controller) shutdownConnection() { diff --git a/engine/access/rest/websockets/data_provider/blocks.go b/engine/access/rest/websockets/data_provider/blocks.go index 43d551539bc..01b4d07d2e7 100644 --- a/engine/access/rest/websockets/data_provider/blocks.go +++ b/engine/access/rest/websockets/data_provider/blocks.go @@ -42,7 +42,7 @@ func (p *MockBlockProvider) Run(ctx context.Context) { select { case <-ctx.Done(): return - case p.topicChan <- "hello world": + case p.topicChan <- "block{height: 42}": return } } diff --git a/engine/access/rest/websockets/data_provider/factory.go b/engine/access/rest/websockets/data_provider/factory.go index 86d69475377..6a2658b1b95 100644 --- a/engine/access/rest/websockets/data_provider/factory.go +++ b/engine/access/rest/websockets/data_provider/factory.go @@ -1,8 +1,6 @@ package data_provider import ( - "context" - "github.com/rs/zerolog" "github.com/onflow/flow-go/engine/access/state_stream" @@ -23,10 +21,10 @@ func NewDataProviderFactory(logger zerolog.Logger, streamApi state_stream.API, s } } -func (f *Factory) NewDataProvider(ctx context.Context, ch chan<- interface{}, topic string) DataProvider { +func (f *Factory) NewDataProvider(ch chan<- interface{}, topic string) DataProvider { switch topic { case "blocks": - return NewMockBlockProvider(ctx, ch, topic, f.logger, f.streamApi) + return NewMockBlockProvider(ch, topic, f.logger, f.streamApi) default: return nil } diff --git a/engine/access/rest/websockets/handler_test.go b/engine/access/rest/websockets/handler_test.go index ebc83b00bdd..6b9cce06572 100644 --- a/engine/access/rest/websockets/handler_test.go +++ b/engine/access/rest/websockets/handler_test.go @@ -81,6 +81,6 @@ func (s *WsHandlerSuite) TestSubscribeRequest() { require.NoError(s.T(), err) actualMsg := strings.Trim(string(msg), "\n\"\\ ") - require.Equal(s.T(), "hello world", actualMsg) + require.Equal(s.T(), "block{height: 42}", actualMsg) }) } diff --git a/engine/common/worker/worker_builder_test.go b/engine/common/worker/worker_builder_test.go index 160f23844f5..09aebe1cc41 100644 --- a/engine/common/worker/worker_builder_test.go +++ b/engine/common/worker/worker_builder_test.go @@ -116,7 +116,7 @@ func TestWorkerPool_TwoWorkers_ConcurrentEvents(t *testing.T) { } q := queue.NewHeroStore(uint32(size), unittest.Logger(), metrics.NewNoopCollector()) - distributedEvents := concurrentmap.NewConcurrentMap[string, struct{}]() + distributedEvents := concurrentmap.New[string, struct{}]() allEventsDistributed := sync.WaitGroup{} allEventsDistributed.Add(size) diff --git a/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go b/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go index 5f2ff0f0e6d..8debc74e7d7 100644 --- a/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go +++ b/insecure/integration/functional/test/gossipsub/scoring/ihave_spam_test.go @@ -37,7 +37,7 @@ func TestGossipSubIHaveBrokenPromises_Below_Threshold(t *testing.T) { sporkId := unittest.IdentifierFixture() blockTopic := channels.TopicFromChannel(channels.PushBlocks, sporkId) - receivedIWants := concurrentmap.NewConcurrentMap[string, struct{}]() + receivedIWants := concurrentmap.New[string, struct{}]() idProvider := unittest.NewUpdatableIDProvider(flow.IdentityList{}) spammer := corruptlibp2p.NewGossipSubRouterSpammerWithRpcInspector(t, sporkId, role, idProvider, 
func(id peer.ID, rpc *corrupt.RPC) error { // override rpc inspector of the spammer node to keep track of the iwants it has received. @@ -189,7 +189,7 @@ func TestGossipSubIHaveBrokenPromises_Above_Threshold(t *testing.T) { sporkId := unittest.IdentifierFixture() blockTopic := channels.TopicFromChannel(channels.PushBlocks, sporkId) - receivedIWants := concurrentmap.NewConcurrentMap[string, struct{}]() + receivedIWants := concurrentmap.New[string, struct{}]() idProvider := unittest.NewUpdatableIDProvider(flow.IdentityList{}) spammer := corruptlibp2p.NewGossipSubRouterSpammerWithRpcInspector(t, sporkId, role, idProvider, func(id peer.ID, rpc *corrupt.RPC) error { // override rpc inspector of the spammer node to keep track of the iwants it has received. @@ -438,7 +438,7 @@ func TestGossipSubIHaveBrokenPromises_Above_Threshold(t *testing.T) { func spamIHaveBrokenPromise(t *testing.T, spammer *corruptlibp2p.GossipSubRouterSpammer, topic string, - receivedIWants *concurrentmap.ConcurrentMap[string, struct{}], + receivedIWants *concurrentmap.Map[string, struct{}], victimNode p2p.LibP2PNode) { rpcCount := 10 // we can't send more than one iHave per RPC in this test, as each iHave should have a distinct topic, and we only have one subscribed topic. diff --git a/network/p2p/connection/connection_gater_test.go b/network/p2p/connection/connection_gater_test.go index 7794caa8110..e84bfe0042f 100644 --- a/network/p2p/connection/connection_gater_test.go +++ b/network/p2p/connection/connection_gater_test.go @@ -36,7 +36,7 @@ func TestConnectionGating(t *testing.T) { sporkID := unittest.IdentifierFixture() idProvider := mockmodule.NewIdentityProvider(t) // create 2 nodes - node1Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() + node1Peers := concurrentmap.New[peer.ID, struct{}]() node1, node1Id := p2ptest.NodeFixture( t, sporkID, @@ -50,7 +50,7 @@ func TestConnectionGating(t *testing.T) { }))) idProvider.On("ByPeerID", node1.ID()).Return(&node1Id, true).Maybe() - node2Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() + node2Peers := concurrentmap.New[peer.ID, struct{}]() node2, node2Id := p2ptest.NodeFixture( t, sporkID, @@ -247,7 +247,7 @@ func TestConnectionGater_InterceptUpgrade(t *testing.T) { inbounds := make([]chan string, 0, count) identities := make(flow.IdentityList, 0, count) - disallowedPeerIds := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() + disallowedPeerIds := concurrentmap.New[peer.ID, struct{}]() allPeerIds := make(peer.IDSlice, 0, count) idProvider := mockmodule.NewIdentityProvider(t) connectionGater := mockp2p.NewConnectionGater(t) @@ -332,7 +332,7 @@ func TestConnectionGater_Disallow_Integration(t *testing.T) { ids := flow.IdentityList{} inbounds := make([]chan string, 0, 5) - disallowedList := concurrentmap.NewConcurrentMap[*flow.Identity, struct{}]() + disallowedList := concurrentmap.New[*flow.Identity, struct{}]() for i := 0; i < count; i++ { handler, inbound := p2ptest.StreamHandlerFixture(t) diff --git a/network/p2p/node/libp2pNode_test.go b/network/p2p/node/libp2pNode_test.go index b0c08560e43..d53fabb0e17 100644 --- a/network/p2p/node/libp2pNode_test.go +++ b/network/p2p/node/libp2pNode_test.go @@ -159,7 +159,7 @@ func TestConnGater(t *testing.T) { sporkID := unittest.IdentifierFixture() idProvider := mockmodule.NewIdentityProvider(t) - node1Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() + node1Peers := concurrentmap.New[peer.ID, struct{}]() node1, identity1 := p2ptest.NodeFixture(t, sporkID, t.Name(), idProvider, 
p2ptest.WithConnectionGater(p2ptest.NewConnectionGater(idProvider, func(pid peer.ID) error { if !node1Peers.Has(pid) { return fmt.Errorf("peer id not found: %s", p2plogging.PeerId(pid)) @@ -174,7 +174,7 @@ func TestConnGater(t *testing.T) { node1Info, err := utils.PeerAddressInfo(identity1.IdentitySkeleton) assert.NoError(t, err) - node2Peers := concurrentmap.NewConcurrentMap[peer.ID, struct{}]() + node2Peers := concurrentmap.New[peer.ID, struct{}]() node2, identity2 := p2ptest.NodeFixture(t, sporkID, t.Name(), idProvider, p2ptest.WithConnectionGater(p2ptest.NewConnectionGater(idProvider, func(pid peer.ID) error { if !node2Peers.Has(pid) { return fmt.Errorf("id not found: %s", p2plogging.PeerId(pid)) diff --git a/network/test/cohort1/network_test.go b/network/test/cohort1/network_test.go index f546dcfa54d..723df438960 100644 --- a/network/test/cohort1/network_test.go +++ b/network/test/cohort1/network_test.go @@ -618,7 +618,7 @@ func (suite *NetworkTestSuite) MultiPing(count int) { senderNodeIndex := 0 targetNodeIndex := suite.size - 1 - receivedPayloads := concurrentmap.NewConcurrentMap[string, struct{}]() // keep track of unique payloads received. + receivedPayloads := concurrentmap.New[string, struct{}]() // keep track of unique payloads received. // regex to extract the payload from the message regex := regexp.MustCompile(`^hello from: \d`) diff --git a/utils/concurrentmap/concurrent_map.go b/utils/concurrentmap/concurrent_map.go index fb946733a24..148c3741428 100644 --- a/utils/concurrentmap/concurrent_map.go +++ b/utils/concurrentmap/concurrent_map.go @@ -2,35 +2,35 @@ package concurrentmap import "sync" -// ConcurrentMap is a thread-safe map. -type ConcurrentMap[K comparable, V any] struct { +// Map is a thread-safe map. +type Map[K comparable, V any] struct { mu sync.RWMutex m map[K]V } -// NewConcurrentMap returns a new ConcurrentMap with the given types -func NewConcurrentMap[K comparable, V any]() *ConcurrentMap[K, V] { - return &ConcurrentMap[K, V]{ +// New returns a new Map with the given types +func New[K comparable, V any]() *Map[K, V] { + return &Map[K, V]{ m: make(map[K]V), } } // Add adds a key-value pair to the map -func (p *ConcurrentMap[K, V]) Add(key K, value V) { +func (p *Map[K, V]) Add(key K, value V) { p.mu.Lock() defer p.mu.Unlock() p.m[key] = value } // Remove removes a key-value pair from the map -func (p *ConcurrentMap[K, V]) Remove(key K) { +func (p *Map[K, V]) Remove(key K) { p.mu.Lock() defer p.mu.Unlock() delete(p.m, key) } // Has returns true if the map contains the given key -func (p *ConcurrentMap[K, V]) Has(key K) bool { +func (p *Map[K, V]) Has(key K) bool { p.mu.RLock() defer p.mu.RUnlock() _, ok := p.m[key] @@ -38,7 +38,7 @@ func (p *ConcurrentMap[K, V]) Has(key K) bool { } // Get returns the value for the given key and a boolean indicating if the key was found -func (p *ConcurrentMap[K, V]) Get(key K) (V, bool) { +func (p *Map[K, V]) Get(key K) (V, bool) { p.mu.RLock() defer p.mu.RUnlock() value, ok := p.m[key] @@ -47,7 +47,7 @@ func (p *ConcurrentMap[K, V]) Get(key K) (V, bool) { // ForEach iterates over the map and calls the given function for each key-value pair. // If the function returns an error, the iteration is stopped and the error is returned. 
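// Editor's note (illustration only, not part of the patch): a short sketch of
// the early-exit behaviour described in the ForEach comment above, using the
// renamed concurrentmap.New constructor from this commit; the keys and the
// sentinel error are illustrative.
//
//	m := concurrentmap.New[string, struct{}]()
//	m.Add("a", struct{}{})
//	m.Add("b", struct{}{})
//	err := m.ForEach(func(k string, _ struct{}) error {
//		if k == "b" {
//			return fmt.Errorf("stop at %q", k) // stops the iteration early
//		}
//		return nil
//	})
//	// err is non-nil here because the callback returned an error for "b"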
-func (p *ConcurrentMap[K, V]) ForEach(fn func(k K, v V) error) error { +func (p *Map[K, V]) ForEach(fn func(k K, v V) error) error { p.mu.RLock() defer p.mu.RUnlock() for k, v := range p.m { @@ -59,14 +59,14 @@ func (p *ConcurrentMap[K, V]) ForEach(fn func(k K, v V) error) error { } // Size returns the size of the map. -func (p *ConcurrentMap[K, V]) Size() int { +func (p *Map[K, V]) Size() int { p.mu.RLock() defer p.mu.RUnlock() return len(p.m) } -func (p *ConcurrentMap[K, V]) Clear() { +func (p *Map[K, V]) Clear() { p.mu.Lock() defer p.mu.Unlock() - p.m = make(map[K]V) + clear(p.m) } From 006c80a5f1b382eb40259c927819cdbf7a80c7b4 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Tue, 19 Nov 2024 16:47:11 -0800 Subject: [PATCH 10/36] update height in comment --- fvm/evm/handler/handler.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fvm/evm/handler/handler.go b/fvm/evm/handler/handler.go index f2943b9243b..102ec960238 100644 --- a/fvm/evm/handler/handler.go +++ b/fvm/evm/handler/handler.go @@ -356,7 +356,7 @@ func (h *ContractHandler) commitBlockProposal() error { // log evm block commitment logger := h.backend.Logger() logger.Info(). - Uint64("height", bp.Height). + Uint64("evm_height", bp.Height). Int("tx_count", len(bp.TxHashes)). Uint64("total_gas_used", bp.TotalGasUsed). Uint64("total_supply", bp.TotalSupply.Uint64()). From 39a777a7d6b42294b2fc00727a5d22cb0b81a8cc Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 12:45:42 +0100 Subject: [PATCH 11/36] add offchain block context creation method --- fvm/evm/offchain/blocks/block_context.go | 98 ++++++++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 fvm/evm/offchain/blocks/block_context.go diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go new file mode 100644 index 00000000000..38003e0b771 --- /dev/null +++ b/fvm/evm/offchain/blocks/block_context.go @@ -0,0 +1,98 @@ +package blocks + +import ( + evmTypes "github.com/onflow/flow-go/fvm/evm/types" + "github.com/onflow/flow-go/model/flow" + gethCommon "github.com/onflow/go-ethereum/common" + "github.com/onflow/go-ethereum/eth/tracers" +) + +// NewBlockContext creates a new block context for the given chain ID and height. +// This is for use in offchain re-execution of transactions. +// It includes special casing for some historical block heights: +// - On Mainnet and Testnet the block hash list was stuck in a loop of 256 block hashes until fixed. +// https://github.com/onflow/flow-go/issues/6552 +// - The coinbase address was different on testnet until https://github.com/onflow/flow-evm-gateway/pull/491. 
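// ---------------------------------------------------------------------------
// Editor's note (illustration only, not part of the patch): a hedged sketch of
// how an offchain re-execution caller might use NewBlockContext, based on the
// signature introduced below. The chain ID, height, timestamp, and the
// getHashByHeight lookup are placeholders, and the import path for the blocks
// package is inferred from the new file's location; a real caller would supply
// values from its own block store.
// ---------------------------------------------------------------------------
package main

import (
	"fmt"

	gethCommon "github.com/onflow/go-ethereum/common"

	"github.com/onflow/flow-go/fvm/evm/offchain/blocks"
	"github.com/onflow/flow-go/model/flow"
)

func main() {
	// Placeholder lookup: a real implementation would return the hash of the
	// EVM block at height n from persisted data.
	getHashByHeight := func(n uint64) gethCommon.Hash {
		return gethCommon.Hash{}
	}

	blockCtx, err := blocks.NewBlockContext(
		flow.Testnet, // chain ID
		1_500_000,    // EVM block height (illustrative)
		1732000000,   // block timestamp (illustrative)
		getHashByHeight,
		gethCommon.Hash{}, // prevRandao
		nil,               // no tracer
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(blockCtx.BlockNumber)
}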
+func NewBlockContext( + chainID flow.ChainID, + height uint64, + timestamp uint64, + getHashByHeight func(uint64) gethCommon.Hash, + prevRandao gethCommon.Hash, + tracer *tracers.Tracer, +) (evmTypes.BlockContext, error) { + + // coinbase address fix + miner := evmTypes.CoinbaseAddress + if chainID == flow.Testnet && height < coinbaseAddressChangeEVMHeightTestnet { + miner = evmTypes.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) + } + + return evmTypes.BlockContext{ + ChainID: evmTypes.EVMChainIDFromFlowChainID(chainID), + BlockNumber: height, + BlockTimestamp: timestamp, + DirectCallBaseGasUsage: evmTypes.DefaultDirectCallBaseGasUsage, + DirectCallGasPrice: evmTypes.DefaultDirectCallGasPrice, + GasFeeCollector: miner, + GetHashFunc: func(n uint64) gethCommon.Hash { + // For block heights greater than or equal to the current, + // return an empty block hash. + if n >= height { + return gethCommon.Hash{} + } + // If the given block height, is more than 256 blocks + // in the past, return an empty block hash. + if height-n > 256 { + return gethCommon.Hash{} + } + + // For testnet & mainnet, we fetch the block hash from the hard-coded + // array of hashes. + if chainID == flow.Mainnet && height < blockHashListFixHCUEVMHeightMainnet { + return fixedHashes[flow.Mainnet][height%256] + } else if chainID == flow.Testnet && height < blockHashListFixHCUEVMHeightTestnet { + return fixedHashes[flow.Testnet][height%256] + } + + return getHashByHeight(n) + + }, + Random: prevRandao, + Tracer: tracer, + }, nil +} + +// TODO: HCU not done yet +const blockHashListFixHCUEVMHeightMainnet = 0 + +// Testnet52 - Height Coordinated Upgrade 4, Nov 20, 2024 +const blockHashListFixHCUEVMHeightTestnet = 0 + +// TODO: HCU name and date +const coinbaseAddressChangeEVMHeightTestnet = 1385490 + +var fixedHashes map[flow.ChainID][256]gethCommon.Hash + +// generate the fixed hashes for mainnet and testnet +func generateFixedHashes() { + mainnetFixedHashes := [256]gethCommon.Hash{} + testnetFixedHashes := [256]gethCommon.Hash{} + + mainnetHashes := []string{"acb08ca38e1f155f1d038c6d2e1acc0a38915624b5772551c4f985f3ebc3a3e0", "5914c330c16ee5e6b8e60d0435d0390ef3a29cde3e177090c23cf34e111792eb", "89efffebedded274fc0c72a4d3e953d990b5f54b82b696c65390f87b2f9b331c", "824a13a4d2252ff045cc785aa77c6ab8f85b48a24aa5ac198417bc05248e3d20", "5c0eefa82e36a4a7bc8b67f4856f756407189f4011d74c1cc6125599bcd6a18d", "6e6435cf4a9dc503a213fee4c8e5909f32ef284b3dbe780fadf78ce2a70a6a56", "f312e8571dbd7e2c347a0032d7ac42b62966a833ddacf2ba1fd1b0c1dbf755c0", "e9ef75691eadf0e6e9ca88cc0dc9c29e520aa611dc21ba390eead58284949873", "bc2fedff2ca293a75dc36c577dd05742671549586a333e458c8c723a3d3ba40e", "04256e11dc4ecc63eee1b3ad22e31860d26a1cc2103e34f91f12f4a61cd3150f", "454081c5e315537eda05e5fdd8e5b34df7473386c16d140dcf0df9c35159310c", "f4a897310404d46b19a87a45f4e53743c12c1b4530383d3a8ecc972940461cf0", "81765ca144baff8e65ebe989403c8f86ede26cee5580ff5320817a108e54e887", "cb96415a6f8d3ac6abed34fcc83b2745775c0bdffb7561392e1eeab63c28bd19", "c70a0e0279c46e6fae534bf1dbee7796078ae1a9c214d7719b12dcfd4fbbf55c", "4668064ef2d42bbae07e44276b55922ff7830f8ff203d91f91854252bf42dcfc", "bc966f7acce679568f84c0f6164079a4b238b856bce15091fd62d3d94506b92e", "b6d1beb5b1be5715eb61f0b6528e339c75604f2ebf0605238905a0c1fc4f0594", "e367fba588c1fa71fec1e769963d3106a0e73d13e2ec441d2de44949673804b0", "2ea9607ed6160325c0fb6514ad2d0eb4397afe131c787a6d189e39907ade71ac", "5297cff89b9f573c2f7920be0c8d3e71c32f3016a1c893e9f41048e979533a70", 
"9f81c00947b14ddfb6793437a787d0bb8ba5692e264f7f5087bbd4e8bdc961f7", "6ef10778647ef844ce9c53b740890980a564619e3ce866faea7bd75b67971873", "db1d873cfb81b4aa32b7d575542d545176782737d7a7f3c9a288205124e91467", "cdab6dc09455023a24c38ae89232d4dd5e76d13935f38eb4d60a8ae3c2f87270", "9cc982be62369ee866334c1ce1660046cf23f109e7baea6a6525ac9fa2657143", "8ef919c45b46bbee779b7511a0dfe23cc9631bbd48103e2f9b7cbe9b492ac61e", "26da1293ebe6664711039a56d9f2fcb245a7b3803c4219cbd27430307624977b", "681c1001f30ebd45fe2ec071e168476c3d3367a18cbbb14784f5ad549b6f6c76", "03a921c3db624982c82090c5f562599f0bef0e542bf145c3784e055dafc43f75", "e0304d9cd962ba44165e5dcd4c29bae6e9eeaa263413c8e0ca41d0cf883130a9", "a931939f13b5dc447464b115b487820467743016fb5bee8b43864ad836a98261", "c7ed304fca9a13944e3f81c5fe4e8df4a7327d1c89fd63de357551e99996d9bd", "80f7f4870cdd284f129babe4d08c002acb3a63a43b6d73ea39c28820c9002d20", "e2d09b3b3d27e1d5448c1772e9d300fa1615c791e2d8b4ebce3d4c24e6058cbf", "754869cba21c3bd12d94a23571a947f8a33dc369e6cf4ca18f2fd40e7b5f5a53", "e2dc7e12450ddbc50087fd76041623416c13010be0a804f4676af1768e5d61ac", "7bb9175b93b7cab1e02a3f571f399db950908b57b45f1d584f3a5ac1781496a8", "2e7e5f02e2c107b73dae8b20a0f52d52ea1aa7248c6b4876f70808e5168dada9", "e19d12c9f01d7b8cdf6831e269e0a011cd744a10aa1da65780f80a50c812eafb", "6bdaa6249d9616d1244a7e23995dc416b9f3cc464ca2d5941cccb8d5b1a1eac8", "38e68d98e93683c14c3c0cbf82298c329857503bd88e488c6cc8ce83436295bd", "e6149e3ed7747619bcba88919daf4e9dc167a276887e8bad88158fe796aff9a9", "e6c8562da3023e8d864f04545f26ec68f5a3d9ad984e225104ee63617e22cdec", "677b31d0b0fd1be96e13968e9cf679797082b5fe7a2c3a9934516f9d05a35c5d", "d894c76d4e18fdd1424435028631b008224fa480faf7dd4404faa26805f226f4", "38421bae5c3e39fb73178621e97fc5a8eeb1b6e25a8faba76aa6f808819aa263", "ac90729f29643e4107280ae8b69fe4e862c1cfbeef57a9f70579b16db9479410", "a671eff0c687d04816889d0c56e704796690771cb5a1eff8c83ae5ef417f79bf", "601fc2b0ca9979c4b092a812a88934f46381e42037278523f191e8207ad1e20b", "0dcadeeb37a0836c4aa287c43a7f1e6e72eaabc8fb0f5ad6209f87e69f2bf412", "02bd187372fe4c6bf894fabf7191afca7f2f052e5d42a2cc6fb7f2e6b1a07716", "39da57b24b312b1838a44de3286c51a0189608bfaa5904a7a2a35c675b947322", "fe16a19cdeacc8ce05bf38d3617c1e90579b6991775d3c0358bf3dc52aeae032", "9e7e8957797b6fb78679c60c249cb8b83e045e760a6ec24c506d565ae94c1730", "7ae42245a1611e7d32d573ddc2863f9f1902683a17d46b57794ec90ad61a9d6d", "f599ba650e87dcf51733485aef08e71f1d8f5e47c459250902daf3db9bb9f526", "7d914de318e12963c059aa04fe03cb45849b16620a1e7c2a883164bca65ad3e7", "d66014e30f72e1bb309235b6d9b8db6f6fe13b624c0ec81ed657ef215d13e29f", "7d25f2ab344c3ce12cad30a992264dae73668e694d8690bff158c0c66951b6eb", "c4eeb03288ac4ec6166d31909e3fdea938b76f637bdd4963910aa0cfedf4496f", "a30beb208f4ccaec67f83f72b72d18a428b5876ebf53184248ab3438c226729c", "67d9c883f3f8df5afdd8e897f54f4ddd4522d761d23429c1697034e3bebe8df6", "fffc4c5760e75dc839acb6654e0274afbe07b20ca46e90b0da0262432473955c", "3238927e1ff0d18a573cff7ea6d081579bd9ec9de9f6ba2f67301ef88d65057c", "3af6b7b1124dbabca4aa2734711484ff6fc6d76130cf81633e051ffdc01b3583", "0475c59145cad6563ed3f0cae8d03a09c73d4862c5a090f8d5ba5c43f3f744fe", "896c5230f74946f18dc31d879d228715303ddaf01d6c1050dc4cac1cab8f5273", "a0959444effc54fc3d04a31a87ec466063c510762b2b4e099cda3794c0d59c07", "0f7b8362a5f8bfe9104a2dbcbf25dac6dfaae4fd41862cb0f0e028062b7db9fd", "83303d47daa193a0e9f1cb38b7fef57508b6f8f80aa46d5663f64800c9bd25de", "82892728f36bf81b17e2fc6762444d938f5b8b6e80c09c7189e73a8b6b9b2b04", "39f93ea2ce0afb9ea531662a38cd65984ae38b10076a37ddd10fd45ed35674d0", 
"6783668b699abfe0b3bcbbc79988e7c1c5191038497cd73e52502702d18b8cdd", "6fb5147a8b6cff70490dd2dcbee8c26e32808034978f8989bd0d73ac1c5cd79b", "b3b550f194004cfb54f24f738fe901058fa4eea680d5f704a1e49996aa7df019", "2e81de6c3c6a1828322fea4a5d7add9c9c4bc940d37760cd78a15f7185bbec5e", "8ea70bc7c983074e7d32d9c47e24ebea9ddc0a04aa4061e82c40566cbb886061", "f0c2785d27868755124be6ae2cdde18804893493f53c2bf3b9ce3bc37a983afb", "f7e684111cb2b43644b5e2a07bcdfbb9231ba8647dae01103bb15ee84ed59dce", "a4c839a3ec06907bc87e18a06aab314e184bf55d8d66acf57012d81bed5f5a0a", "e6f94c1f935b7505d65b70571a169572a5586582dfcd7ec43614eb5a53169556", "5f67958fb79aec5be7e950deb0a9a86fbcd328eb75298946014f06c200fd8dcb", "ba54fe0ea8a35e899ec5e5ff9aa888ae7c5ed8630336a098c131809b6e3a815c", "11eea2b61707439bfcc198d3d483fc7fbc8f5c83f70b6190b6bd1bd11a0edfd4", "d1088b19e8814dca954f5a78f827ea6e20de2b8e0839d5f2d2ece9cc58d72c76", "c5e0a35346bc8c9a45338f5844cd13f5f5b94ae90494c8609b7fe2dd69925429", "32089be74f3bd2191d7e8116a742b40f613d75bd77765c28a11d937755ca52fe", "f584db14565d9abc212b02935724bf05da840670b46e83a64990d7463571f9c2", "a959dab01d61bfb54bfcb58bcdb609f42e2a062fcc63eb3d5b866e582fc589a8", "cf09a5617dde47025acdc7dc544f9d78fa396c383ecee103b5b74b532d9a586a", "03bbdbf7f22cd92a696f1ffac34c99d7e57c201edfdfc617826ef2f648d38475", "7f1a8c24c456052fcc3721707a56c457eb7d80ce8d83d8d23c5a9a0cb70eeaec", "2ed7147f47b4f12924358d18f778cec7d28dd53e9189a5096a7449f42a1ce29d", "c78b588dc0e967fc85abad5d5a18f2be86b7a77363ce701f245507a7043de3f2", "bcc0b4ed36d1512825bb2a2db5ed41cf5a7f5fa5634c8199415eae7a145ab772", "c520f97ac043cf2431641d4532c4a44f9664e728c08382ec798ac49997f19695", "a7825ca8bc2f6ac8556b88cc9a3c2a533504e5a8e011149cc15eaace9320c23d", "de8cc99029674ccd55105e8b5182b22e8c219a8a35e9e5fbb386d232e8e1ffd4", "2ae0a0239db0cdc5108215d38f30d783b4619824a5b420cbfca4fd6242586fa5", "3ce22aea444053e3456ca4edacb1060a5a355a7ca7e585af873388f99e654028", "f18aea7d73a0a8b2c313eaf7e742a08225e68341de787a4003fe49c06a5d4d13", "82dfb93809f99c59f6d41402e863580fe080278faca77cf2eddb651fedb77b05", "08c1d039a238c625ff6715aafc33ee8a1675bfa482ba6edbd0d9cc63d947b5b5", "a90aa55518cc9000eceade9b79644cf723c22f60caf849604dbb2ac22a8b5a86", "077cd67222a27be3640cd4d5cb3946bbc0f7df3fca7c5dd2ab66e4f9187f979e", "e8ccaf643c060c92fca26ab6adec347a6c3fee28bfb2089c5973bfc319cd8da5", "49e9c991a4d793b5ab62c3dc16290cfee8389fb12ab90e182964dbbefb72ad3e", "9ab6a29e6b5cc88f1791de37ab48ea5daa9e222365fb2b590c8f04109a372a5c", "8022544ade8da7ab8b34bf3bc8cd15e90697a4e72e760c809880830e2aadbdc5", "cc6b301ae355cdadf19e48e6cde96d98961c9aee896bf9ed815cb47dae0e1c22", "02b5781e6a697fdd26883f63ecb7d947e0789ed1fffb551bec429a139c0dcef8", "bfd97e8342aee5eee212638e37b691edac398b54bc535ae3458ca72c530ebba6", "1546f5334900491745f87d82fec8082c65cbf6e975b9474041cc7e22fe369130", "8d42b170698fc0c2662a2fc6d1017a45bb7af9c335e1c5a2cc107759d3aab7dd", "0cd379d9866856ed9fc3ad93190cee5d5ab8dc738b71fbb4bfa14e44d2b342bd", "b287e14cd59493d0f0a8d6a8a8ccb056da71527af9610bd38a80f89f43ba9e0e", "699f32bab442ea206544c0478fbf8e55093bdf246346014f242bdf1be60b9b9f", "ef55d74e0c1b660dd69bbef8b1d87ad827da29f2ab5169c14fd17e5ab3f2906e", "cef5074e106ea292c52651ae438bd80ff34b8ebfd31e00ab137190ac8829967f", "312dba438b767fb62feeed74223e5345241e3c9d078863b82c9768d52635d6e2", "102da56cd23259629a60db3c3e60eb2ddcf124ee47ed6e37e09b1cbd023a2a55", "d8a8e6bd81976810315c0950dffc466ae9ef5440629cbfdae970adf9be85a2eb", "d890f76eed51ff1f08b2e08c13123b5b59b92db93874c3a1774c22589ccdfbfa", "8b9a63cd3ff092638e11ed29b542cce0b5098f2f3ef965f5c0b4c18cae90bc69", 
"1abf154ec1d34306d97189ea9af96a6a33c4bdc597cbc14897b89decbf38661e", "07ca0710e82029b6385832a4b546e0336c587b8ea9280fc384afb611d80ab7ba", "b043f239fe9bb9e78e4102b7ed49d35beea61ed7d677eb53cdbcbbc2783b4079", "1b50849c36638c9afe17cb095d4bc978d8883404b1c58cd3acf2ed09f188c602", "51853c7a3fa6b70dde4f16610ab43241a89ebd3bcd0c473606833551358a8f7c", "350b1984c35d9d48f6f0dbf97e33e76edabb0125538b52927182ea00a4736021", "126ea9840493f9ecbdb8cf04327f0ab8c9315a7420772b2bfd263fd16d1e28b1", "b0190ad8ed68d4f8f91a54240ac7e2205be58b5f8ac5f23e8ffd280c3e554c96", "47e46e9f19a2088625dcb5a1a5c6210f3b4f30e748ba23c6391f314fea4f5bcb", "82b39c4162a1e38739942ca62fb80aa1de7f9a833c0de58d67796a243175b917", "d0cd963ed709c3573789cd8e4c35ae28692db1a6c99f7b38aceb7411a4f7be98", "37a325b033d3f6f1d56f27dd4c5169301f7eba32e8f4c8a8349cc7ece87ddd9d", "51c95c79b6819aa2efb727fd29cd73368488e828fbde2b64af4576e79bf242f6", "3bc469e4ad8a997d246006f09febb05acfe065db25c4a33c8f2437b0dfef0878", "b58599ffd76d2147235706a200780a5ec6195e2a5c13d2b7b8d242b7c1958d16", "140462e616516eb56075d1ea6c01661c2f2638e471a28ccbfcb5d5cf94eb3e74", "9d1cd56f1a33c62840af5b75f5b1e3b0a1475db362a7b8999b8897c8defe8579", "5adea11dd63543557d0f95028656284e482e894672342b664c2d483654c96271", "b01f5826fe1aaf8cfa9955d9f2d66fa2e896e8406117b87291a05c8c0b1510d2", "5974e67c55df5f4c6a0e3230d4322aa70ee9ff975a6e0c65b4fdcf6b84d4b31e", "329fdcec3d7c1e61b190fa5ab4c6d58cdce2441671c695470c95e00679390289", "a5f0189e64f96ef6e06f5208718ba903d1934eb7f0b85aa38fade6e45e1278bb", "0b4ab1c1a890edd1b714c390399293cf1e1d1deef68ee4ef005e3b68ff17ce6d", "5449ba71016c81101f874c61702fe7c472d50c2bff7c815028cc6c84d761aaf9", "d5d5fdd27c59b705652ba82caf7ec3ddd07d4e3168ec4006b3c21b431cf82971", "8a2e4c552b152c8b76cb7e07ea727f26c607b600bb382af4b9f066041156a7fe", "96a49a267355918ba085c665fefbcc6a53e29b35734ec8d1570cbbec61081154", "f040e21168602b67d8afba7aff7cf0aaa4acdb463aedab7a29fe2248f41582d3", "a83dc07b7b05d05954aebd19afb76ab9794e35d1f0bfeb0222f7434579a9fec8", "3259f7323e6e0a7ca95dbec594b4b7ce5f7350bd54ac97a3bdc35c333e1024f2", "c84287dce56c2837eb140485775c13645d3d7195bad44174497c1624e3d6bcf8", "5326aed27fbdb6a4e59a974bee60aa1ae71195aaa311bfeba212c152e0f56266", "8d83acde8c0c2606bbda85fef834620309546855d5917d6162a3f14683095b47", "cba4417044bed9ff8f494919f23661efce69821367fe850a837f7cdd64f5d814", "1bf83c9a48b54e8b4b095bee90f5bcc1ac8e8897b351d93205a64c133bc5bd7b", "0ebb774b03cadda941343d9b2bbf2e7075f049e6e309dd232cf44a36578935ad", "d193e2601554fb3d1fa0c638e147297a76e4a6ac2c02209bc65d7294dbf002e6", "a9b3ba41d99da589a8dd1dfd776d121e6d4ac4f1ee52d1cc3517d2226fc09ad9", "dd53cbd732125e3f22ef9fadd789685d10a49f88f21a6dd66c3790a4b7f2b85b", "6f827b1068f38167235778d893da3e6c7a949a6641fa5b0aa4a116449e7545ba", "80c4debfeb8d3433350b12856003a0378485b087a0e51d4a974ec88fe8b899b2", "addf88642352377a5d80a9f576e1ed7b8754c09aba6be508e2b8f3b1d7d9e042", "8c961cd106e03576e181925fa16dfda42302f96da8679ef61eb64c1a4742e5b6", "7c02dcbda0fa59f3e843836105151bc1a49a66e2a02fb5941595d23abdd376c6", "45da6f88684c89476755a45df16d1bb602fde60f95d8756311495bb53b441637", "3df1b14731bb4b7a070864eeade24fa37c3584475fe3cf199f41709710ac7f4a", "6638900a817ceda30dbfcc8931ab64d047b281c71ce9e7d203f8790fcea042b1", "b2378c5c9b4812924571836703eeae38364924c2c0430e0a671f2b3a8d338130", "f4825a9397baa4bf07ad69e8dc7e69c03a76c0d394160729542f1b46ff03f338", "50573280946a2c75b36064277f4bbb79875881c6f9f55dc834b0f408ce02be00", "3a6903db22957442e3bd81727d3038c69562403aa8584302f49c28e5f0f4f5ce", "081be91f15adc3c6591e317a188d524c1d16d01ba396508e5ed6a897c169e9a8", 
"84bddbda2880e71a37578cd427c7602c3580b6af74fe9640cdad994678ed6edc", "c1b6f2cf31192cf7a3643b57fa98ee056e0dd6c6f28eec65821f4fb5b6721971", "f9d11cea4b504a360c0d62c3d908d35f5742112588f2a9fa7eefb5d90c1383f5", "478adc2d34dce7af32071a0e2eedb8c7fb6ebb90bfa404f6ebe10776badf1fbd", "8d809a7afb8b0f327646e1efa6f00670642ba9dde2fa2569d67e5c11a2c822da", "0231b304c4325ac717cce997b2f33f885523062f931d812253035916abfb8e47", "f49b278ef762922930de0e7d4b8ada81b64d010539dbf5a2530e1f88c4a6ad29", "617f5ec465f421abd0e6291b6ca5f8e027f2d500b406d87b6056101bac98a1b4", "1081fddf73cb61f080a9fcef1d3ee2bdf466c3ed35876ee82482c1a49bdf2385", "25b819d32eb42de93e50bfbb656030051d7e4ff20d3c78e11506df28a64707ae", "97f38910f204943718d61a88cc539a3f281d540477b0fb2c7929aada1061a1aa", "bf46882478c2a7955093126c7072d7b7fe472967979de522c2c14739bbab7d07", "31a8a2038327e176933240df416d3035861e959eac4528560ff348347c716f27", "d827a95da4a08258897313e839a9613c62de031517db363580c29ccfccfaacc8", "b5de63a660dae61c272f9dc1e646da96eca8a62ef3764c2e3b0ae6b258532268", "60d8f10911e03d48eb7274864a09b19756096e0c28f5ca42a26c4f9b3b7fdc5c", "e5bfc9d179f5fb0810cacbed185cc2b2042b774b95dde4048e8c9b4b4043bd31", "c061bf4ed829c8a43e2c5aa336c67cb4e22635c8e15791cf67ab92e0efb73d30", "aea5b83e75a1dd4f705ef09097965dcd010806537361e228cbe275d783d03a6c", "61fab563337233435da3d3be1e8c0d2332edcbd5bb7085c931e5ed4de2f80ed4", "83044467ce97ee203e81fedac56db84ca469ecf40d278d6e18380db17a719cb6", "fc1dfdc26e01d3974267abd90281f512a6497cea25c198e79318c49a069987f6", "2190499382ade5b6211f7cb7ee8301140c25a8a1e9f95f78a253dd0cee72a9b3", "cdc317b64a7c7d6146d3e63d295b690cea5c8c5deed5e42b094361dcf2038614", "8496b471f706842289855bd5dad8e8ce5a45a0244a537407a62ae82bf28f283e", "dd68dde67735cf4fac77a75f658c01f30b3dd373b7443597c93cf1ee9e1c375c", "7d9fc45eb9727f3a1bc09abb274a904bd1c7c4a8b0ddc131a66d0c35fab12c6b", "d3212e0196e6716a17f83983cfd28a90d4ffd7e7aeb93659a85cc5585266d153", "529b13f078978955ed8c139326647f68298aad6515c978fd532d67814d68a819", "047170f4b389cb5ea020d89957aa1c263d00c7e5923c357fafe2a9539295a70a", "a78a5b14dcf7d45dc1147f12138a46aea7d74643f150947184121c4d8e83aacc", "5fc7cd475121963671bda69d4e83b5da3b915f94780f9b21ad11e14876e6a2ae", "ddf9d7f5b52966e8dc5643c2c7780ce8d5512b581859fad0f11d7862b9082a0e", "98d4c1b60953deba57b070f6686ad1e56dafabe4e0461ff823f7e4f1e2d68a6d", "da05a4b3332528d56f466d3eda964682bc31f90795155ad306960e85239d1570", "52da74b3f44371219361d635f8ec93f428b068aee1d49adfda3f1080b812c403", "03d5d11bb421694cf5829985b2d2ed69cdb66c59874e772f9133feba146e56fd", "95112eaea86e4518c06e90875d56fa96d2c2e1d279263b8aeb55e2ef609c0015", "7385b128fcd181847ccd65e61535a3b1e6c935085feb1f116d07b69f754797c9", "025829df5b0e89d33e50e4da9cbac3699faf423a17a01f82abb1dc5a4aeaf7bf", "b8d71572694b145ff3a891e14463c46bfc2a7f3ce66f4b72489dade529fede9c", "67106f52b3bebaf6148ca60c81bc8802050f299d8e3139a8045ef34a0ee8a83b", "c1e4c64335250f030a8dff08151d8631de4f1737973ece0a66ce5819a6bcdab9", "15ccfb66ee051bc937c87c622ffc726f5f6c9b2c83acf52ed0dc6c63d33e0764", "dabbdafa2406d76784fd51b3f5f4014f97e91a0293e96cac0d7252400793352d", "4c6fe6506950104f209a64e0975ced68826c9d6d5c604725c7cc38119741fe1e", "4c0da75b314859992796ac6fa932c9804e6cbc0372b8af03dc17ee487dd46a01", "126d57ea0faa1410e2bff97a97dee4bb95f931c65e424936a3c663136cf44b28", "7b2000fbbbcb50649b57f7de2fe8e0c2384c16839def35e4ca3b368306c737aa", "191a431907c471085ce9133b62f3ab70ad7ba440ac70790400981e68f46a3a34", "7c6b5159af1596f1b1116915f58686bf5943222da9e864f415626328ee0ae8f0", "c01fc7330f29cdc41647dc85b357fe1c734410628077db6c61f736f2288e91be", 
"c1c9811dc7c62642ae25fbecdbd276124bbb0b2b3ccde483d81831a092fe8940", "183760186863265934b5678d6701d33b02427f0260de63ab92620cdd0ea0a193", "91036fe1c4780fc9a73005bd4fd0e674d0fdd2c372c1ea036e03d89296322b08", "279f655e7eb78b83a915ebf71097429c2ce71ade9c0ef44f5342f7361dda1c1e", "5b9ea6fe50b0bc7338a425931d5587e7bf29ddc886f95a013dc265f9ad4e6a5f", "e58b9814df7395a036222c5154c090e1edb7413d786f744bc71d3a3e7d3ae51a", "72f05a38389a396e7e099943e7626432809e8fea44b2b59c7f5b1be6e544c477", "66efc642ef86130ae927b9a8211a7898a1a0d4633d800b069b8a435f38a87f2d", "57e2163c10bc4cf0291a22e157e30e2f3bd32774777d562d66b5a56785af16cd", "d8bb29af4ab87ee4c6a5f906da83b486b0cb68804d46520402560fd361f9c046", "08c384948e4a5437238b38307ef1433aad79196ccf3192061381fbe1cb2f95a6", "4961223a92ed9aac5200710c1fac16222cebf4f45d71f9bcf747772ebcc10624", "51749e1822fdbd6e3160abdeae195e281affc52170d4d350b3f205f742ed7b13", "14e8dc225152adf94b64a266a412317eb84fd518055718d4f8261e0fdf8a9826", "4c5dec521f84e603ac86babbe7763fe82125a9eaaa705d8cddd6eec95953a4b5", "8acce8dfac2236fafc944be02d072bfb63ddaea49045e31283d73ab38823fcb7", "12cddcbe68b1fabd5650ded7d323b80460ee122c96e3b58c8b5d29a17b917ec3", "d86759a0c43a2fde5e79adaaa167f9d05338aa8b2bc6fc5f9b1263164aa60343", "5267ab3dd6d646eb7bb1c04b9c23fa104287011a46714accc33f608d36d0f2e7", "d8d8d61f18ebffb56574b089b975016513abba64f68fe0da8c0f8d0a62e0416c", "b0f64d75d6754023267a8bd9dcdd975002ce1aea4d2e8103edf80ed391be3782", "b72d60462ce989b717868769b43678b933f239f977e22e2a0d61fa59721ee3a0", "be9a8aa7883625a2f43670b961827cb4d58edf21618af86e376abb6d743a54c0", "a233d9c85d895c54f9df1c93659ac3b1ad9f46458142a5310f40f11ee9bf6316", "75ee0e41d376721a8a59c7c9dd40282780a0ca863db78dee7a589cfc4c98b3e9", "8b34745c1c95a176ca7f21bd1350ab491763379a3ff99f60214003217f6a7118", "75e4c59a6469d9da7de866054c21689625786d6ced18cf6130aec6fd45766025"} + testnetHashes := []string{"fa857cb5d4b774e975d149a91dc47687ab6400301bba7fab1a70e82bd57ab33b", "57c87eeb449e976020fb60b3366b867ffc9d88ec5c0f10171af4c7c771462130", "1af58b777b8054a15f3e0c60ec1c0501bd7626003a4fabb2017e16f1f4f9b0aa", "155eb38e56a75c59863434446071a29df399e0b79a0f7627f3c0def08c0dee4a", "fc541a457aaacc00c4bbf2ddd296c212c7c7436a1b15fdf40971436f4679060b", "22461d010d68d2b67a7a3373782af7f75eb240a845c4b1fa1c399c48f7d3eaad", "e62881132d705937c2a0f88cd0e94f595e922e752f5a3225ecbb4e4f91f242e5", "61084954ebe8d12d9ac71a9ce32f2f72c5ab819ab3382215e0122b98ef98bf6d", "b65786186ff332a66cf502565101ed3fdf0a005d8ea847829a909cafc948cdb7", "6ec4b77f75ce5bd028a22f88049d856dbf83b34480f24eb13ed567de839e06f9", "c1db0ccc2f546863cda1e14da73d951e4fa4c788427f13500a1a7557709de271", "b8a6f83f59913bece208fbc481bdf8a0ac332433f8cb01a3c5c1b7ae377f2700", "9a8c588bb81d8c622b8c6d9073233c176440da4dce49433b56398c30239cfe8d", "a84205d415780ed3c0566f9f4578efeb6ec4ca51f8a93cc7f89a00ccce8dcb39", "c5d6591d91eef2ca446351e95dc4134438360c1b7389d975d636cbacba435280", "7be74dcff396c8abf98c6727659575a5b157c9ec98c6f1c9504732054f09aaf5", "a7dcad11df6d5778824decb3624953440a2e8f01036083c10adb36b4465ee14a", "ac6e904295a3d736e7f22ecb5698c1fd8964e3f0afc07ee2487e63ee606b9bbf", "d7c2cff7f8a08373b8aed134fe1fa80899ddaaa8dd7722fca9b2954228b25803", "580cf925b0d2ec1617e17f0be43402381d537e789bd5a08c3a681dcdcae2d731", "c71cde092dddca890f9f44567a651434a801119dfca6fa6a8ad6daa26ce4d6a2", "b010526b4edd19af408eae841184d97f1ad6e8955c4a6ac8240e32f75a26e5f9", "9278a4d8204e7b937c41c71b9f03c97c49203d4cc6e4e6d429be80ff1d11bf02", "d57366198709ee6be52ea72cb54cfb6282ddd6708e487839f74b93c06c9a994a", 
"1d17a3f34d23425ad6fa3b1f57cb1276d988c3064c727995cd6966af22323830", "660a0a66a46fae20c0a4f2b1a5f11c246ce39bc1338f641ea304cf2dc9bd0940", "e4562f14b6464d2ee4e92764b6126fab3b37b12c8b0ccb0cbc539a0f1d54318f", "3ed39df06d960213a978379790386ec1c6df288a524c9bc11dbc869d1133e86d", "f09abfcf424b6bcb7a54fc613828e5ff756b619c957c51457d833efbbfd9c601", "58b6fe973b269639c2a6dc768e1f1f328c3c1d098b6ded3511b1f8e3393f8344", "398fd65258285061025e5b53043496832acca2a6b61906046605df18767a9da3", "b933d1d819cdbeff8e3acf9cba0fe7b3e6db3bb582da027a0f1e432219bd6033", "99baada49d56352f2e221cf62116c70485a83c1174bcd50cf5ba62b35d1661a9", "19a47884389d1f995a37c7e2b19525d44a27a32a5df2c0b9c2954fe458655baf", "3820ea36958821d31b8f2eee80fc17e72dcf361f052c0399931ce979e9a10293", "3a3655c7bb4fb1814002b468d63f72c0626d4c7df4ceac28a68c970a3686712a", "bc181caec490ade2d715e7d0c82cb9ad3fd685dc962d8ffca00861d88f5366b4", "da92ccf74d37b40738c41222cee137c149889966c54d62d91472d2ea81be37f2", "e51d0d81a40598e0d6281d2bbc56a1d8c5aa3c8233f2bd9be2316ad6a24a2dc3", "6cb2e1aea92658471cc40ec0a4bfd64d8e76bc0b9bb5707306fe89d93158e7c4", "e4bb2e67f5ff721ecfac0df301bf3db9704d47a9d33c2f952be17dc23a113c45", "7d29bf4f9796573cf5274900ec667bced39cb0377409d281a2dbceaf99ec8fd9", "45b32bbc856daf25ad81206623f8a7fb53f0afbb488f72ffef4d8f0a9431e62b", "b5aca33f4af1f65d9e9e35035597b58896d99abd5b7954593ffc70c86a90c94d", "7a21bc1136bd1b288fb5be1fd43b39cdfeae9b424e3da274e241dbc1ac780d72", "95bd53bea9d44609b8b24ff5c30feb08c91d92f239632f8093fbb8f37a704112", "61551f4fb10bd3b97870af25c6c18d8582d6badef8e87e3c5297befd1331003a", "ba43a4bd43dcdf44ce163b58d35df3def39f2a2ed29cfcf76f3d7571827b8bc1", "329c277c2f0555d33e294377bf906c404a163ee653d0894661714a25b1d3c8fb", "6e143a6cf96b0b8eb695bd77b1e28f2a61f4dac8a47b3cf2b69d6737d8441242", "991bc0911f5914677f4ba476717a53b0b889b91cf178ae66c0625167f7ac0801", "541fb4e3a4fc928a017bdce01393ea8113b2236dafbb3809973f7b8352442d32", "9c9181ad53d6506666187974b6b9e3a9c0bee8d085d10cc79f50bfb4248ca129", "1cb89bb5668ac284574be9118a78d3fa5d674c84579c75d5596a47d2acce29f6", "116b1c4d1a8fef4cd852a8841b689fff4f1df3a0f5bbeb545942150f4b806646", "b54f3b2b235b816bda74453e228378fcf9b79a293534aac71dbfeb6b0ee1ecad", "9acb23972960f0b4c5d3c6b061a2a1c4af4f7a6d4a0cdd8ec7134ae7bd59f95d", "17f3d6c720bc5efd5ee8226d353d1b347828e621400a2a282a190f5b7bbdd0f0", "1838dc6001bb37cff89aa8675ec0ae8efdfd35c5dc8a793538c31d08df4b8232", "ad362ed3de8ac036d4a89d31282f26e10cb50fa900c6ad76f7ab06cb7155d234", "2bd6a5464607a39d0bcdd07e15d4752d1a52b644bf9a81d8d7e5f9cff0af30af", "44124bbba59755b9004d53c3e721820c40c1cc163b7639b4c1a03ce6955e292b", "f19520a13533371cea4cc20daeef421c31c0a88d4604e58b56ebef82288cdaaf", "c1796053a6e8847cf3d8a545670dd953d1273dd3d9a6e4df6e59e33950cc2890", "49aeb76ef737a04fe91c3a61dc8c7b87adad5978d8951f8d033ddeae6fa2b720", "bed2427fb70a9a9a576528569ccfd8fc86ab0ecd4ca7a932d5a8f39316f887a0", "a8da98fa12885b4165f7635906d9bc240c2eaa66079bf18f496dbecb68c7c49e", "cd7e523f67b5ab520d1c8972f78db9a8d283c66ccf000aa31cda8216fe2e508b", "1e29c627ce7b6402eb5115c59a48d561f4420c44748d7de2ed185142beab4a29", "5ddf101e94858f06934c6019eaa22b93d88eca16592720e9dcd982894ac27060", "c408705873fb0ab3fc4f5811e69ee20b0a1600f52bb4663e29362f4391601ebe", "ddf70a2c37e60622148124c22f8f0e96b4eba0af4d5b8b18015d574f33923a7e", "d6e1f406e0d96c486c1bcbb09768ff0e5577f18c97cdf2c3e86dda54b4007448", "656b861ba19271a6591c7468af61a9d29e331eccc9e526a3d25517d29bd69809", "24372783456ac149b4fd0dc41ee16d55500a3c433fc3b1bd3c1c45c8a93c89c5", "2bbbb4392ab7f1fd8a160a80163b69b5f8db16fdf97c2d8ee9e29df1d9ebd9fe", 
"cc9fd404792808740bdee891c8e93e3d41bfe56c2438396d1ca8a692dd5fb990", "38080ff661e3142133b82633be87af6db2d33f386d05f8439672a1984aa88d13", "22b7125bf763c17087306776783ab6d1c50084e8a7435b015207f99295aa1af9", "570c31b148e5f909873e8d2253401a64eace826993948cd2f3f4d03a798c6c54", "f0cb29da50bff805a3a1736dbe33ea139893534d0e25a98f354aa5f279adbc97", "cd6b07cee12ae00058b20a6d31173c934933e6339a00885554ccefde008b12e3", "323fa87c41960355883ada3b85bbc13303d8202761ea70d015841060c7f7fde7", "01c7c87db4a01af781695e2984e68b72f04a0f7859749bcfdcbee73466bf0990", "a79003be6397a1fac1d183ebd14d72f69cfd9ab310cd8f9cc9c3d835b05d7556", "50dcfbe053447768b56f6c3159cc6d37aa5791d87abfad32b2952e36de8a20c7", "21647bb0680b8b09b357a54518a50d6c4163d78889f26ef48bc93cfe43acb16d", "96dfe03bc8aa7dd74ef98b4cb7cad866c851b8fd145f4b5bdb54c7b799e58adf", "87037ff5508a2a31c62cbef1feb19f3ec22f44ade292e0a036e8a7d8ef3d13bd", "6e7336d4e63a744ae45cfd320ca237ba4b194d930bcbfbfde2d172616df367b8", "780126f3f77af11cac4a71371812160e436d50f09ee01eb312d6839b7dd4e3a3", "9373a2bdc426bc5bf3242c7f3ecc83a19f2cfc0772ecdeb846e423fc8ec40b5e", "0339e7901bccba1e3c8e05956536823b2b0e7189c66f5796b7602b63a8fd1ff9", "b213bb94b274991d4288a6405954059e99b4c4b891a74a1abcd83ea295331b18", "d0a7195ec0cd987709b4dc6416e0ed6fc9939054ecbf502da8c4c6a09836ed9c", "7b9c334b3aeb75a795f9d6c7c0ee01ab219f31860880eb3480921dcd2a057d2b", "9c4e722d126467603530d242fe91a19fa90ebd3a461ee38f36ef5eefa07e996c", "4306ac8ccd2ce6a880350f95c7d59635371ba3d78bb13353c5b7ff06f7c6fc40", "4b9360e2d86f20850d2c6ff222ed16c6a4252c00afad8d488c30c162b3a10da7", "927f20b9dcfbb80f4a6b5d6067a586835bdcb5f3e921ed87bec67fb5160181d1", "e620bc51fbeb8011f57324b0a7ae6f45c46050cd624887f0a50879880632fdaa", "ee7b749b81e86d46fa3e93b9aba29285bae38a91f175dbf7c619d05fcf91e857", "573d5039fa570ceb3fa136be73c432b49a19af00a7f109325b78160f7dc13db1", "9ab1936825e830d4eab7a945701528579f78a8d1702a76a774e7456ddd3a254e", "2b3538a6fed897c0143f51b82f7e9e1929cb698e7de8d88aa8b1d23cabd58fa9", "21e2f8ae0522da985262ccf8422d98d75068ccd448d15c4bfec9f793713c7644", "c02a276e24fbb64f5b35d4b6555d1d873095e076868cea8dcfdad9e606612f9b", "7756adb6b470c5126693a4de57c1d5b38afab4f7ffc4f982374e8466051bcfca", "f82cbe9343e63fa4bf486f8e4113f91abef7c994e6f7068b500942fede79f095", "782f9df4e3f669149a575922a7318d523b1ab8a5911a2b1c2850839d5762cf03", "89ef33e05604e28f762b3cdf2f20d876adcb104a87c2636c5facb61ec47d020c", "59e374462a0c7e32df5e087d4d250936ef54aa19ca824ebaa63b66406180719d", "11fc2b68e458f12e93398a453c5efac599691bd89d40c35e003dc594d87bf51d", "5f793edc159efab968da834bd44187fff951cec822ca1b8982b1f36d966956be", "da0d474d5e0ec5d0966e1986a5de3f085e0f491da67cdb43d52fdc9848b14314", "8d4eec56231819d18f3fb3ec6e6881b269c0ccb881eedecb5916d2b4ef82c6cf", "137e7ea7c47a724f8a4494a3e73e74f146282382935d64d25385dd720f537e98", "1a2a9c7707443c848897141a4f659fbd0b7fefa47365f2af43183777dcb4a8ef", "7747a6f738959e6d75f16fe6d0782b455258b9c93d0380a230722cd6ae11e0bb", "314e30caef6c7c09b2a85056610949febb6abbbf7702c5d6706cef658123d782", "9ab42848b175c62790b5aa4f256899bb609d05723d364b8d349160afadfd9f95", "853b07dda09eb155dcebbac23e2fa5d76c5f619f3cabfa5e25fd82706485bd25", "a2b0053632aafe21d4dff287c03c362cae2a1d3267cd87d82a7ba9a3795129c9", "7918541145cb2c5918b8fa20a31298a7bc9b8f43aebb69f046f78d070a7f22ef", "0827e91cf9ec4dbb95966d68cdeb90dc8399457f47922d1e53eb2972c87756ef", "6121dac0131fc1fe0f7652d6c2195141c0e6a9b7e5cb555647ec3bb2f90b912d", "134fae4eec772042a832efc19e2f3e449db962f3573c070f2920591c306967b1", "b9a716636f3d1dd47e61aa1216f55317230cf734e06c9f740552f2bbd6e8210a", 
"d5caa5c0bb57e75c78de5f6f132e19776b777dd205d37ff6c2179412caa32c40", "e11c15139b71e7078a664d430e115c631ac8cdd89a8f4b35e4bbbeb9ec85dc17", "cbff909b284e4b1858adff2a0cee75032a2b2411d805604dfe820e40e855d6b5", "5b4ce1b89dde6b8b5cbec1b454306b7f53a9dadcdbe5df429ea5a33635d989d3", "c06a55411e962e0bf9cc11c14e854be084906b374cc181868c29ebcab0b66775", "ad16c4f73055baa8c0c6f69e294019ea90e3e97ee90923c4478156e15180d19c", "76866d7b50747a469e9891c529b7a58a4b9082d113b7acbe2b46f6049a8d36c7", "df96c9eba4763a1c3a8a0d2eb14e57847ce679adeda80b04cb86ef4f40cf290a", "6421d33aed4529b00db819051abed4ae78f28778feab921177c24378d48b427b", "cb76cbf3c146f5890eef6a8e78349b9291b75d2ca3b947b027f52dab0acbcdd4", "cb9a9e1606d5d6cc59bce096733be7e6902d8c8de19d22cc0f5435ad4e719015", "d3b9005c6b93a657d8edd2312d4d59b8807ea7c509079dfd1e4a8cef3d6852ba", "ebc705fd3ee20a69c5e99b1bf063acff8c926eec9358a36294b8df0fdcd31eb5", "c99e64329e066cc19b2e9962bfa2eb474bb7f9bd1c797421878209c16ca85d80", "55a081aab8afb0cfe83873b812c4495a762bdfc866d74c038d64f73d26944db3", "d830b389b67743e2a2cec5d64af37ce1b991b2781bf2a3fb1e8283bc78e98495", "558d06ff221f4d6e5265465ef2928828a80b498f95d7b1853c4a93d842931ccd", "e967f7ac0177971566b44535eed88a5ffcd0b2ec09de03edbf817f8e110eaf5b", "404df2a8bcf278cae68d9a43b86ff9c2781461ccd227c20aa5e0c5b1db2c0cb1", "f8f5160a6d1e91a3cae676b1e8f8563da2e1cb92869df51c190f0d91f62c81b2", "30a23be3cb0e3feab447217745d537e6c5299f3a95172c234bb84de54169b694", "7c5e66106c5e7cb9e68cd6bec431acdb4b0c9394f2c000a60f0ec558b1667750", "be103be330df170331a747138325af15173704afb808abfd6fc5742c677de241", "d711f0d3914c1bee36324e055ade9058750f2b3d0206f516382702de8eda3757", "519658c8746832821044b074a40661ea1497ca50426888303d8eec43ae8b9d6a", "87cd56d2f6ff774a0c75b029c2a888df7b41319380336f3e4663fe5417229687", "2efe240e7018fd0443262223d286c04120199063f4ef194bdef9af0ab34fa4a8", "8c9a69c950bea4e4beecc286124bf44e2cc78614f767580d59dc22cf94bd23f6", "e7641851ddf32f8fa1937528a2c88a2ef512d45f0a7296c232df6584471ad7ba", "a5beb770e26085eb45a6c5e15acb5844fdda167261e92b20c87dc72c1e0d0a1d", "f54988150d2ba3327251b7a4672ec9bff6fe93f06a7a9f19030f17e693281f11", "6cbfa48ae32ef9b3798f0afe4b86798497a758735dc3ac3e0aa6b42710476f58", "35130215ec7db0e57d5964dacb9aa2ea858e70fc864edd08cf062334823a3ce8", "a935e9ddece310c12baa815a0077e151b300a293f88651d7715ea33151d4016e", "167e10bb4d35aa27a4916de2f846ff5d323a0090c9d37b9c35ca455272ab07be", "a85a1222927f535ca37587d38ab4db2bc940bfc0c6d703003119329d05469a75", "826ab7e279754c009dcd86421f3bdaaa3325bdfff8352788c9f8cfbdddfcfafe", "8336015f3f6ca5d69d5af6dfd521a3e3c024c08121bd42de3a25e5bffb417d42", "194125cbe3f428afbf59da1dd144062ad288011e10beca10ca534f935ea7290d", "3bb36e7a0165d3b6f51b628c18e6b4d9e355b05c5be7a616c881dc395c623c66", "c092f7add11cee0facec22c78badba46fb8688538df1443b7356ceea83bae10d", "31552a2bb308a5778e815fee39b007ce5a633d2e7ba27f08eee2bec6f8d387b7", "373533933e0aae2d2dcffb59b09c49fa64506606aa0359eddf00326ee7bbcc7e", "0f580299cabe89b2dc9809735d14fdabf60cc1b65824bb5f6b5cf283b68210ee", "138c02ce7b36a4d7e82f942a3291bbb357b2e8845b579189ce4c35e01e6b859f", "9dc184037f271c4043b1a6d01d9fbed5d2f156fb561ec2612e5b1cd6aa486083", "cb2ae942cd73059bfe666d9ef78cee5a557cda842c9503df0f7d6b00be815cc5", "f941433597eaa923318023f040798918f743db7bf6d33bc6a13bc8c2e8d3e711", "02a1f2c523e2705b1ab122a06c08bd64080ef76d09d517c56c4e64a3f6626021", "ac3dda90e10c66d26ebb6911924713785f48e8e3d2150aa06ae90db456e1c9a0", "61a39a58e915f953d1ea5c0483f3f45b33ed6f097d76ea6d03d7cf81616f33bd", "b3ff677201fa7543da2f635753305a128c4076409268f1ee53ee824989193e90", 
"3a2cf44822616731ce40cde80365738e4a4d9af161de3cc2bb3e4f4d3ced8009", "b1a3f23c441a6afece152c4b2e1f1da6fc952f997bc8711a6122e26afafeb5b1", "87123bd9968d64fead15b346ad4ef3b0918aebc596fb7ce8c016c09085985bbd", "eae98597fc685154c882a62073157e1538e37270573de17e7f9bd1af724e1164", "e6dc4cfe6c4b77ebc2a915a49157447a65f85c275ba6c888fddbfae95a2d1c2b", "45ffffa2166eff3624a6b83e5d953669e3639188556330a58656d51ac9008f15", "2b5658b7d00f6d34890e71cf1d57b520e934f6b4087cca5c50604a7c8190488d", "d9b516ec359cccafc8cd2c5721bed137cb0d4b7bb21ba4772baed786a9f059a6", "5005e282fff3675ff3ac18906d5cf9df5b992d0bd95fc9cd3258f386f1c5b5ea", "2dea763455c4ae2c662bd9db6529b85cfd397744cb3da1a639925b0fa2b048b4", "497399dc295066a487984ab67cbfec9bf3d65184bc424a7b96268f2c03e6557f", "8f87e5ab712b41e1bc6f74fd74bb8e96323f62f62bedb35ed578992ddbbd5f47", "a5504fbce2afcd7277b0bd94581050195607d5c6701cff8d8e25f05a2d50d81c", "205b534ee10a3633f87c8ab36590d114f516985470ef5851077ac5c95aa83f16", "0d2093c088c08840643f542a44d9e8c389694f03dc9c62a264445de5758e73c3", "b32c1de573b72b62ce6b77d628f758acbfe89ecaa17d3c4c94cad8dff45dd0c9", "6d75d744de2e5dd7ebd3fa47b22ca0d99d4255ee36b5e767567479e0134e0697", "d3228e2e8e5de7178f2afc4b6f86b13287469b55410a164397bc602a0e3bd2db", "5d0a5e9e280f90c7d1f69b69ff3b5bfb94bce299dde8799520fe92912afd2cff", "aafabddd3fe15559af9138aa113c2473fed25a41ee52877a05dc2f9b24416827", "00a9160b3ae08d4066e53992f3cc004b3f6bf3d840613d6e847fb16323ddb270", "1473078fe8d18a5e3f791064c1083783fdc19517a3f2af47777d8778bb2b2f89", "aa2b720f1b7fd016086641fa0c3a6f8133c5f7eb3e9a65cd01ad0b51e7c35719", "ecdd45371e9a284e97416f414d665afa0aec864277a03c333e785e4d6ba6d439", "66a7301e8f3d54360b15fc64610398888301a3caeb685dc71e0ec0fdd175937f", "bd156dc25f23d82eaef927957d4c8c883ec0c80de4c58310313764ccc701d281", "3e8aa53535920d5886779d30687c2350800e9c712c5c2414db463b9c99f3052e", "308a237dad23fa158e7590ce7c75e788ec3ae6be8f6972a867f2eb94f6417c96", "4b12d020e1df286f672fe5d2eac74d95f817d0bbb8bee15a7913ebd9c3a8014a", "303c6f66eaff75bf2145e3bcc343245bcbedb2df46af1fb1e8382473fd2ab402", "d21e974892bd9209a0e2333b22acb55ec2a4abc015755379640cb81d4ba38d82", "40bdb0c10ce735f5e6abf18bf46dd8ef5625ea828fbfc6e380b70809d7cf76dd", "c0b4d28f557f71bcc41eb3573e2afb6da0c127639972bbcb8f4962cff0896f7a", "d2e36f3773f4c313fafb160ac753f1a11b53783920d45552b693f7a37b80bbe2", "3fd160ad0045137801256a22fed09f5f31aacf31f1681fbf6d70bc03972d2253", "2c0c05796774bdbb27c0a6ec5559817b4cd48feee80dff2c540257f86733e397", "5f17ad7ebf06c9ee5f7c86716e2392fd65b773eb6c94f47ac1ea1e12afbacfd0", "0dc16b207a0a9a722cd0b6ce18419eeb2c7809a9f90f3ebca7cc084d6714469d", "8f576a107b37c1309055282825effed4d57dd7e96fd69595ad300c26f77b07a5", "b433f6a339e84a5dbd8e6638a4547dd029b642d1007199948678d7574350b64e", "738768e552067738d3ba97fabe8ea93c0a6ba3b64cc24fab0e9b0c2ce4842982", "533020acb857afd489d4766280665cc484d184ed8eeaacd031e8a5e70b5c4a88", "1d84007a810cb751a5f7207b36cffb1a7f50c1553cbcb0c922c7cb1ada8bb409", "a0b398eb392174cfa24948edcf03c50553a7367c7f6ed50970456484ea09680b", "f156f642f5fd502eb9d0fff911981506c32e6c40b12362e6b3082dffb7fc6550", "2338e90aafd734d44bd50aad3f4d0f4255e2d2505546925e810798626c79f4f4", "c141f87ed878c297468d5be367ce8df0c7d90be4b6be070059eb9345f8250b62", "57106030bb89bd435844ef9baf318c9696af10784a4cf09359bff4b22a4d74eb", "2419aa33614ded3307173c53d6f614b6567d6f50fdc9a99fd32a299efc3de982", "07e60b9438f0b0fc97151c34b781b2a6370cb4d6c48ecbbfe0016a24ebe7bf31", "c09518a1b22c36e3d599af9f956090609fff015a794680a12f730364c721aae4", "65ddb5cf2927237525c5b3d3613eb346660cba60d0478ea917b6f0aa4907d7d9", 
"1c8935ede01448904447520b90c742615062e404f3525fe5bd667e06f7341c13", "fcb9e121eb526413ec8c827a3dda5e619a85ffbcb7508f0525ac22a121a100f5", "6d0a6422309f64d722ba79f621a4fe3db0ecf16b40366313b146a97d95667307", "4ad7e9d2a199b2eb3cc1cf7bb35e7b03a0ca18bd7382ed29a18b97ed01cd63a0", "69e377941f0263ce3c585789ae6106782d1f15db0b1942a9627b2bd6fe83e13d", "bb51ed5948d59b0dcb2f5cb5f8a27d3f70b8c71660b0d6d4ab658b6a7ca2356c", "6695e79e0e07fde8c05da60736ba373d55271d5a7c6da2a2c7d30e957a46e7e7", "48bd888c98b158b5c82b148f091a91bb1881b9a1931227f0a5269649a8eebaff", "771382cfa5138ccd32fdddad18e3eb8f1a06eee10704248d1e4d49f32872afe6", "176bb2e118aeb292912fa1903470621ae385e819a50c580301b33165666f3c7d", "15596f8c5f8fb397e5214e6f5eaf286a813b6e5d8bebae2bad1d550511f92840", "bdacbb2d763783f1ac51fd2477276543f79db13a434697a2aedd8523a1427e1f", "ca0e3b746890e8d626840d445989bb0e703f3e4c792aaa49a6b8952ea7696063", "1319af4c3801a463f0e1b7a9cfe2cfbb79e769fb0daed1a2868ade7665765ea6", "172f67582c5270cf0ef8264ef64bc5e17a53aac87693eff1860dfe56aea4209e", "0462589f719e853654d1ca00038dfc806ae7acb9bb5a3f9e6d458f3d4206f532", "f7480a6f46b553517f41238cbd5a6069eab164fd1512e1685f9bddf5c1afa59c", "a5cfdbe5c0b38b0904b5fe6afc2ce583dce1dbc7b4cd88224cbd88efa30b0291", "6f07b548ced6405ef78693332d516d041780f85f0771cfbaba8bbb86a6cdfb7d", "de0184abac150e780e26f1e7de09da64dfee433e8c9a9efe8d93a673350016b8", "8e7cee539c6315ad939a9495e40e7e70e2d07f6b2920cdbcc689457cd9e11997", "0088ccc025bf814e8098607bfbd17448024495a62610700b6000ec448afc1ca3", "d3a0503fdb8802e979871dca7d3c10a928cedf1978e44f42ecb72b96ada13dc3", "add0b405d079dd0c682a1e5026ef1a5b989b0bdf044d2db28249b4d51a74c5dc"} + + // Convert each string to a [32]byte + for i := 0; i < 256; i++ { + // Decode hex string to bytes + mainnetFixedHashes[i] = gethCommon.HexToHash(mainnetHashes[i]) + testnetFixedHashes[i] = gethCommon.HexToHash(testnetHashes[i]) + } + + fixedHashes[flow.Mainnet] = mainnetFixedHashes + fixedHashes[flow.Testnet] = testnetFixedHashes +} + +func init() { + generateFixedHashes() +} From 333cbe9b5edf4ca8cc03a1561bd9565b1b41cace Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 14:26:42 +0100 Subject: [PATCH 12/36] cleanup --- fvm/evm/offchain/blocks/block_context.go | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index 38003e0b771..39e0e72f81c 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -1,10 +1,11 @@ package blocks import ( - evmTypes "github.com/onflow/flow-go/fvm/evm/types" - "github.com/onflow/flow-go/model/flow" gethCommon "github.com/onflow/go-ethereum/common" "github.com/onflow/go-ethereum/eth/tracers" + + "github.com/onflow/flow-go/fvm/evm/types" + "github.com/onflow/flow-go/model/flow" ) // NewBlockContext creates a new block context for the given chain ID and height. 
@@ -20,20 +21,20 @@ func NewBlockContext( getHashByHeight func(uint64) gethCommon.Hash, prevRandao gethCommon.Hash, tracer *tracers.Tracer, -) (evmTypes.BlockContext, error) { +) (types.BlockContext, error) { // coinbase address fix - miner := evmTypes.CoinbaseAddress + miner := types.CoinbaseAddress if chainID == flow.Testnet && height < coinbaseAddressChangeEVMHeightTestnet { - miner = evmTypes.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) + miner = oldCoinbaseAddressTestnet } - return evmTypes.BlockContext{ - ChainID: evmTypes.EVMChainIDFromFlowChainID(chainID), + return types.BlockContext{ + ChainID: types.EVMChainIDFromFlowChainID(chainID), BlockNumber: height, BlockTimestamp: timestamp, - DirectCallBaseGasUsage: evmTypes.DefaultDirectCallBaseGasUsage, - DirectCallGasPrice: evmTypes.DefaultDirectCallGasPrice, + DirectCallBaseGasUsage: types.DefaultDirectCallBaseGasUsage, + DirectCallGasPrice: types.DefaultDirectCallGasPrice, GasFeeCollector: miner, GetHashFunc: func(n uint64) gethCommon.Hash { // For block heights greater than or equal to the current, @@ -67,11 +68,14 @@ func NewBlockContext( const blockHashListFixHCUEVMHeightMainnet = 0 // Testnet52 - Height Coordinated Upgrade 4, Nov 20, 2024 -const blockHashListFixHCUEVMHeightTestnet = 0 +// Flow Block: 228025500 7eb808b77f02c3e77c36d57dc893ed63adc5ff6113bb0f4b141bb39e44d634e6 +const blockHashListFixHCUEVMHeightTestnet = 16848829 // TODO: HCU name and date const coinbaseAddressChangeEVMHeightTestnet = 1385490 +var oldCoinbaseAddressTestnet = types.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) + var fixedHashes map[flow.ChainID][256]gethCommon.Hash // generate the fixed hashes for mainnet and testnet From f880fe7b20901c12ab70d4b1a3adf96af5067b54 Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 14:31:45 +0100 Subject: [PATCH 13/36] add more details --- fvm/evm/offchain/blocks/block_context.go | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index 39e0e72f81c..dd69a7d10c4 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -65,14 +65,19 @@ func NewBlockContext( } // TODO: HCU not done yet +// PR: https://github.com/onflow/flow-go/pull/6734 const blockHashListFixHCUEVMHeightMainnet = 0 // Testnet52 - Height Coordinated Upgrade 4, Nov 20, 2024 // Flow Block: 228025500 7eb808b77f02c3e77c36d57dc893ed63adc5ff6113bb0f4b141bb39e44d634e6 +// PR: https://github.com/onflow/flow-go/pull/6734 const blockHashListFixHCUEVMHeightTestnet = 16848829 -// TODO: HCU name and date -const coinbaseAddressChangeEVMHeightTestnet = 1385490 +// Testnet51 - Height Coordinated Upgrade 1 +// Flow Block: 212562161 1a520608c5457f228405c4c30fc39c8a0af7cf915fb2ede7ec5ccffc2a000f57 +// PR: https://github.com/onflow/flow-go/pull/6380 +// TODO: should this be 1385490? 
+const coinbaseAddressChangeEVMHeightTestnet = 1385491 var oldCoinbaseAddressTestnet = types.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) From 501d0ff1a41b6918536126daa053e669f7c2c4d0 Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 16:02:21 +0100 Subject: [PATCH 14/36] fix init --- fvm/evm/offchain/blocks/block_context.go | 1 + 1 file changed, 1 insertion(+) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index dd69a7d10c4..f82dd5f2387 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -98,6 +98,7 @@ func generateFixedHashes() { testnetFixedHashes[i] = gethCommon.HexToHash(testnetHashes[i]) } + fixedHashes = make(map[flow.ChainID][256]gethCommon.Hash) fixedHashes[flow.Mainnet] = mainnetFixedHashes fixedHashes[flow.Testnet] = testnetFixedHashes } From d4b7668be481190bd5817fd414e509628a087c8d Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 16:23:08 +0100 Subject: [PATCH 15/36] use new block contect in offchain package --- fvm/evm/offchain/blocks/blocks.go | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/fvm/evm/offchain/blocks/blocks.go b/fvm/evm/offchain/blocks/blocks.go index 7852d0bd90c..0d4808c8154 100644 --- a/fvm/evm/offchain/blocks/blocks.go +++ b/fvm/evm/offchain/blocks/blocks.go @@ -108,22 +108,20 @@ func (b *Blocks) BlockContext() (types.BlockContext, error) { return types.BlockContext{}, err } - return types.BlockContext{ - ChainID: types.EVMChainIDFromFlowChainID(b.chainID), - BlockNumber: bm.Height, - BlockTimestamp: bm.Timestamp, - DirectCallBaseGasUsage: types.DefaultDirectCallBaseGasUsage, - DirectCallGasPrice: types.DefaultDirectCallGasPrice, - GasFeeCollector: types.CoinbaseAddress, - GetHashFunc: func(n uint64) gethCommon.Hash { + return NewBlockContext( + b.chainID, + bm.Height, + bm.Timestamp, + func(n uint64) gethCommon.Hash { hash, err := b.BlockHash(n) if err != nil { panic(err) } return hash }, - Random: bm.Random, - }, nil + bm.Random, + nil, + ) } // storeBlockMetaData stores the block meta data into storage From f4654c80fa178995d9850c5d652bf30195743b86 Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Thu, 21 Nov 2024 17:27:19 +0100 Subject: [PATCH 16/36] add mainnet height --- fvm/evm/offchain/blocks/block_context.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index f82dd5f2387..9085384de1f 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -64,9 +64,10 @@ func NewBlockContext( }, nil } -// TODO: HCU not done yet +// Testnet52 - Height Coordinated Upgrade 4, Nov 21, 2024 +// Flow Block: 94361765 4c9edc817afeaaa6aeb5e63504ed3f5ba8bcbba3931e53f5437d911a1129b431 // PR: https://github.com/onflow/flow-go/pull/6734 -const blockHashListFixHCUEVMHeightMainnet = 0 +const blockHashListFixHCUEVMHeightMainnet = 8357079 // Testnet52 - Height Coordinated Upgrade 4, Nov 20, 2024 // Flow Block: 228025500 7eb808b77f02c3e77c36d57dc893ed63adc5ff6113bb0f4b141bb39e44d634e6 From c38f6cef3acf3da2e98e9fc5c155088cf9de2128 Mon Sep 17 00:00:00 2001 From: Peter Argue <89119817+peterargue@users.noreply.github.com> Date: Thu, 21 Nov 2024 13:35:38 -0800 Subject: [PATCH 17/36] Update engine/access/rest/websockets/handler.go --- engine/access/rest/websockets/handler.go | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/engine/access/rest/websockets/handler.go b/engine/access/rest/websockets/handler.go index 911c8fc55b4..247890c2a62 100644 --- a/engine/access/rest/websockets/handler.go +++ b/engine/access/rest/websockets/handler.go @@ -48,7 +48,7 @@ func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { err := h.HttpHandler.VerifyRequest(w, r) if err != nil { // VerifyRequest sets the response error before returning - logger.Warn().Err(err).Msg("error validating websocket request") + logger.Debug().Err(err).Msg("error validating websocket request") return } From 549c64f4a5847f22ea3ac5bf495191b27e275e5f Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Fri, 22 Nov 2024 16:34:34 +0100 Subject: [PATCH 18/36] apply review comments --- fvm/evm/offchain/blocks/block_context.go | 26 ++++++++++++++---------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index 9085384de1f..8903d65c800 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -26,7 +26,7 @@ func NewBlockContext( // coinbase address fix miner := types.CoinbaseAddress if chainID == flow.Testnet && height < coinbaseAddressChangeEVMHeightTestnet { - miner = oldCoinbaseAddressTestnet + miner = genesisCoinbaseAddressTestnet } return types.BlockContext{ @@ -36,27 +36,27 @@ func NewBlockContext( DirectCallBaseGasUsage: types.DefaultDirectCallBaseGasUsage, DirectCallGasPrice: types.DefaultDirectCallGasPrice, GasFeeCollector: miner, - GetHashFunc: func(n uint64) gethCommon.Hash { + GetHashFunc: func(hashHeight uint64) gethCommon.Hash { // For block heights greater than or equal to the current, // return an empty block hash. - if n >= height { + if hashHeight >= height { return gethCommon.Hash{} } // If the given block height, is more than 256 blocks // in the past, return an empty block hash. - if height-n > 256 { + if height-hashHeight > 256 { return gethCommon.Hash{} } // For testnet & mainnet, we fetch the block hash from the hard-coded // array of hashes. - if chainID == flow.Mainnet && height < blockHashListFixHCUEVMHeightMainnet { - return fixedHashes[flow.Mainnet][height%256] - } else if chainID == flow.Testnet && height < blockHashListFixHCUEVMHeightTestnet { - return fixedHashes[flow.Testnet][height%256] + if chainID == flow.Mainnet && hashHeight < blockHashListFixHCUEVMHeightMainnet { + return fixedHashes[flow.Mainnet][hashHeight%256] + } else if chainID == flow.Testnet && blockHashListBugIntroducedHCUEVMHeightTestnet <= hashHeight && hashHeight < blockHashListFixHCUEVMHeightTestnet { + return fixedHashes[flow.Testnet][hashHeight%256] } - return getHashByHeight(n) + return getHashByHeight(hashHeight) }, Random: prevRandao, @@ -74,13 +74,17 @@ const blockHashListFixHCUEVMHeightMainnet = 8357079 // PR: https://github.com/onflow/flow-go/pull/6734 const blockHashListFixHCUEVMHeightTestnet = 16848829 +// Testnet52 - Spork +// Flow Block: 218215350 cc7188f0bdac4c442cc3ee072557d7f7c8ca4462537da945b148d5d0efa7a1ff +// PR: https://github.com/onflow/flow-go/pull/6377 +const blockHashListBugIntroducedHCUEVMHeightTestnet = 7038679 + // Testnet51 - Height Coordinated Upgrade 1 // Flow Block: 212562161 1a520608c5457f228405c4c30fc39c8a0af7cf915fb2ede7ec5ccffc2a000f57 // PR: https://github.com/onflow/flow-go/pull/6380 -// TODO: should this be 1385490? 
const coinbaseAddressChangeEVMHeightTestnet = 1385491 -var oldCoinbaseAddressTestnet = types.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) +var genesisCoinbaseAddressTestnet = types.Address(gethCommon.HexToAddress("0000000000000000000000021169100eecb7c1a6")) var fixedHashes map[flow.ChainID][256]gethCommon.Hash From e670c37438eb02cd4c5ece51909106484fa493b6 Mon Sep 17 00:00:00 2001 From: Janez Podhostnik Date: Fri, 22 Nov 2024 16:46:33 +0100 Subject: [PATCH 19/36] extract method for block hash correction --- fvm/evm/offchain/blocks/block_context.go | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index 8903d65c800..a18c7077378 100644 --- a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -48,12 +48,9 @@ func NewBlockContext( return gethCommon.Hash{} } - // For testnet & mainnet, we fetch the block hash from the hard-coded - // array of hashes. - if chainID == flow.Mainnet && hashHeight < blockHashListFixHCUEVMHeightMainnet { - return fixedHashes[flow.Mainnet][hashHeight%256] - } else if chainID == flow.Testnet && blockHashListBugIntroducedHCUEVMHeightTestnet <= hashHeight && hashHeight < blockHashListFixHCUEVMHeightTestnet { - return fixedHashes[flow.Testnet][hashHeight%256] + hash, ok := UseBlockHashCorrection(chainID, height, hashHeight) + if ok { + return hash } return getHashByHeight(hashHeight) @@ -64,6 +61,18 @@ func NewBlockContext( }, nil } +// UseBlockHashCorrection returns the block hash correction for the given chain ID, EVM height, and queried EVM height, and a boolean indicating whether the correction should be used. +func UseBlockHashCorrection(chainID flow.ChainID, evmHeightOfCurrentBlock uint64, queriedEVMHeight uint64) (gethCommon.Hash, bool) { + // For testnet & mainnet, we fetch the block hash from the hard-coded + // array of hashes. 
+ if chainID == flow.Mainnet && evmHeightOfCurrentBlock < blockHashListFixHCUEVMHeightMainnet { + return fixedHashes[flow.Mainnet][queriedEVMHeight%256], true + } else if chainID == flow.Testnet && blockHashListBugIntroducedHCUEVMHeightTestnet <= evmHeightOfCurrentBlock && evmHeightOfCurrentBlock < blockHashListFixHCUEVMHeightTestnet { + return fixedHashes[flow.Testnet][queriedEVMHeight%256], true + } + return gethCommon.Hash{}, false +} + // Testnet52 - Height Coordinated Upgrade 4, Nov 21, 2024 // Flow Block: 94361765 4c9edc817afeaaa6aeb5e63504ed3f5ba8bcbba3931e53f5437d911a1129b431 // PR: https://github.com/onflow/flow-go/pull/6734 From 9bf550caeec2dce57939be29a75dad8ef7226fe7 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Wed, 20 Nov 2024 17:27:42 -0800 Subject: [PATCH 20/36] add testcase for offchain evm backward compatibilities --- fvm/evm/offchain/utils/collection_test.go | 326 ++++++++++++++++++++-- fvm/evm/testutils/backend.go | 23 +- 2 files changed, 323 insertions(+), 26 deletions(-) diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index a90a8f57bea..d4fe05dcf8f 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -2,45 +2,252 @@ package utils_test import ( "bufio" + "encoding/gob" "encoding/hex" "encoding/json" + "fmt" "os" + "path/filepath" "strings" "testing" - "github.com/onflow/cadence" - "github.com/onflow/cadence/encoding/ccf" "github.com/rs/zerolog" + "github.com/rs/zerolog/log" "github.com/stretchr/testify/require" + "github.com/onflow/cadence" + "github.com/onflow/cadence/encoding/ccf" + "github.com/onflow/flow-go/fvm/environment" "github.com/onflow/flow-go/fvm/evm" "github.com/onflow/flow-go/fvm/evm/events" "github.com/onflow/flow-go/fvm/evm/offchain/blocks" + "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" "github.com/onflow/flow-go/fvm/evm/offchain/utils" . "github.com/onflow/flow-go/fvm/evm/testutils" - "github.com/onflow/flow-go/fvm/evm/types" "github.com/onflow/flow-go/model/flow" ) -func ReplyingCollectionFromScratch( +func TestTestnetBackwardCompatibility(t *testing.T) { + t.Skip("TIME CONSUMING TESTS. Enable the tests with the events files saved in local") + // how to run this tests + // Note: this is a time consuming tests, so please run it in local + // + // 1) run the following cli to get the events files across different sporks + + // flow events get A.8c5303eaa26202d6.EVM.TransactionExecuted A.8c5303eaa26202d6.EVM.BlockExecuted + // --start 211176670 --end 211176770 --network testnet --host access-001.devnet51.nodes.onflow.org:9000 + // > ~/Downloads/events_devnet51_1.jsonl + // ... + // + // 2) comment the above t.Skip, and update the events file paths and checkpoint dir + // to run the tests + BackwardCompatibleSinceEVMGenesisBlock( + t, flow.Testnet, []string{ + "~/Downloads/events_devnet51_1.jsonl", + "~/Downloads/events_devnet51_2.jsonl", + }, + "~/Downloads/", + 0, + ) +} + +// BackwardCompatibilityTestSinceEVMGenesisBlock verifies that the offchain package +// is able to read EVM events from the given file paths and replay blocks since the +// EVM genesis block and derive a consistant state as the latest onchain EVM state. +// the eventsFilePaths is a list of file paths that contain ordered EVM events in JSONL format. 
+// The EVM events file can be queried by flow cli query, for instance: +// +// flow events get A.8c5303eaa26202d6.EVM.TransactionExecuted A.8c5303eaa26202d6.EVM.BlockExecuted +// --start 211176670 --end 211176770 --network testnet --host access-001.devnet51.nodes.onflow.org:9000 +// +// After replaying with each event json file, it will generate a values_.gob and +// allocators_.gob files as checkpoint, such that when the checkpoint exists, it will loaded +// and skil replaying the coresponding event json files. + +// backwardCompatibilityTestSinceEVMGenesisBlock ensures that the offchain package +// can read EVM events from the provided file paths, replay blocks starting from +// the EVM genesis block, and derive a consistent state matching the latest on-chain EVM state. +// +// The parameter `eventsFilePaths` is a list of file paths containing ordered EVM events in JSONL format. +// These EVM event files can be generated using the Flow CLI query command, for example: +// +// flow events get A.8c5303eaa26202d6.EVM.TransactionExecuted A.8c5303eaa26202d6.EVM.BlockExecuted +// +// --start 211176670 --end 211176770 --network testnet --host access-001.devnet51.nodes.onflow.org:9000 +// +// During the replay process, it will generate `values_.gob` and +// `allocators_.gob` checkpoint files for each height. If these checkpoint files exist, +// the corresponding event JSON files will be skipped to optimize replay. +func BackwardCompatibleSinceEVMGenesisBlock( t *testing.T, chainID flow.ChainID, - storage types.BackendStorage, - filePath string, + eventsFilePaths []string, // ordered EVM events in JSONL format + checkpointDir string, + checkpointEndHeight uint64, // EVM height of an EVM state that a checkpoint was created for ) { + // ensure that checkpoints are not more than the event files + require.True(t, len(eventsFilePaths) > 0) + + log.Info().Msgf("replaying EVM events from %v to %v, with checkpoints in %s, and checkpointEndHeight: %v", + eventsFilePaths[0], eventsFilePaths[len(eventsFilePaths)-1], + checkpointDir, checkpointEndHeight) + + store, checkpointEndHeightOrZero := initStorageWithCheckpoints(t, chainID, checkpointDir, checkpointEndHeight) + + // the events to replay + nextHeight := checkpointEndHeightOrZero + 1 + + // replay each event files + for _, eventsFilePath := range eventsFilePaths { + log.Info().Msgf("replaying events from %v, nextHeight: %v", eventsFilePath, nextHeight) + + checkpointEndHeight := replayEvents(t, chainID, store, eventsFilePath, checkpointDir, nextHeight) + nextHeight = checkpointEndHeight + 1 + } + log.Info(). + Msgf("succhessfully replayed all events and state changes are consistent with onchain state change. 
nextHeight: %v", nextHeight) +} + +func initStorageWithCheckpoints(t *testing.T, chainID flow.ChainID, checkpointDir string, checkpointEndHeight uint64) ( + *TestValueStore, uint64, +) { rootAddr := evm.StorageAccountAddress(chainID) - // setup the rootAddress account - as := environment.NewAccountStatus() - err := storage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), as.ToBytes()) + // if there is no checkpoint, create a empty store and initialize the account status, + // return 0 as the genesis height + if checkpointEndHeight == 0 { + store := GetSimpleValueStore() + as := environment.NewAccountStatus() + require.NoError(t, store.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), as.ToBytes())) + + return store, 0 + } + + valueFileName, allocatorFileName := checkpointFileNamesByEndHeight(checkpointDir, checkpointEndHeight) + values, err := deserialize(valueFileName) require.NoError(t, err) + allocators, err := deserializeAllocator(allocatorFileName) + require.NoError(t, err) + store := GetSimpleValueStorePopulated(values, allocators) + return store, checkpointEndHeight +} - bp, err := blocks.NewBasicProvider(chainID, storage, rootAddr) +func replayEvents( + t *testing.T, + chainID flow.ChainID, + store *TestValueStore, eventsFilePath string, checkpointDir string, initialNextHeight uint64) uint64 { + + rootAddr := evm.StorageAccountAddress(chainID) + + bpStorage := storage.NewEphemeralStorage(store) + bp, err := blocks.NewBasicProvider(chainID, bpStorage, rootAddr) require.NoError(t, err) + nextHeight := initialNextHeight + + scanEventFilesAndRun(t, eventsFilePath, + func(blockEventPayload *events.BlockEventPayload, txEvents []events.TransactionEventPayload) error { + if blockEventPayload.Height != nextHeight { + return fmt.Errorf( + "expected height for next block event to be %v, but got %v", + nextHeight, blockEventPayload.Height) + } + + err = bp.OnBlockReceived(blockEventPayload) + require.NoError(t, err) + + sp := NewTestStorageProvider(store, blockEventPayload.Height) + cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true) + res, err := cr.ReplayBlock(txEvents, blockEventPayload) + require.NoError(t, err) + + // commit all changes + for k, v := range res.StorageRegisterUpdates() { + err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) + require.NoError(t, err) + } + + err = bp.OnBlockExecuted(blockEventPayload.Height, res) + require.NoError(t, err) + + // commit all block hash list changes + for k, v := range bpStorage.StorageRegisterUpdates() { + err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) + require.NoError(t, err) + } + + // verify the block height is sequential without gap + nextHeight++ + + return nil + }) + + checkpointEndHeight := nextHeight - 1 + + log.Info().Msgf("finished replaying events from %v to %v, creating checkpoint", initialNextHeight, checkpointEndHeight) + valuesFile, allocatorsFile := dumpCheckpoint(t, store, checkpointDir, checkpointEndHeight) + log.Info().Msgf("checkpoint created: %v, %v", valuesFile, allocatorsFile) + + return checkpointEndHeight +} + +func checkpointFileNamesByEndHeight(dir string, endHeight uint64) (string, string) { + return filepath.Join(dir, fmt.Sprintf("values_%d.gob", endHeight)), + filepath.Join(dir, fmt.Sprintf("allocators_%d.gob", endHeight)) +} + +func dumpCheckpoint(t *testing.T, store *TestValueStore, dir string, checkpointEndHeight uint64) (string, string) { + valuesFileName, allocatorsFileName := checkpointFileNamesByEndHeight(dir, checkpointEndHeight) + values, allocators := 
store.Dump() + + require.NoError(t, serialize(valuesFileName, values)) + require.NoError(t, serializeAllocator(allocatorsFileName, allocators)) + return valuesFileName, allocatorsFileName +} + +const resume_height = 6559268 + +func decodeFullKey(encoded string) ([]byte, []byte, error) { + // Split the encoded string at the first occurrence of "~" + parts := strings.SplitN(encoded, "~", 2) + if len(parts) != 2 { + return nil, nil, fmt.Errorf("invalid encoded key: no delimiter found") + } + + // Convert the split parts back to byte slices + owner := []byte(parts[0]) + key := []byte(parts[1]) + return owner, key, nil +} + +type Subscription[T any] struct { + ch chan T + err error +} + +func NewSubscription[T any]() *Subscription[T] { + return &Subscription[T]{ + ch: make(chan T), + } +} + +func (s *Subscription[T]) Channel() <-chan T { + return s.ch +} + +func (s *Subscription[T]) Err() error { + return s.err +} + +// scanEventFilesAndRun +func scanEventFilesAndRun( + t *testing.T, + filePath string, + handler func(*events.BlockEventPayload, []events.TransactionEventPayload) error, +) { file, err := os.Open(filePath) require.NoError(t, err) defer file.Close() @@ -65,21 +272,8 @@ func ReplyingCollectionFromScratch( blockEventPayload, err := events.DecodeBlockEventPayload(ev.(cadence.Event)) require.NoError(t, err) - err = bp.OnBlockReceived(blockEventPayload) - require.NoError(t, err) - - sp := NewTestStorageProvider(storage, blockEventPayload.Height) - cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true) - res, err := cr.ReplayBlock(txEvents, blockEventPayload) - require.NoError(t, err) - // commit all changes - for k, v := range res.StorageRegisterUpdates() { - err = storage.SetValue([]byte(k.Owner), []byte(k.Key), v) - require.NoError(t, err) - } - - err = bp.OnBlockExecuted(blockEventPayload.Height, res) - require.NoError(t, err) + require.NoError(t, handler(blockEventPayload, txEvents), fmt.Sprintf("fail to handle block at height %d", + blockEventPayload.Height)) txEvents = make([]events.TransactionEventPayload, 0) continue @@ -97,3 +291,85 @@ func ReplyingCollectionFromScratch( t.Fatal(err) } } + +// Serialize function: saves map data to a file +func serialize(filename string, data map[string][]byte) error { + // Create a file to save data + file, err := os.Create(filename) + if err != nil { + return err + } + defer file.Close() + + // Use gob to encode data + encoder := gob.NewEncoder(file) + err = encoder.Encode(data) + if err != nil { + return err + } + + return nil +} + +// Deserialize function: reads map data from a file +func deserialize(filename string) (map[string][]byte, error) { + // Open the file for reading + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer file.Close() + + // Prepare the map to store decoded data + var data map[string][]byte + + // Use gob to decode data + decoder := gob.NewDecoder(file) + err = decoder.Decode(&data) + if err != nil { + return nil, err + } + + return data, nil +} + +// Serialize function: saves map data to a file +func serializeAllocator(filename string, data map[string]uint64) error { + // Create a file to save data + file, err := os.Create(filename) + if err != nil { + return err + } + defer file.Close() + + // Use gob to encode data + encoder := gob.NewEncoder(file) + err = encoder.Encode(data) + if err != nil { + return err + } + + return nil +} + +// Deserialize function: reads map data from a file +func deserializeAllocator(filename string) (map[string]uint64, error) { + // Open 
the file for reading + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer file.Close() + + // Prepare the map to store decoded data + var data map[string]uint64 + + // Use gob to decode data + decoder := gob.NewDecoder(file) + err = decoder.Decode(&data) + if err != nil { + return nil, err + } + + return data, nil +} diff --git a/fvm/evm/testutils/backend.go b/fvm/evm/testutils/backend.go index 7e0f05cb201..8971b97c2b0 100644 --- a/fvm/evm/testutils/backend.go +++ b/fvm/evm/testutils/backend.go @@ -60,7 +60,7 @@ func ConvertToCadence(data []byte) []cadence.Value { } func fullKey(owner, key []byte) string { - return string(owner) + "~" + string(key) + return fmt.Sprintf("%x~%s", owner, key) } func GetSimpleValueStore() *TestValueStore { @@ -145,6 +145,19 @@ func GetSimpleValueStorePopulated( // clone allocator return GetSimpleValueStorePopulated(newData, newAllocator) }, + + DumpFunc: func() (map[string][]byte, map[string]uint64) { + // clone data + newData := make(map[string][]byte) + for k, v := range data { + newData[k] = v + } + newAllocator := make(map[string]uint64) + for k, v := range allocator { + newAllocator[k] = v + } + return newData, newAllocator + }, } } @@ -253,6 +266,7 @@ type TestValueStore struct { TotalStorageItemsFunc func() int ResetStatsFunc func() CloneFunc func() *TestValueStore + DumpFunc func() (map[string][]byte, map[string]uint64) } var _ environment.ValueStore = &TestValueStore{} @@ -327,6 +341,13 @@ func (vs *TestValueStore) Clone() *TestValueStore { return vs.CloneFunc() } +func (vs *TestValueStore) Dump() (map[string][]byte, map[string]uint64) { + if vs.DumpFunc == nil { + panic("method not set") + } + return vs.DumpFunc() +} + type testMeter struct { meterComputation func(common.ComputationKind, uint) error hasComputationCapacity func(common.ComputationKind, uint) bool From 27c0f3ae641b6525506c06eefb6958033a99f6e9 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 20:46:42 -0800 Subject: [PATCH 21/36] review comments --- fvm/evm/offchain/utils/collection_test.go | 51 +---------------------- 1 file changed, 2 insertions(+), 49 deletions(-) diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index d4fe05dcf8f..a18ce4a81ac 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -53,20 +53,7 @@ func TestTestnetBackwardCompatibility(t *testing.T) { ) } -// BackwardCompatibilityTestSinceEVMGenesisBlock verifies that the offchain package -// is able to read EVM events from the given file paths and replay blocks since the -// EVM genesis block and derive a consistant state as the latest onchain EVM state. -// the eventsFilePaths is a list of file paths that contain ordered EVM events in JSONL format. -// The EVM events file can be queried by flow cli query, for instance: -// -// flow events get A.8c5303eaa26202d6.EVM.TransactionExecuted A.8c5303eaa26202d6.EVM.BlockExecuted -// --start 211176670 --end 211176770 --network testnet --host access-001.devnet51.nodes.onflow.org:9000 -// -// After replaying with each event json file, it will generate a values_.gob and -// allocators_.gob files as checkpoint, such that when the checkpoint exists, it will loaded -// and skil replaying the coresponding event json files. 
- -// backwardCompatibilityTestSinceEVMGenesisBlock ensures that the offchain package +// BackwardCompatibilityTestSinceEVMGenesisBlock ensures that the offchain package // can read EVM events from the provided file paths, replay blocks starting from // the EVM genesis block, and derive a consistent state matching the latest on-chain EVM state. // @@ -87,7 +74,7 @@ func BackwardCompatibleSinceEVMGenesisBlock( checkpointDir string, checkpointEndHeight uint64, // EVM height of an EVM state that a checkpoint was created for ) { - // ensure that checkpoints are not more than the event files + // ensure that event files is not an empty array require.True(t, len(eventsFilePaths) > 0) log.Info().Msgf("replaying EVM events from %v to %v, with checkpoints in %s, and checkpointEndHeight: %v", @@ -208,40 +195,6 @@ func dumpCheckpoint(t *testing.T, store *TestValueStore, dir string, checkpointE return valuesFileName, allocatorsFileName } -const resume_height = 6559268 - -func decodeFullKey(encoded string) ([]byte, []byte, error) { - // Split the encoded string at the first occurrence of "~" - parts := strings.SplitN(encoded, "~", 2) - if len(parts) != 2 { - return nil, nil, fmt.Errorf("invalid encoded key: no delimiter found") - } - - // Convert the split parts back to byte slices - owner := []byte(parts[0]) - key := []byte(parts[1]) - return owner, key, nil -} - -type Subscription[T any] struct { - ch chan T - err error -} - -func NewSubscription[T any]() *Subscription[T] { - return &Subscription[T]{ - ch: make(chan T), - } -} - -func (s *Subscription[T]) Channel() <-chan T { - return s.ch -} - -func (s *Subscription[T]) Err() error { - return s.err -} - // scanEventFilesAndRun func scanEventFilesAndRun( t *testing.T, From 2af81790b2a36c9e7dfda216966ddf3b8dcfa446 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Thu, 21 Nov 2024 11:42:55 -0800 Subject: [PATCH 22/36] add verify evm offchain replay util cmd --- cmd/util/cmd/root.go | 2 + .../cmd/verify-evm-offchain-replay/main.go | 87 +++++++++ .../cmd/verify-evm-offchain-replay/verify.go | 93 ++++++++++ fvm/evm/offchain/utils/collection_test.go | 48 ++--- fvm/evm/offchain/utils/verify.go | 168 ++++++++++++++++++ 5 files changed, 374 insertions(+), 24 deletions(-) create mode 100644 cmd/util/cmd/verify-evm-offchain-replay/main.go create mode 100644 cmd/util/cmd/verify-evm-offchain-replay/verify.go create mode 100644 fvm/evm/offchain/utils/verify.go diff --git a/cmd/util/cmd/root.go b/cmd/util/cmd/root.go index cefd8db691d..146fb2b5af8 100644 --- a/cmd/util/cmd/root.go +++ b/cmd/util/cmd/root.go @@ -41,6 +41,7 @@ import ( "github.com/onflow/flow-go/cmd/util/cmd/snapshot" system_addresses "github.com/onflow/flow-go/cmd/util/cmd/system-addresses" truncate_database "github.com/onflow/flow-go/cmd/util/cmd/truncate-database" + verify_evm_offchain_replay "github.com/onflow/flow-go/cmd/util/cmd/verify-evm-offchain-replay" "github.com/onflow/flow-go/cmd/util/cmd/version" "github.com/onflow/flow-go/module/profiler" ) @@ -126,6 +127,7 @@ func addCommands() { rootCmd.AddCommand(debug_script.Cmd) rootCmd.AddCommand(generate_authorization_fixes.Cmd) rootCmd.AddCommand(evm_state_exporter.Cmd) + rootCmd.AddCommand(verify_evm_offchain_replay.Cmd) } func initConfig() { diff --git a/cmd/util/cmd/verify-evm-offchain-replay/main.go b/cmd/util/cmd/verify-evm-offchain-replay/main.go new file mode 100644 index 00000000000..9f56587306e --- /dev/null +++ b/cmd/util/cmd/verify-evm-offchain-replay/main.go @@ -0,0 +1,87 @@ +package verify + +import ( + "fmt" + 
"strconv" + "strings" + + "github.com/rs/zerolog/log" + "github.com/spf13/cobra" + + "github.com/onflow/flow-go/model/flow" +) + +var ( + flagDatadir string + flagExecutionDataDir string + flagEVMStateGobDir string + flagChain string + flagFromTo string +) + +// usage example +// +// ./util verify-evm-offchain-replay --chain flow-testnet --from-to 211176671-211177000 +// --datadir /var/flow/data/protocol --execution_data_dir /var/flow/data/execution_data +var Cmd = &cobra.Command{ + Use: "verify-evm-offchain-replay", + Short: "verify evm offchain replay with execution data", + Run: run, +} + +func init() { + Cmd.Flags().StringVar(&flagChain, "chain", "", "Chain name") + _ = Cmd.MarkFlagRequired("chain") + + Cmd.Flags().StringVar(&flagDatadir, "datadir", "/var/flow/data/protocol", + "directory that stores the protocol state") + + Cmd.Flags().StringVar(&flagExecutionDataDir, "execution_data_dir", "/var/flow/data/execution_data", + "directory that stores the execution state") + + Cmd.Flags().StringVar(&flagFromTo, "from_to", "", + "the flow height range to verify blocks, i.e, 1-1000, 1000-2000, 2000-3000, etc.") + + Cmd.Flags().StringVar(&flagEVMStateGobDir, "evm_state_gob_dir", "/var/flow/data/evm_state_gob", + "directory that stores the evm state gob files as checkpoint") +} + +func run(*cobra.Command, []string) { + _ = flow.ChainID(flagChain).Chain() + + from, to, err := parseFromTo(flagFromTo) + if err != nil { + log.Fatal().Err(err).Msg("could not parse from_to") + } + + log.Info().Msgf("verifying range from %d to %d", from, to) + err = Verify(from, to, flow.Testnet, flagDatadir, flagExecutionDataDir) + if err != nil { + log.Fatal().Err(err).Msg("could not verify last k height") + } + log.Info().Msgf("successfully verified range from %d to %d", from, to) + +} + +func parseFromTo(fromTo string) (from, to uint64, err error) { + parts := strings.Split(fromTo, "-") + if len(parts) != 2 { + return 0, 0, fmt.Errorf("invalid format: expected 'from-to', got '%s'", fromTo) + } + + from, err = strconv.ParseUint(strings.TrimSpace(parts[0]), 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("invalid 'from' value: %w", err) + } + + to, err = strconv.ParseUint(strings.TrimSpace(parts[1]), 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("invalid 'to' value: %w", err) + } + + if from > to { + return 0, 0, fmt.Errorf("'from' value (%d) must be less than or equal to 'to' value (%d)", from, to) + } + + return from, to, nil +} diff --git a/cmd/util/cmd/verify-evm-offchain-replay/verify.go b/cmd/util/cmd/verify-evm-offchain-replay/verify.go new file mode 100644 index 00000000000..1a907be669a --- /dev/null +++ b/cmd/util/cmd/verify-evm-offchain-replay/verify.go @@ -0,0 +1,93 @@ +package verify + +import ( + "fmt" + "io" + "os" + "path/filepath" + + "github.com/dgraph-io/badger/v2" + badgerds "github.com/ipfs/go-ds-badger2" + + "github.com/onflow/flow-go/cmd/util/cmd/common" + "github.com/onflow/flow-go/fvm/environment" + "github.com/onflow/flow-go/fvm/evm" + "github.com/onflow/flow-go/fvm/evm/offchain/utils" + "github.com/onflow/flow-go/fvm/evm/testutils" + "github.com/onflow/flow-go/model/flow" + "github.com/onflow/flow-go/module/blobs" + "github.com/onflow/flow-go/module/executiondatasync/execution_data" + "github.com/onflow/flow-go/storage" +) + +func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, executionDataDir string, evmStateGobDir string) error { + db, storages, executionDataStore, dsStore, err := initStorages(chainID, dataDir, executionDataDir) + if err != nil { + return 
fmt.Errorf("could not initialize storages: %w", err) + } + + defer db.Close() + defer dsStore.Close() + + var store *testutils.TestValueStore + isRoot := isEVMRootHeight(chainID, from) + if isRoot { + store = testutils.GetSimpleValueStore() + as := environment.NewAccountStatus() + rootAddr := evm.StorageAccountAddress(chainID) + err = store.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), as.ToBytes()) + if err != nil { + return err + } + } else { + // TODO: recover from gob + } + + return utils.OffchainReplayBackwardCompatibilityTest( + chainID, + from, + to, + storages.Headers, + storages.Results, + executionDataStore, + store, + ) +} + +func initStorages(chainID flow.ChainID, dataDir string, executionDataDir string) ( + *badger.DB, + *storage.All, + execution_data.ExecutionDataGetter, + io.Closer, + error, +) { + db := common.InitStorage(dataDir) + + storages := common.InitStorages(db) + + datastoreDir := filepath.Join(executionDataDir, "blobstore") + err := os.MkdirAll(datastoreDir, 0700) + if err != nil { + return nil, nil, nil, nil, err + } + dsOpts := &badgerds.DefaultOptions + ds, err := badgerds.NewDatastore(datastoreDir, dsOpts) + if err != nil { + return nil, nil, nil, nil, err + } + + executionDataBlobstore := blobs.NewBlobstore(ds) + executionDataStore := execution_data.NewExecutionDataStore(executionDataBlobstore, execution_data.DefaultSerializer) + + return db, storages, executionDataStore, ds, nil +} + +// EVM Root Height is the first block that has EVM Block Event where the EVM block height is 1 +func isEVMRootHeight(chainID flow.ChainID, flowHeight uint64) bool { + if chainID == flow.Testnet { + return flowHeight == 211176671 + } else if chainID == flow.Mainnet { + return flowHeight == 85981136 + } + return flowHeight == 1 +} diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index a18ce4a81ac..827bb918601 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -41,7 +41,7 @@ func TestTestnetBackwardCompatibility(t *testing.T) { // > ~/Downloads/events_devnet51_1.jsonl // ... // - // 2) comment the above t.Skip, and update the events file paths and checkpoint dir + // 2) comment the above t.Skip, and update the events file paths and evmStateGob dir // to run the tests BackwardCompatibleSinceEVMGenesisBlock( t, flow.Testnet, []string{ @@ -65,47 +65,47 @@ func TestTestnetBackwardCompatibility(t *testing.T) { // --start 211176670 --end 211176770 --network testnet --host access-001.devnet51.nodes.onflow.org:9000 // // During the replay process, it will generate `values_.gob` and -// `allocators_.gob` checkpoint files for each height. If these checkpoint files exist, +// `allocators_.gob` checkpoint files for each height. If these checkpoint gob files exist, // the corresponding event JSON files will be skipped to optimize replay. 
func BackwardCompatibleSinceEVMGenesisBlock( t *testing.T, chainID flow.ChainID, eventsFilePaths []string, // ordered EVM events in JSONL format - checkpointDir string, - checkpointEndHeight uint64, // EVM height of an EVM state that a checkpoint was created for + evmStateGob string, + evmStateEndHeight uint64, // EVM height of an EVM state that a evmStateGob file was created for ) { // ensure that event files is not an empty array require.True(t, len(eventsFilePaths) > 0) - log.Info().Msgf("replaying EVM events from %v to %v, with checkpoints in %s, and checkpointEndHeight: %v", + log.Info().Msgf("replaying EVM events from %v to %v, with evmStateGob file in %s, and evmStateEndHeight: %v", eventsFilePaths[0], eventsFilePaths[len(eventsFilePaths)-1], - checkpointDir, checkpointEndHeight) + evmStateGob, evmStateEndHeight) - store, checkpointEndHeightOrZero := initStorageWithCheckpoints(t, chainID, checkpointDir, checkpointEndHeight) + store, evmStateEndHeightOrZero := initStorageWithEVMStateGob(t, chainID, evmStateGob, evmStateEndHeight) // the events to replay - nextHeight := checkpointEndHeightOrZero + 1 + nextHeight := evmStateEndHeightOrZero + 1 // replay each event files for _, eventsFilePath := range eventsFilePaths { log.Info().Msgf("replaying events from %v, nextHeight: %v", eventsFilePath, nextHeight) - checkpointEndHeight := replayEvents(t, chainID, store, eventsFilePath, checkpointDir, nextHeight) - nextHeight = checkpointEndHeight + 1 + evmStateEndHeight := replayEvents(t, chainID, store, eventsFilePath, evmStateGob, nextHeight) + nextHeight = evmStateEndHeight + 1 } log.Info(). Msgf("succhessfully replayed all events and state changes are consistent with onchain state change. nextHeight: %v", nextHeight) } -func initStorageWithCheckpoints(t *testing.T, chainID flow.ChainID, checkpointDir string, checkpointEndHeight uint64) ( +func initStorageWithEVMStateGob(t *testing.T, chainID flow.ChainID, evmStateGob string, evmStateEndHeight uint64) ( *TestValueStore, uint64, ) { rootAddr := evm.StorageAccountAddress(chainID) - // if there is no checkpoint, create a empty store and initialize the account status, + // if there is no evmStateGob file, create a empty store and initialize the account status, // return 0 as the genesis height - if checkpointEndHeight == 0 { + if evmStateEndHeight == 0 { store := GetSimpleValueStore() as := environment.NewAccountStatus() require.NoError(t, store.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), as.ToBytes())) @@ -113,19 +113,19 @@ func initStorageWithCheckpoints(t *testing.T, chainID flow.ChainID, checkpointDi return store, 0 } - valueFileName, allocatorFileName := checkpointFileNamesByEndHeight(checkpointDir, checkpointEndHeight) + valueFileName, allocatorFileName := evmStateGobFileNamesByEndHeight(evmStateGob, evmStateEndHeight) values, err := deserialize(valueFileName) require.NoError(t, err) allocators, err := deserializeAllocator(allocatorFileName) require.NoError(t, err) store := GetSimpleValueStorePopulated(values, allocators) - return store, checkpointEndHeight + return store, evmStateEndHeight } func replayEvents( t *testing.T, chainID flow.ChainID, - store *TestValueStore, eventsFilePath string, checkpointDir string, initialNextHeight uint64) uint64 { + store *TestValueStore, eventsFilePath string, evmStateGob string, initialNextHeight uint64) uint64 { rootAddr := evm.StorageAccountAddress(chainID) @@ -172,22 +172,22 @@ func replayEvents( return nil }) - checkpointEndHeight := nextHeight - 1 + evmStateEndHeight := nextHeight - 1 - 
log.Info().Msgf("finished replaying events from %v to %v, creating checkpoint", initialNextHeight, checkpointEndHeight) - valuesFile, allocatorsFile := dumpCheckpoint(t, store, checkpointDir, checkpointEndHeight) - log.Info().Msgf("checkpoint created: %v, %v", valuesFile, allocatorsFile) + log.Info().Msgf("finished replaying events from %v to %v, creating evm state gobs", initialNextHeight, evmStateEndHeight) + valuesFile, allocatorsFile := dumpEVMStateToGobFiles(t, store, evmStateGob, evmStateEndHeight) + log.Info().Msgf("evm state gobs created: %v, %v", valuesFile, allocatorsFile) - return checkpointEndHeight + return evmStateEndHeight } -func checkpointFileNamesByEndHeight(dir string, endHeight uint64) (string, string) { +func evmStateGobFileNamesByEndHeight(dir string, endHeight uint64) (string, string) { return filepath.Join(dir, fmt.Sprintf("values_%d.gob", endHeight)), filepath.Join(dir, fmt.Sprintf("allocators_%d.gob", endHeight)) } -func dumpCheckpoint(t *testing.T, store *TestValueStore, dir string, checkpointEndHeight uint64) (string, string) { - valuesFileName, allocatorsFileName := checkpointFileNamesByEndHeight(dir, checkpointEndHeight) +func dumpEVMStateToGobFiles(t *testing.T, store *TestValueStore, dir string, evmStateEndHeight uint64) (string, string) { + valuesFileName, allocatorsFileName := evmStateGobFileNamesByEndHeight(dir, evmStateEndHeight) values, allocators := store.Dump() require.NoError(t, serialize(valuesFileName, values)) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go new file mode 100644 index 00000000000..bf3ed506adf --- /dev/null +++ b/fvm/evm/offchain/utils/verify.go @@ -0,0 +1,168 @@ +package utils + +import ( + "context" + "errors" + "strings" + + "github.com/rs/zerolog/log" + + "github.com/onflow/cadence" + "github.com/onflow/cadence/encoding/ccf" + + "github.com/onflow/flow-go/fvm/environment" + "github.com/onflow/flow-go/fvm/evm" + "github.com/onflow/flow-go/fvm/evm/events" + "github.com/onflow/flow-go/fvm/evm/offchain/blocks" + evmStorage "github.com/onflow/flow-go/fvm/evm/offchain/storage" + "github.com/onflow/flow-go/fvm/evm/offchain/sync" + "github.com/onflow/flow-go/fvm/evm/testutils" + "github.com/onflow/flow-go/ledger" + "github.com/onflow/flow-go/ledger/common/convert" + "github.com/onflow/flow-go/model/flow" + "github.com/onflow/flow-go/module/executiondatasync/execution_data" + "github.com/onflow/flow-go/storage" +) + +func OffchainReplayBackwardCompatibilityTest( + chainID flow.ChainID, + flowStartHeight uint64, + flowEndHeight uint64, + headers storage.Headers, + results storage.ExecutionResults, + executionDataStore execution_data.ExecutionDataGetter, + store environment.ValueStore, +) error { + rootAddr := evm.StorageAccountAddress(chainID) + rootAddrStr := string(rootAddr.Bytes()) + + bpStorage := evmStorage.NewEphemeralStorage(store) + bp, err := blocks.NewBasicProvider(chainID, bpStorage, rootAddr) + if err != nil { + return err + } + + for height := flowStartHeight; height <= flowEndHeight; height++ { + blockID, err := headers.BlockIDByHeight(height) + if err != nil { + return err + } + + result, err := results.ByBlockID(blockID) + if err != nil { + return err + } + + executionData, err := executionDataStore.Get(context.Background(), result.ExecutionDataID) + if err != nil { + return err + } + + events := flow.EventsList{} + payloads := []*ledger.Payload{} + + for _, chunkData := range executionData.ChunkExecutionDatas { + events = append(events, chunkData.Events...) 
+ payloads = append(payloads, chunkData.TrieUpdate.Payloads...) + } + + updates := make(map[flow.RegisterID]flow.RegisterValue, len(payloads)) + for i := len(payloads) - 1; i >= 0; i-- { + regID, regVal, err := convert.PayloadToRegister(payloads[i]) + if err != nil { + return err + } + + // skip non-evm-account registers + if regID.Owner != rootAddrStr { + continue + } + + // when iterating backwards, duplicated register updates are stale updates, + // so skipping them + if _, ok := updates[regID]; !ok { + updates[regID] = regVal + } + } + + // parse events + evmBlockEvent, evmTxEvents, err := parseEVMEvents(events) + if err != nil { + return err + } + + err = bp.OnBlockReceived(evmBlockEvent) + if err != nil { + return err + } + + sp := testutils.NewTestStorageProvider(store, evmBlockEvent.Height) + cr := sync.NewReplayer(chainID, rootAddr, sp, bp, log.Logger, nil, true) + res, err := cr.ReplayBlock(evmTxEvents, evmBlockEvent) + if err != nil { + return err + } + + // commit all changes + for k, v := range res.StorageRegisterUpdates() { + err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) + if err != nil { + return err + } + } + + err = bp.OnBlockExecuted(evmBlockEvent.Height, res) + if err != nil { + return err + } + + // verify and commit all block hash list changes + for k, v := range bpStorage.StorageRegisterUpdates() { + // verify the block hash list changes are included in the trie update + + err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) + if err != nil { + return err + } + } + } + + return nil +} + +func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.TransactionEventPayload, error) { + var blockEvent *events.BlockEventPayload + txEvents := make([]events.TransactionEventPayload, 0) + + for _, e := range evts { + evtType := string(e.Type) + if strings.Contains(evtType, "BlockExecuted") { + if blockEvent != nil { + return nil, nil, errors.New("multiple block events in a single block") + } + + ev, err := ccf.Decode(nil, e.Payload) + if err != nil { + return nil, nil, err + } + + blockEventPayload, err := events.DecodeBlockEventPayload(ev.(cadence.Event)) + if err != nil { + return nil, nil, err + } + blockEvent = blockEventPayload + } else if strings.Contains(evtType, "TransactionExecuted") { + ev, err := ccf.Decode(nil, e.Payload) + if err != nil { + return nil, nil, err + } + txEv, err := events.DecodeTransactionEventPayload(ev.(cadence.Event)) + if err != nil { + return nil, nil, err + } + txEvents = append(txEvents, *txEv) + } + } + + return blockEvent, txEvents, nil +} From 873c7073de79587fc0bc2a42d230c1e81cb26a72 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Thu, 21 Nov 2024 14:49:32 -0800 Subject: [PATCH 23/36] refactor serailization with gob --- .../cmd/verify-evm-offchain-replay/main.go | 2 +- .../cmd/verify-evm-offchain-replay/verify.go | 38 +++++++- fvm/evm/offchain/utils/collection_test.go | 91 +------------------ fvm/evm/testutils/gob.go | 88 ++++++++++++++++++ 4 files changed, 129 insertions(+), 90 deletions(-) create mode 100644 fvm/evm/testutils/gob.go diff --git a/cmd/util/cmd/verify-evm-offchain-replay/main.go b/cmd/util/cmd/verify-evm-offchain-replay/main.go index 9f56587306e..76581e8a471 100644 --- a/cmd/util/cmd/verify-evm-offchain-replay/main.go +++ b/cmd/util/cmd/verify-evm-offchain-replay/main.go @@ -55,7 +55,7 @@ func run(*cobra.Command, []string) { } log.Info().Msgf("verifying range from %d to %d", from, to) - err = Verify(from, to, flow.Testnet, flagDatadir, flagExecutionDataDir) + err = Verify(from, 
to, flow.Testnet, flagDatadir, flagExecutionDataDir, flagEVMStateGobDir) if err != nil { log.Fatal().Err(err).Msg("could not verify last k height") } diff --git a/cmd/util/cmd/verify-evm-offchain-replay/verify.go b/cmd/util/cmd/verify-evm-offchain-replay/verify.go index 1a907be669a..bbdd9911c21 100644 --- a/cmd/util/cmd/verify-evm-offchain-replay/verify.go +++ b/cmd/util/cmd/verify-evm-offchain-replay/verify.go @@ -40,10 +40,21 @@ func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, execut return err } } else { - // TODO: recover from gob + prev := from - 1 + valueFileName, allocatorFileName := evmStateGobFileNamesByEndHeight(evmStateGobDir, prev) + values, err := testutils.DeserializeState(valueFileName) + if err != nil { + return err + } + + allocators, err := testutils.DeserializeAllocator(allocatorFileName) + if err != nil { + return err + } + store = testutils.GetSimpleValueStorePopulated(values, allocators) } - return utils.OffchainReplayBackwardCompatibilityTest( + err = utils.OffchainReplayBackwardCompatibilityTest( chainID, from, to, @@ -52,6 +63,23 @@ func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, execut executionDataStore, store, ) + + if err != nil { + return err + } + + valueFileName, allocatorFileName := evmStateGobFileNamesByEndHeight(evmStateGobDir, to) + values, allocators := store.Dump() + err = testutils.SerializeState(valueFileName, values) + if err != nil { + return err + } + err = testutils.SerializeAllocator(allocatorFileName, allocators) + if err != nil { + return err + } + + return nil } func initStorages(chainID flow.ChainID, dataDir string, executionDataDir string) ( @@ -91,3 +119,9 @@ func isEVMRootHeight(chainID flow.ChainID, flowHeight uint64) bool { } return flowHeight == 1 } + +func evmStateGobFileNamesByEndHeight(evmStateGobDir string, endHeight uint64) (string, string) { + valueFileName := filepath.Join(evmStateGobDir, fmt.Sprintf("values-%d.gob", endHeight)) + allocatorFileName := filepath.Join(evmStateGobDir, fmt.Sprintf("allocators-%d.gob", endHeight)) + return valueFileName, allocatorFileName +} diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index 827bb918601..e5b3059661b 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -2,7 +2,6 @@ package utils_test import ( "bufio" - "encoding/gob" "encoding/hex" "encoding/json" "fmt" @@ -114,9 +113,9 @@ func initStorageWithEVMStateGob(t *testing.T, chainID flow.ChainID, evmStateGob } valueFileName, allocatorFileName := evmStateGobFileNamesByEndHeight(evmStateGob, evmStateEndHeight) - values, err := deserialize(valueFileName) + values, err := DeserializeState(valueFileName) require.NoError(t, err) - allocators, err := deserializeAllocator(allocatorFileName) + allocators, err := DeserializeAllocator(allocatorFileName) require.NoError(t, err) store := GetSimpleValueStorePopulated(values, allocators) return store, evmStateEndHeight @@ -190,8 +189,8 @@ func dumpEVMStateToGobFiles(t *testing.T, store *TestValueStore, dir string, evm valuesFileName, allocatorsFileName := evmStateGobFileNamesByEndHeight(dir, evmStateEndHeight) values, allocators := store.Dump() - require.NoError(t, serialize(valuesFileName, values)) - require.NoError(t, serializeAllocator(allocatorsFileName, allocators)) + require.NoError(t, SerializeState(valuesFileName, values)) + require.NoError(t, SerializeAllocator(allocatorsFileName, allocators)) return valuesFileName, allocatorsFileName } @@ 
-244,85 +243,3 @@ func scanEventFilesAndRun( t.Fatal(err) } } - -// Serialize function: saves map data to a file -func serialize(filename string, data map[string][]byte) error { - // Create a file to save data - file, err := os.Create(filename) - if err != nil { - return err - } - defer file.Close() - - // Use gob to encode data - encoder := gob.NewEncoder(file) - err = encoder.Encode(data) - if err != nil { - return err - } - - return nil -} - -// Deserialize function: reads map data from a file -func deserialize(filename string) (map[string][]byte, error) { - // Open the file for reading - file, err := os.Open(filename) - if err != nil { - return nil, err - } - defer file.Close() - - // Prepare the map to store decoded data - var data map[string][]byte - - // Use gob to decode data - decoder := gob.NewDecoder(file) - err = decoder.Decode(&data) - if err != nil { - return nil, err - } - - return data, nil -} - -// Serialize function: saves map data to a file -func serializeAllocator(filename string, data map[string]uint64) error { - // Create a file to save data - file, err := os.Create(filename) - if err != nil { - return err - } - defer file.Close() - - // Use gob to encode data - encoder := gob.NewEncoder(file) - err = encoder.Encode(data) - if err != nil { - return err - } - - return nil -} - -// Deserialize function: reads map data from a file -func deserializeAllocator(filename string) (map[string]uint64, error) { - // Open the file for reading - file, err := os.Open(filename) - if err != nil { - return nil, err - } - defer file.Close() - - // Prepare the map to store decoded data - var data map[string]uint64 - - // Use gob to decode data - decoder := gob.NewDecoder(file) - err = decoder.Decode(&data) - if err != nil { - return nil, err - } - - return data, nil -} diff --git a/fvm/evm/testutils/gob.go b/fvm/evm/testutils/gob.go new file mode 100644 index 00000000000..1c944a1e9e3 --- /dev/null +++ b/fvm/evm/testutils/gob.go @@ -0,0 +1,88 @@ +package testutils + +import ( + "encoding/gob" + "os" +) + +// Serialize function: saves map data to a file +func SerializeState(filename string, data map[string][]byte) error { + // Create a file to save data + file, err := os.Create(filename) + if err != nil { + return err + } + defer file.Close() + + // Use gob to encode data + encoder := gob.NewEncoder(file) + err = encoder.Encode(data) + if err != nil { + return err + } + + return nil +} + +// Deserialize function: reads map data from a file +func DeserializeState(filename string) (map[string][]byte, error) { + // Open the file for reading + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer file.Close() + + // Prepare the map to store decoded data + var data map[string][]byte + + // Use gob to decode data + decoder := gob.NewDecoder(file) + err = decoder.Decode(&data) + if err != nil { + return nil, err + } + + return data, nil +} + +// Serialize function: saves map data to a file +func SerializeAllocator(filename string, data map[string]uint64) error { + // Create a file to save data + file, err := os.Create(filename) + if err != nil { + return err + } + defer file.Close() + + // Use gob to encode data + encoder := gob.NewEncoder(file) + err = encoder.Encode(data) + if err != nil { + return err + } + + return nil +} + +// Deserialize function: reads map data from a file +func DeserializeAllocator(filename string) (map[string]uint64, error) { + // Open the file for reading + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer 
file.Close() + + // Prepare the map to store decoded data + var data map[string]uint64 + + // Use gob to decode data + decoder := gob.NewDecoder(file) + err = decoder.Decode(&data) + if err != nil { + return nil, err + } + + return data, nil +} From 7be48402def6aa65cb21b7ecb06940563c9d7294 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Thu, 21 Nov 2024 15:20:56 -0800 Subject: [PATCH 24/36] add logging --- .../cmd/verify-evm-offchain-replay/main.go | 2 +- .../cmd/verify-evm-offchain-replay/verify.go | 23 ++++++++++++++++--- fvm/evm/offchain/utils/verify.go | 8 ++++--- 3 files changed, 26 insertions(+), 7 deletions(-) diff --git a/cmd/util/cmd/verify-evm-offchain-replay/main.go b/cmd/util/cmd/verify-evm-offchain-replay/main.go index 76581e8a471..2459a35cd59 100644 --- a/cmd/util/cmd/verify-evm-offchain-replay/main.go +++ b/cmd/util/cmd/verify-evm-offchain-replay/main.go @@ -55,7 +55,7 @@ func run(*cobra.Command, []string) { } log.Info().Msgf("verifying range from %d to %d", from, to) - err = Verify(from, to, flow.Testnet, flagDatadir, flagExecutionDataDir, flagEVMStateGobDir) + err = Verify(log.Logger, from, to, flow.Testnet, flagDatadir, flagExecutionDataDir, flagEVMStateGobDir) if err != nil { log.Fatal().Err(err).Msg("could not verify last k height") } diff --git a/cmd/util/cmd/verify-evm-offchain-replay/verify.go b/cmd/util/cmd/verify-evm-offchain-replay/verify.go index bbdd9911c21..f75cd8278b6 100644 --- a/cmd/util/cmd/verify-evm-offchain-replay/verify.go +++ b/cmd/util/cmd/verify-evm-offchain-replay/verify.go @@ -8,6 +8,7 @@ import ( "github.com/dgraph-io/badger/v2" badgerds "github.com/ipfs/go-ds-badger2" + "github.com/rs/zerolog" "github.com/onflow/flow-go/cmd/util/cmd/common" "github.com/onflow/flow-go/fvm/environment" @@ -20,7 +21,16 @@ import ( "github.com/onflow/flow-go/storage" ) -func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, executionDataDir string, evmStateGobDir string) error { +// Verify verifies the offchain replay of EVM blocks from the given height range +// and updates the EVM state gob files with the latest state +func Verify(log zerolog.Logger, from uint64, to uint64, chainID flow.ChainID, dataDir string, executionDataDir string, evmStateGobDir string) error { + log.Info(). + Str("chain", chainID.String()). + Str("dataDir", dataDir). + Str("executionDataDir", executionDataDir). + Str("evmStateGobDir", evmStateGobDir). 
+ Msgf("verifying range from %d to %d", from, to) + db, storages, executionDataStore, dsStore, err := initStorages(chainID, dataDir, executionDataDir) if err != nil { return fmt.Errorf("could not initialize storages: %w", err) @@ -32,6 +42,8 @@ func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, execut var store *testutils.TestValueStore isRoot := isEVMRootHeight(chainID, from) if isRoot { + log.Info().Msgf("initializing EVM state for root height %d", from) + store = testutils.GetSimpleValueStore() as := environment.NewAccountStatus() rootAddr := evm.StorageAccountAddress(chainID) @@ -41,20 +53,23 @@ func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, execut } } else { prev := from - 1 + log.Info().Msgf("loading EVM state from previous height %d", prev) + valueFileName, allocatorFileName := evmStateGobFileNamesByEndHeight(evmStateGobDir, prev) values, err := testutils.DeserializeState(valueFileName) if err != nil { - return err + return fmt.Errorf("could not deserialize state %v: %w", valueFileName, err) } allocators, err := testutils.DeserializeAllocator(allocatorFileName) if err != nil { - return err + return fmt.Errorf("could not deserialize allocator %v: %w", allocatorFileName, err) } store = testutils.GetSimpleValueStorePopulated(values, allocators) } err = utils.OffchainReplayBackwardCompatibilityTest( + log, chainID, from, to, @@ -79,6 +94,8 @@ func Verify(from uint64, to uint64, chainID flow.ChainID, dataDir string, execut return err } + log.Info().Msgf("saved EVM state to %s and %s", valueFileName, allocatorFileName) + return nil } diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index bf3ed506adf..ae99e827acb 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -5,10 +5,9 @@ import ( "errors" "strings" - "github.com/rs/zerolog/log" - "github.com/onflow/cadence" "github.com/onflow/cadence/encoding/ccf" + "github.com/rs/zerolog" "github.com/onflow/flow-go/fvm/environment" "github.com/onflow/flow-go/fvm/evm" @@ -25,6 +24,7 @@ import ( ) func OffchainReplayBackwardCompatibilityTest( + log zerolog.Logger, chainID flow.ChainID, flowStartHeight uint64, flowEndHeight uint64, @@ -97,7 +97,7 @@ func OffchainReplayBackwardCompatibilityTest( } sp := testutils.NewTestStorageProvider(store, evmBlockEvent.Height) - cr := sync.NewReplayer(chainID, rootAddr, sp, bp, log.Logger, nil, true) + cr := sync.NewReplayer(chainID, rootAddr, sp, bp, log, nil, true) res, err := cr.ReplayBlock(evmTxEvents, evmBlockEvent) if err != nil { return err @@ -125,6 +125,8 @@ func OffchainReplayBackwardCompatibilityTest( return err } } + + log.Info().Msgf("verified block %d", height) } return nil From 8b08a316a997ae96418057d3773f62bf86ae00e3 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 06:28:14 -0800 Subject: [PATCH 25/36] update error message --- cmd/util/cmd/verify-evm-offchain-replay/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/util/cmd/verify-evm-offchain-replay/main.go b/cmd/util/cmd/verify-evm-offchain-replay/main.go index 2459a35cd59..0bc6eef8187 100644 --- a/cmd/util/cmd/verify-evm-offchain-replay/main.go +++ b/cmd/util/cmd/verify-evm-offchain-replay/main.go @@ -57,7 +57,7 @@ func run(*cobra.Command, []string) { log.Info().Msgf("verifying range from %d to %d", from, to) err = Verify(log.Logger, from, to, flow.Testnet, flagDatadir, flagExecutionDataDir, flagEVMStateGobDir) if err != nil { - log.Fatal().Err(err).Msg("could not verify 
last k height") + log.Fatal().Err(err).Msg("could not verify height") } log.Info().Msgf("successfully verified range from %d to %d", from, to) From c5752a587788031ba9394ae0e55919d42a346dee Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 08:44:36 -0800 Subject: [PATCH 26/36] add register checks --- fvm/evm/offchain/utils/verify.go | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index ae99e827acb..2045de36f22 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -1,8 +1,10 @@ package utils import ( + "bytes" "context" "errors" + "fmt" "strings" "github.com/onflow/cadence" @@ -124,6 +126,21 @@ func OffchainReplayBackwardCompatibilityTest( if err != nil { return err } + + expectedUpdate, ok := updates[k] + if !ok { + return fmt.Errorf("missing update for register %v, %v", k, expectedUpdate) + } + + if !bytes.Equal(expectedUpdate, v) { + return fmt.Errorf("unexpected update for register %v, expected %v, got %v", k, expectedUpdate, v) + } + + delete(updates, k) + } + + if len(updates) > 0 { + return fmt.Errorf("missing updates for registers %v", updates) } log.Info().Msgf("verified block %d", height) From 17cc1a803f5196e475d0b57d20949ed1c732f8b1 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 10:30:11 -0800 Subject: [PATCH 27/36] store block proposal in replay --- fvm/evm/offchain/blocks/provider.go | 50 +++++++++++++++++++++-- fvm/evm/offchain/sync/replay.go | 37 +++++++++-------- fvm/evm/offchain/sync/replayer.go | 18 ++++---- fvm/evm/offchain/sync/replayer_test.go | 4 +- fvm/evm/offchain/utils/collection_test.go | 2 +- fvm/evm/offchain/utils/verify.go | 39 +++++++++++++++++- 6 files changed, 119 insertions(+), 31 deletions(-) diff --git a/fvm/evm/offchain/blocks/provider.go b/fvm/evm/offchain/blocks/provider.go index 9111be4ac64..b9da39bd468 100644 --- a/fvm/evm/offchain/blocks/provider.go +++ b/fvm/evm/offchain/blocks/provider.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/onflow/flow-go/fvm/evm/events" + "github.com/onflow/flow-go/fvm/evm/handler" "github.com/onflow/flow-go/fvm/evm/types" "github.com/onflow/flow-go/model/flow" ) @@ -13,7 +14,10 @@ import ( // a OnBlockReceived call before block execution and // a follow up OnBlockExecuted call after block execution. type BasicProvider struct { + chainID flow.ChainID blks *Blocks + rootAddr flow.Address + storage types.BackendStorage latestBlockPayload *events.BlockEventPayload } @@ -28,7 +32,12 @@ func NewBasicProvider( if err != nil { return nil, err } - return &BasicProvider{blks: blks}, nil + return &BasicProvider{ + chainID: chainID, + blks: blks, + rootAddr: rootAddr, + storage: storage, + }, nil } // GetSnapshotAt returns a block snapshot at the given height @@ -61,14 +70,49 @@ func (p *BasicProvider) OnBlockReceived(blockEvent *events.BlockEventPayload) er // OnBlockExecuted should be called after executing blocks. func (p *BasicProvider) OnBlockExecuted( height uint64, - resCol types.ReplayResultCollector) error { + resCol types.ReplayResultCollector, + blockProposal *types.BlockProposal, +) error { // we push the block hash after execution, so the behaviour of the blockhash is // identical to the evm.handler. 
if p.latestBlockPayload.Height != height { return fmt.Errorf("active block height doesn't match expected: %d, got: %d", p.latestBlockPayload.Height, height) } + + blockBytes, err := blockProposal.Block.ToBytes() + if err != nil { + return types.NewFatalError(err) + } + + // do the same as handler.CommitBlockProposal + err = p.storage.SetValue( + p.rootAddr[:], + []byte(handler.BlockStoreLatestBlockKey), + blockBytes, + ) + if err != nil { + return err + } + + blockProposalBytes, err := blockProposal.ToBytes() + if err != nil { + return types.NewFatalError(err) + } + + hash := p.latestBlockPayload.Hash + // update block proposal + err = p.storage.SetValue( + p.rootAddr[:], + []byte(handler.BlockStoreLatestBlockProposalKey), + blockProposalBytes, + ) + if err != nil { + return err + } + + // update block hash list return p.blks.PushBlockHash( p.latestBlockPayload.Height, - p.latestBlockPayload.Hash, + hash, ) } diff --git a/fvm/evm/offchain/sync/replay.go b/fvm/evm/offchain/sync/replay.go index 4516f37007d..e85fc21658c 100644 --- a/fvm/evm/offchain/sync/replay.go +++ b/fvm/evm/offchain/sync/replay.go @@ -30,25 +30,26 @@ func ReplayBlockExecution( transactionEvents []events.TransactionEventPayload, blockEvent *events.BlockEventPayload, validateResults bool, -) error { +) ([]*types.Result, error) { // check the passed block event if blockEvent == nil { - return fmt.Errorf("nil block event has been passed") + return nil, fmt.Errorf("nil block event has been passed") } // create a base block context for all transactions // tx related context values will be replaced during execution ctx, err := blockSnapshot.BlockContext() if err != nil { - return err + return nil, err } // update the tracer ctx.Tracer = tracer gasConsumedSoFar := uint64(0) txHashes := make(types.TransactionHashes, len(transactionEvents)) + results := make([]*types.Result, 0, len(transactionEvents)) for idx, tx := range transactionEvents { - err = replayTransactionExecution( + result, err := replayTransactionExecution( rootAddr, ctx, uint(idx), @@ -58,28 +59,30 @@ func ReplayBlockExecution( validateResults, ) if err != nil { - return fmt.Errorf("transaction execution failed, txIndex: %d, err: %w", idx, err) + return nil, fmt.Errorf("transaction execution failed, txIndex: %d, err: %w", idx, err) } gasConsumedSoFar += tx.GasConsumed txHashes[idx] = tx.Hash + + results = append(results, result) } if validateResults { // check transaction inclusion txHashRoot := gethTypes.DeriveSha(txHashes, gethTrie.NewStackTrie(nil)) if txHashRoot != blockEvent.TransactionHashRoot { - return fmt.Errorf("transaction root hash doesn't match [%x] != [%x]", txHashRoot, blockEvent.TransactionHashRoot) + return nil, fmt.Errorf("transaction root hash doesn't match [%x] != [%x]", txHashRoot, blockEvent.TransactionHashRoot) } // check total gas used if blockEvent.TotalGasUsed != gasConsumedSoFar { - return fmt.Errorf("total gas used doesn't match [%d] != [%d]", gasConsumedSoFar, blockEvent.TotalGasUsed) + return nil, fmt.Errorf("total gas used doesn't match [%d] != [%d]", gasConsumedSoFar, blockEvent.TotalGasUsed) } // no need to check the receipt root hash given we have checked the logs and other // values during tx execution. 
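[Editor's note] For reference, the inclusion check performed in the validation block above can be read in isolation as the following sketch. It is not part of the patch and simply restates the logic, reusing the geth helpers and event types already imported by this file:

// checkTxInclusion restates the validation above: the root derived from the ordered
// transaction hashes must equal the TransactionHashRoot announced by the EVM block event.
func checkTxInclusion(blockEvent *events.BlockEventPayload, txHashes types.TransactionHashes) error {
    txHashRoot := gethTypes.DeriveSha(txHashes, gethTrie.NewStackTrie(nil))
    if txHashRoot != blockEvent.TransactionHashRoot {
        return fmt.Errorf("transaction root hash doesn't match [%x] != [%x]", txHashRoot, blockEvent.TransactionHashRoot)
    }
    return nil
}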
} - return nil + return results, nil } func replayTransactionExecution( @@ -90,7 +93,7 @@ func replayTransactionExecution( ledger atree.Ledger, txEvent *events.TransactionEventPayload, validate bool, -) error { +) (*types.Result, error) { // create emulator em := emulator.NewEmulator(ledger, rootAddr) @@ -102,7 +105,7 @@ func replayTransactionExecution( if len(txEvent.PrecompiledCalls) > 0 { pcs, err := types.AggregatedPrecompileCallsFromEncoded(txEvent.PrecompiledCalls) if err != nil { - return fmt.Errorf("error decoding precompiled calls [%x]: %w", txEvent.Payload, err) + return nil, fmt.Errorf("error decoding precompiled calls [%x]: %w", txEvent.Payload, err) } ctx.ExtraPrecompiledContracts = precompiles.AggregatedPrecompiledCallsToPrecompiledContracts(pcs) } @@ -110,7 +113,7 @@ func replayTransactionExecution( // create a new block view bv, err := em.NewBlockView(ctx) if err != nil { - return err + return nil, err } var res *types.Result @@ -119,31 +122,31 @@ func replayTransactionExecution( if txEvent.TransactionType == types.DirectCallTxType { call, err := types.DirectCallFromEncoded(txEvent.Payload) if err != nil { - return fmt.Errorf("failed to RLP-decode direct call [%x]: %w", txEvent.Payload, err) + return nil, fmt.Errorf("failed to RLP-decode direct call [%x]: %w", txEvent.Payload, err) } res, err = bv.DirectCall(call) if err != nil { - return fmt.Errorf("failed to execute direct call [%x]: %w", txEvent.Hash, err) + return nil, fmt.Errorf("failed to execute direct call [%x]: %w", txEvent.Hash, err) } } else { gethTx := &gethTypes.Transaction{} if err := gethTx.UnmarshalBinary(txEvent.Payload); err != nil { - return fmt.Errorf("failed to RLP-decode transaction [%x]: %w", txEvent.Payload, err) + return nil, fmt.Errorf("failed to RLP-decode transaction [%x]: %w", txEvent.Payload, err) } res, err = bv.RunTransaction(gethTx) if err != nil { - return fmt.Errorf("failed to run transaction [%x]: %w", txEvent.Hash, err) + return nil, fmt.Errorf("failed to run transaction [%x]: %w", txEvent.Hash, err) } } // validate results if validate { if err := ValidateResult(res, txEvent); err != nil { - return fmt.Errorf("transaction replay failed (txHash %x): %w", txEvent.Hash, err) + return nil, fmt.Errorf("transaction replay failed (txHash %x): %w", txEvent.Hash, err) } } - return nil + return res, nil } func ValidateResult( diff --git a/fvm/evm/offchain/sync/replayer.go b/fvm/evm/offchain/sync/replayer.go index 25ccdc10cbf..33411b7c133 100644 --- a/fvm/evm/offchain/sync/replayer.go +++ b/fvm/evm/offchain/sync/replayer.go @@ -46,7 +46,11 @@ func NewReplayer( // ReplayBlock replays the execution of the transactions of an EVM block // using the provided transactionEvents and blockEvents, -// which include all the context data for re-executing the transactions, and returns the replay result. +// which include all the context data for re-executing the transactions, and returns +// the replay result and the result of each transaction. +// the replay result contains the register updates, and the result of each transaction +// contains the execution result of each transaction, which is useful for recontstructing +// the EVM block proposal. // this method can be called concurrently if underlying storage // tracer and block snapshot provider support concurrency. 
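[Editor's note] Taken together with the provider changes above, a caller-side sketch of the full per-block flow looks like this. It is illustrative only, uses the helpers as they stand at the end of this series (ReplayBlock returning per-transaction results, ReconstructProposal, the three-argument OnBlockExecuted), and condenses error handling; the wrapper package name is hypothetical:

package example

import (
    "github.com/rs/zerolog"

    "github.com/onflow/flow-go/fvm/environment"
    "github.com/onflow/flow-go/fvm/evm/events"
    "github.com/onflow/flow-go/fvm/evm/offchain/blocks"
    "github.com/onflow/flow-go/fvm/evm/offchain/sync"
    "github.com/onflow/flow-go/fvm/evm/testutils"
    "github.com/onflow/flow-go/model/flow"
)

// replayOneBlock shows the intended call sequence: feed the block event to the
// provider, replay the transactions, commit the returned register updates,
// rebuild the block proposal from the per-transaction results, and finish the
// block on the provider.
func replayOneBlock(
    chainID flow.ChainID,
    rootAddr flow.Address,
    store environment.ValueStore,
    bp *blocks.BasicProvider,
    blockEvent *events.BlockEventPayload,
    txEvents []events.TransactionEventPayload,
) error {
    if err := bp.OnBlockReceived(blockEvent); err != nil {
        return err
    }

    sp := testutils.NewTestStorageProvider(store, blockEvent.Height)
    cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true)
    res, results, err := cr.ReplayBlock(txEvents, blockEvent)
    if err != nil {
        return err
    }

    // commit the register updates produced by the EVM state transition
    for k, v := range res.StorageRegisterUpdates() {
        if err := store.SetValue([]byte(k.Owner), []byte(k.Key), v); err != nil {
            return err
        }
    }

    // reconstruct the block proposal from the per-transaction results
    proposal := blocks.ReconstructProposal(blockEvent, results)
    return bp.OnBlockExecuted(blockEvent.Height, res, proposal)
}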
// @@ -56,11 +60,11 @@ func NewReplayer( func (cr *Replayer) ReplayBlock( transactionEvents []events.TransactionEventPayload, blockEvent *events.BlockEventPayload, -) (types.ReplayResultCollector, error) { +) (types.ReplayResultCollector, []*types.Result, error) { // prepare storage st, err := cr.storageProvider.GetSnapshotAt(blockEvent.Height) if err != nil { - return nil, err + return nil, nil, err } // create storage @@ -69,11 +73,11 @@ func (cr *Replayer) ReplayBlock( // get block snapshot bs, err := cr.blockProvider.GetSnapshotAt(blockEvent.Height) if err != nil { - return nil, err + return nil, nil, err } // replay transactions - err = ReplayBlockExecution( + results, err := ReplayBlockExecution( cr.chainID, cr.rootAddr, state, @@ -84,8 +88,8 @@ func (cr *Replayer) ReplayBlock( cr.validateResults, ) if err != nil { - return nil, err + return nil, nil, err } - return state, nil + return state, results, nil } diff --git a/fvm/evm/offchain/sync/replayer_test.go b/fvm/evm/offchain/sync/replayer_test.go index f7c05ab63b5..d193163283b 100644 --- a/fvm/evm/offchain/sync/replayer_test.go +++ b/fvm/evm/offchain/sync/replayer_test.go @@ -162,9 +162,11 @@ func TestChainReplay(t *testing.T) { sp := NewTestStorageProvider(snapshot, 1) cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true) - res, err := cr.ReplayBlock(txEventPayloads, blockEventPayload) + res, results, err := cr.ReplayBlock(txEventPayloads, blockEventPayload) require.NoError(t, err) + require.Len(t, results, totalTxCount) + err = bp.OnBlockExecuted(blockEventPayload.Height, res) require.NoError(t, err) diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index e5b3059661b..ae8b10a0e59 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -147,7 +147,7 @@ func replayEvents( sp := NewTestStorageProvider(store, blockEventPayload.Height) cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true) - res, err := cr.ReplayBlock(txEvents, blockEventPayload) + res, _, err := cr.ReplayBlock(txEvents, blockEventPayload) require.NoError(t, err) // commit all changes diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index 2045de36f22..a3f3e871f13 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -18,6 +18,7 @@ import ( evmStorage "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" "github.com/onflow/flow-go/fvm/evm/testutils" + "github.com/onflow/flow-go/fvm/evm/types" "github.com/onflow/flow-go/ledger" "github.com/onflow/flow-go/ledger/common/convert" "github.com/onflow/flow-go/model/flow" @@ -100,7 +101,7 @@ func OffchainReplayBackwardCompatibilityTest( sp := testutils.NewTestStorageProvider(store, evmBlockEvent.Height) cr := sync.NewReplayer(chainID, rootAddr, sp, bp, log, nil, true) - res, err := cr.ReplayBlock(evmTxEvents, evmBlockEvent) + res, results, err := cr.ReplayBlock(evmTxEvents, evmBlockEvent) if err != nil { return err } @@ -113,7 +114,9 @@ func OffchainReplayBackwardCompatibilityTest( } } - err = bp.OnBlockExecuted(evmBlockEvent.Height, res) + blockProposal := reconstructProposal(evmBlockEvent, evmTxEvents, results) + + err = bp.OnBlockExecuted(evmBlockEvent.Height, res, blockProposal) if err != nil { return err } @@ -185,3 +188,35 @@ func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.T return blockEvent, txEvents, nil } + +func 
reconstructProposal( + blockEvent *events.BlockEventPayload, + txEvents []events.TransactionEventPayload, + results []*types.Result, +) *types.BlockProposal { + receipts := make([]types.LightReceipt, 0, len(results)) + + for _, result := range results { + receipts = append(receipts, *result.LightReceipt()) + } + + txHashes := make(types.TransactionHashes, 0, len(txEvents)) + for _, tx := range txEvents { + txHashes = append(txHashes, tx.Hash) + } + + return &types.BlockProposal{ + Block: types.Block{ + ParentBlockHash: blockEvent.ParentBlockHash, + Height: blockEvent.Height, + Timestamp: blockEvent.Timestamp, + TotalSupply: blockEvent.TotalSupply.Big(), + ReceiptRoot: blockEvent.ReceiptRoot, + TransactionHashRoot: blockEvent.TransactionHashRoot, + TotalGasUsed: blockEvent.TotalGasUsed, + PrevRandao: blockEvent.PrevRandao, + }, + Receipts: receipts, + TxHashes: txHashes, + } +} From 0c7e532394ae91f79ed52cb316cd6a4363a9b7b6 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 11:18:53 -0800 Subject: [PATCH 28/36] fix tests --- fvm/evm/offchain/blocks/block_proposal.go | 38 +++++++++++++++++++++++ fvm/evm/offchain/sync/replayer_test.go | 14 ++++----- fvm/evm/offchain/utils/collection_test.go | 6 ++-- fvm/evm/offchain/utils/verify.go | 35 +-------------------- 4 files changed, 50 insertions(+), 43 deletions(-) create mode 100644 fvm/evm/offchain/blocks/block_proposal.go diff --git a/fvm/evm/offchain/blocks/block_proposal.go b/fvm/evm/offchain/blocks/block_proposal.go new file mode 100644 index 00000000000..877ba3303fe --- /dev/null +++ b/fvm/evm/offchain/blocks/block_proposal.go @@ -0,0 +1,38 @@ +package blocks + +import ( + "github.com/onflow/flow-go/fvm/evm/events" + "github.com/onflow/flow-go/fvm/evm/types" +) + +func ReconstructProposal( + blockEvent *events.BlockEventPayload, + txEvents []events.TransactionEventPayload, + results []*types.Result, +) *types.BlockProposal { + receipts := make([]types.LightReceipt, 0, len(results)) + + for _, result := range results { + receipts = append(receipts, *result.LightReceipt()) + } + + txHashes := make(types.TransactionHashes, 0, len(txEvents)) + for _, tx := range txEvents { + txHashes = append(txHashes, tx.Hash) + } + + return &types.BlockProposal{ + Block: types.Block{ + ParentBlockHash: blockEvent.ParentBlockHash, + Height: blockEvent.Height, + Timestamp: blockEvent.Timestamp, + TotalSupply: blockEvent.TotalSupply.Big(), + ReceiptRoot: blockEvent.ReceiptRoot, + TransactionHashRoot: blockEvent.TransactionHashRoot, + TotalGasUsed: blockEvent.TotalGasUsed, + PrevRandao: blockEvent.PrevRandao, + }, + Receipts: receipts, + TxHashes: txHashes, + } +} diff --git a/fvm/evm/offchain/sync/replayer_test.go b/fvm/evm/offchain/sync/replayer_test.go index d193163283b..2da1a5ba76b 100644 --- a/fvm/evm/offchain/sync/replayer_test.go +++ b/fvm/evm/offchain/sync/replayer_test.go @@ -12,6 +12,7 @@ import ( "github.com/onflow/flow-go/fvm/evm" "github.com/onflow/flow-go/fvm/evm/events" "github.com/onflow/flow-go/fvm/evm/offchain/blocks" + "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" . 
"github.com/onflow/flow-go/fvm/evm/testutils" "github.com/onflow/flow-go/fvm/evm/types" @@ -154,7 +155,8 @@ func TestChainReplay(t *testing.T) { // check replay - bp, err := blocks.NewBasicProvider(chainID, snapshot, rootAddr) + bpStorage := storage.NewEphemeralStorage(snapshot) + bp, err := blocks.NewBasicProvider(chainID, bpStorage, rootAddr) require.NoError(t, err) err = bp.OnBlockReceived(blockEventPayload) @@ -167,14 +169,12 @@ func TestChainReplay(t *testing.T) { require.Len(t, results, totalTxCount) - err = bp.OnBlockExecuted(blockEventPayload.Height, res) + proposal := blocks.ReconstructProposal(blockEventPayload, txEventPayloads, results) + + err = bp.OnBlockExecuted(blockEventPayload.Height, res, proposal) require.NoError(t, err) - // TODO: verify the state delta - // currently the backend storage doesn't work well with this - // changes needed to make this work, which is left for future PRs - // - // for k, v := range result.StorageRegisterUpdates() { + // for k, v := range bpStorage.StorageRegisterUpdates() { // ret, err := backend.GetValue([]byte(k.Owner), []byte(k.Key)) // require.NoError(t, err) // require.Equal(t, ret[:], v[:]) diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index ae8b10a0e59..a4385c7f664 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -147,7 +147,7 @@ func replayEvents( sp := NewTestStorageProvider(store, blockEventPayload.Height) cr := sync.NewReplayer(chainID, rootAddr, sp, bp, zerolog.Logger{}, nil, true) - res, _, err := cr.ReplayBlock(txEvents, blockEventPayload) + res, results, err := cr.ReplayBlock(txEvents, blockEventPayload) require.NoError(t, err) // commit all changes @@ -156,7 +156,9 @@ func replayEvents( require.NoError(t, err) } - err = bp.OnBlockExecuted(blockEventPayload.Height, res) + proposal := blocks.ReconstructProposal(blockEventPayload, txEvents, results) + + err = bp.OnBlockExecuted(blockEventPayload.Height, res, proposal) require.NoError(t, err) // commit all block hash list changes diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index a3f3e871f13..3a3d9d9b9ce 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -18,7 +18,6 @@ import ( evmStorage "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" "github.com/onflow/flow-go/fvm/evm/testutils" - "github.com/onflow/flow-go/fvm/evm/types" "github.com/onflow/flow-go/ledger" "github.com/onflow/flow-go/ledger/common/convert" "github.com/onflow/flow-go/model/flow" @@ -114,7 +113,7 @@ func OffchainReplayBackwardCompatibilityTest( } } - blockProposal := reconstructProposal(evmBlockEvent, evmTxEvents, results) + blockProposal := blocks.ReconstructProposal(evmBlockEvent, evmTxEvents, results) err = bp.OnBlockExecuted(evmBlockEvent.Height, res, blockProposal) if err != nil { @@ -188,35 +187,3 @@ func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.T return blockEvent, txEvents, nil } - -func reconstructProposal( - blockEvent *events.BlockEventPayload, - txEvents []events.TransactionEventPayload, - results []*types.Result, -) *types.BlockProposal { - receipts := make([]types.LightReceipt, 0, len(results)) - - for _, result := range results { - receipts = append(receipts, *result.LightReceipt()) - } - - txHashes := make(types.TransactionHashes, 0, len(txEvents)) - for _, tx := range txEvents { - txHashes = append(txHashes, tx.Hash) - } - - return 
&types.BlockProposal{ - Block: types.Block{ - ParentBlockHash: blockEvent.ParentBlockHash, - Height: blockEvent.Height, - Timestamp: blockEvent.Timestamp, - TotalSupply: blockEvent.TotalSupply.Big(), - ReceiptRoot: blockEvent.ReceiptRoot, - TransactionHashRoot: blockEvent.TransactionHashRoot, - TotalGasUsed: blockEvent.TotalGasUsed, - PrevRandao: blockEvent.PrevRandao, - }, - Receipts: receipts, - TxHashes: txHashes, - } -} From 0548f6a6d7c28308cbb8d645be79356bdd3ffb26 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 11:43:45 -0800 Subject: [PATCH 29/36] update error message --- fvm/evm/offchain/utils/verify.go | 76 +++++++++++++++++++++++++------- 1 file changed, 61 insertions(+), 15 deletions(-) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index 3a3d9d9b9ce..64c50fce3b7 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -68,7 +68,7 @@ func OffchainReplayBackwardCompatibilityTest( payloads = append(payloads, chunkData.TrieUpdate.Payloads...) } - updates := make(map[flow.RegisterID]flow.RegisterValue, len(payloads)) + expectedUpdates := make(map[flow.RegisterID]flow.RegisterValue, len(payloads)) for i := len(payloads) - 1; i >= 0; i-- { regID, regVal, err := convert.PayloadToRegister(payloads[i]) if err != nil { @@ -82,8 +82,8 @@ func OffchainReplayBackwardCompatibilityTest( // when iterating backwards, duplicated register updates are stale updates, // so skipping them - if _, ok := updates[regID]; !ok { - updates[regID] = regVal + if _, ok := expectedUpdates[regID]; !ok { + expectedUpdates[regID] = regVal } } @@ -105,12 +105,16 @@ func OffchainReplayBackwardCompatibilityTest( return err } + actualUpdates := make(map[flow.RegisterID]flow.RegisterValue, len(expectedUpdates)) + // commit all changes for k, v := range res.StorageRegisterUpdates() { err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) if err != nil { return err } + + actualUpdates[k] = v } blockProposal := blocks.ReconstructProposal(evmBlockEvent, evmTxEvents, results) @@ -129,20 +133,12 @@ func OffchainReplayBackwardCompatibilityTest( return err } - expectedUpdate, ok := updates[k] - if !ok { - return fmt.Errorf("missing update for register %v, %v", k, expectedUpdate) - } - - if !bytes.Equal(expectedUpdate, v) { - return fmt.Errorf("unexpected update for register %v, expected %v, got %v", k, expectedUpdate, v) - } - - delete(updates, k) + actualUpdates[k] = v } - if len(updates) > 0 { - return fmt.Errorf("missing updates for registers %v", updates) + err = verifyRegisterUpdates(expectedUpdates, actualUpdates) + if err != nil { + return err } log.Info().Msgf("verified block %d", height) @@ -187,3 +183,53 @@ func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.T return blockEvent, txEvents, nil } + +func verifyRegisterUpdates(expectedUpdates map[flow.RegisterID]flow.RegisterValue, actualUpdates map[flow.RegisterID]flow.RegisterValue) error { + missingUpdates := make(map[flow.RegisterID]flow.RegisterValue) + additionalUpdates := make(map[flow.RegisterID]flow.RegisterValue) + mismatchingUpdates := make(map[flow.RegisterID][2]flow.RegisterValue) + + for k, v := range expectedUpdates { + if actualVal, ok := actualUpdates[k]; !ok { + missingUpdates[k] = v + } else if !bytes.Equal(v, actualVal) { + mismatchingUpdates[k] = [2]flow.RegisterValue{v, actualVal} + } + + delete(actualUpdates, k) + } + + for k, v := range actualUpdates { + additionalUpdates[k] = v + } + + // Build a combined error message + 
var errorMessage strings.Builder + + if len(missingUpdates) > 0 { + errorMessage.WriteString("Missing register updates:\n") + for id, value := range missingUpdates { + errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ExpectedValue: %v\n", id, value)) + } + } + + if len(additionalUpdates) > 0 { + errorMessage.WriteString("Additional register updates:\n") + for id, value := range additionalUpdates { + errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ActualValue: %v\n", id, value)) + } + } + + if len(mismatchingUpdates) > 0 { + errorMessage.WriteString("Mismatching register updates:\n") + for id, values := range mismatchingUpdates { + errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ExpectedValue: %v, ActualValue: %v\n", id, values[0], values[1])) + } + } + + if errorMessage.Len() > 0 { + return errors.New(errorMessage.String()) + } + + return nil +} From 5a197b6b37a614a6a4a889ae67f84163e121ef0f Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 11:45:59 -0800 Subject: [PATCH 30/36] update error message --- fvm/evm/offchain/utils/verify.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index 64c50fce3b7..9afb272acec 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -209,21 +209,21 @@ func verifyRegisterUpdates(expectedUpdates map[flow.RegisterID]flow.RegisterValu if len(missingUpdates) > 0 { errorMessage.WriteString("Missing register updates:\n") for id, value := range missingUpdates { - errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ExpectedValue: %v\n", id, value)) + errorMessage.WriteString(fmt.Sprintf(" RegisterKey: %v, ExpectedValue: %x\n", id.Key, value)) } } if len(additionalUpdates) > 0 { errorMessage.WriteString("Additional register updates:\n") for id, value := range additionalUpdates { - errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ActualValue: %v\n", id, value)) + errorMessage.WriteString(fmt.Sprintf(" RegisterKey: %v, ActualValue: %x\n", id.Key, value)) } } if len(mismatchingUpdates) > 0 { errorMessage.WriteString("Mismatching register updates:\n") for id, values := range mismatchingUpdates { - errorMessage.WriteString(fmt.Sprintf(" RegisterID: %v, ExpectedValue: %v, ActualValue: %v\n", id, values[0], values[1])) + errorMessage.WriteString(fmt.Sprintf(" RegisterKey: %v, ExpectedValue: %x, ActualValue: %x\n", id.Key, values[0], values[1])) } } From af0cc4f55ca5f87a5cfd504401f6dca66ff2dea8 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 12:06:47 -0800 Subject: [PATCH 31/36] add account status updates --- fvm/evm/offchain/blocks/block_proposal.go | 8 ++----- fvm/evm/offchain/sync/replayer_test.go | 2 +- fvm/evm/offchain/utils/collection_test.go | 2 +- fvm/evm/offchain/utils/verify.go | 27 +++++++++++++++++++---- 4 files changed, 27 insertions(+), 12 deletions(-) diff --git a/fvm/evm/offchain/blocks/block_proposal.go b/fvm/evm/offchain/blocks/block_proposal.go index 877ba3303fe..cd1d68ed517 100644 --- a/fvm/evm/offchain/blocks/block_proposal.go +++ b/fvm/evm/offchain/blocks/block_proposal.go @@ -7,18 +7,14 @@ import ( func ReconstructProposal( blockEvent *events.BlockEventPayload, - txEvents []events.TransactionEventPayload, results []*types.Result, ) *types.BlockProposal { receipts := make([]types.LightReceipt, 0, len(results)) + txHashes := make(types.TransactionHashes, 0, len(results)) for _, result := range results { receipts = append(receipts, 
*result.LightReceipt()) - } - - txHashes := make(types.TransactionHashes, 0, len(txEvents)) - for _, tx := range txEvents { - txHashes = append(txHashes, tx.Hash) + txHashes = append(txHashes, result.TxHash) } return &types.BlockProposal{ diff --git a/fvm/evm/offchain/sync/replayer_test.go b/fvm/evm/offchain/sync/replayer_test.go index 2da1a5ba76b..3668e445c84 100644 --- a/fvm/evm/offchain/sync/replayer_test.go +++ b/fvm/evm/offchain/sync/replayer_test.go @@ -169,7 +169,7 @@ func TestChainReplay(t *testing.T) { require.Len(t, results, totalTxCount) - proposal := blocks.ReconstructProposal(blockEventPayload, txEventPayloads, results) + proposal := blocks.ReconstructProposal(blockEventPayload, results) err = bp.OnBlockExecuted(blockEventPayload.Height, res, proposal) require.NoError(t, err) diff --git a/fvm/evm/offchain/utils/collection_test.go b/fvm/evm/offchain/utils/collection_test.go index a4385c7f664..8e292530534 100644 --- a/fvm/evm/offchain/utils/collection_test.go +++ b/fvm/evm/offchain/utils/collection_test.go @@ -156,7 +156,7 @@ func replayEvents( require.NoError(t, err) } - proposal := blocks.ReconstructProposal(blockEventPayload, txEvents, results) + proposal := blocks.ReconstructProposal(blockEventPayload, results) err = bp.OnBlockExecuted(blockEventPayload.Height, res, proposal) require.NoError(t, err) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index 9afb272acec..740989eac23 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -25,6 +25,16 @@ import ( "github.com/onflow/flow-go/storage" ) +// EVM Root Height is the first block that has EVM Block Event where the EVM block height is 1 +func isEVMRootHeight(chainID flow.ChainID, flowHeight uint64) bool { + if chainID == flow.Testnet { + return flowHeight == 211176671 + } else if chainID == flow.Mainnet { + return flowHeight == 85981136 + } + return flowHeight == 1 +} + func OffchainReplayBackwardCompatibilityTest( log zerolog.Logger, chainID flow.ChainID, @@ -44,6 +54,14 @@ func OffchainReplayBackwardCompatibilityTest( return err } + // setup account status at EVM root block + if isEVMRootHeight(chainID, flowStartHeight) { + err = bpStorage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), environment.NewAccountStatus().ToBytes()) + if err != nil { + return err + } + } + for height := flowStartHeight; height <= flowEndHeight; height++ { blockID, err := headers.BlockIDByHeight(height) if err != nil { @@ -87,7 +105,7 @@ func OffchainReplayBackwardCompatibilityTest( } } - // parse events + // parse EVM events evmBlockEvent, evmTxEvents, err := parseEVMEvents(events) if err != nil { return err @@ -107,7 +125,7 @@ func OffchainReplayBackwardCompatibilityTest( actualUpdates := make(map[flow.RegisterID]flow.RegisterValue, len(expectedUpdates)) - // commit all changes + // commit all register changes from the EVM state transition for k, v := range res.StorageRegisterUpdates() { err = store.SetValue([]byte(k.Owner), []byte(k.Key), v) if err != nil { @@ -117,14 +135,15 @@ func OffchainReplayBackwardCompatibilityTest( actualUpdates[k] = v } - blockProposal := blocks.ReconstructProposal(evmBlockEvent, evmTxEvents, results) + blockProposal := blocks.ReconstructProposal(evmBlockEvent, results) err = bp.OnBlockExecuted(evmBlockEvent.Height, res, blockProposal) if err != nil { return err } - // verify and commit all block hash list changes + // commit all register changes from non-EVM state transition, such + // as block hash list changes for k, v := range 
bpStorage.StorageRegisterUpdates() { // verify the block hash list changes are included in the trie update From 96b7fa1de8316b95e462139da44aec5009165d0d Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 16:32:42 -0800 Subject: [PATCH 32/36] update provider --- fvm/evm/offchain/utils/verify.go | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index 740989eac23..f059874bbb9 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -48,21 +48,21 @@ func OffchainReplayBackwardCompatibilityTest( rootAddr := evm.StorageAccountAddress(chainID) rootAddrStr := string(rootAddr.Bytes()) - bpStorage := evmStorage.NewEphemeralStorage(store) - bp, err := blocks.NewBasicProvider(chainID, bpStorage, rootAddr) - if err != nil { - return err - } - - // setup account status at EVM root block - if isEVMRootHeight(chainID, flowStartHeight) { - err = bpStorage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), environment.NewAccountStatus().ToBytes()) + for height := flowStartHeight; height <= flowEndHeight; height++ { + bpStorage := evmStorage.NewEphemeralStorage(store) + bp, err := blocks.NewBasicProvider(chainID, bpStorage, rootAddr) if err != nil { return err } - } - for height := flowStartHeight; height <= flowEndHeight; height++ { + // setup account status at EVM root block + if isEVMRootHeight(chainID, flowStartHeight) { + err = bpStorage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), environment.NewAccountStatus().ToBytes()) + if err != nil { + return err + } + } + blockID, err := headers.BlockIDByHeight(height) if err != nil { return err From 67e32f7274bcb7427a1ebded85383909aa22847b Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 16:39:28 -0800 Subject: [PATCH 33/36] update verifable keys --- fvm/evm/handler/blockHashList.go | 9 +++++++++ fvm/evm/offchain/utils/verify.go | 14 ++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/fvm/evm/handler/blockHashList.go b/fvm/evm/handler/blockHashList.go index 91eefded24e..0db2aff73f9 100644 --- a/fvm/evm/handler/blockHashList.go +++ b/fvm/evm/handler/blockHashList.go @@ -3,6 +3,7 @@ package handler import ( "encoding/binary" "fmt" + "strings" gethCommon "github.com/onflow/go-ethereum/common" @@ -26,6 +27,14 @@ const ( heightEncodingSize ) +func IsBlockHashListBucketKeyFormat(id flow.RegisterID) bool { + return strings.HasPrefix(id.Key, "BlockHashListBucket") +} + +func IsBlockHashListMetaKey(id flow.RegisterID) bool { + return id.Key == blockHashListMetaKey +} + // BlockHashList stores the last `capacity` number of block hashes // // Under the hood it breaks the list of hashes into diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index f059874bbb9..cfa48d39f9d 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -14,6 +14,7 @@ import ( "github.com/onflow/flow-go/fvm/environment" "github.com/onflow/flow-go/fvm/evm" "github.com/onflow/flow-go/fvm/evm/events" + "github.com/onflow/flow-go/fvm/evm/handler" "github.com/onflow/flow-go/fvm/evm/offchain/blocks" evmStorage "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" @@ -98,6 +99,10 @@ func OffchainReplayBackwardCompatibilityTest( continue } + if !verifiableKeys(regID) { + continue + } + // when iterating backwards, duplicated register updates are stale updates, // so skipping them 
if _, ok := expectedUpdates[regID]; !ok { @@ -152,7 +157,12 @@ func OffchainReplayBackwardCompatibilityTest( return err } + if !verifiableKeys(k) { + continue + } + actualUpdates[k] = v + } err = verifyRegisterUpdates(expectedUpdates, actualUpdates) @@ -166,6 +176,10 @@ func OffchainReplayBackwardCompatibilityTest( return nil } +func verifiableKeys(key flow.RegisterID) bool { + return handler.IsBlockHashListBucketKeyFormat(key) || handler.IsBlockHashListMetaKey(key) +} + func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.TransactionEventPayload, error) { var blockEvent *events.BlockEventPayload txEvents := make([]events.TransactionEventPayload, 0) From c9bb7c19ee13758ce8c81b52d6862afb3293cf6f Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 16:42:00 -0800 Subject: [PATCH 34/36] update verifable keys --- fvm/evm/offchain/utils/verify.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index cfa48d39f9d..a269e81ec1b 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -137,6 +137,10 @@ func OffchainReplayBackwardCompatibilityTest( return err } + if !verifiableKeys(k) { + continue + } + actualUpdates[k] = v } From f5641613559e6b673a93ce53da2f493a22329ecc Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 20:37:34 -0800 Subject: [PATCH 35/36] skip register verification --- fvm/evm/offchain/utils/verify.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fvm/evm/offchain/utils/verify.go b/fvm/evm/offchain/utils/verify.go index a269e81ec1b..9a6f6a45d87 100644 --- a/fvm/evm/offchain/utils/verify.go +++ b/fvm/evm/offchain/utils/verify.go @@ -14,7 +14,6 @@ import ( "github.com/onflow/flow-go/fvm/environment" "github.com/onflow/flow-go/fvm/evm" "github.com/onflow/flow-go/fvm/evm/events" - "github.com/onflow/flow-go/fvm/evm/handler" "github.com/onflow/flow-go/fvm/evm/offchain/blocks" evmStorage "github.com/onflow/flow-go/fvm/evm/offchain/storage" "github.com/onflow/flow-go/fvm/evm/offchain/sync" @@ -58,7 +57,8 @@ func OffchainReplayBackwardCompatibilityTest( // setup account status at EVM root block if isEVMRootHeight(chainID, flowStartHeight) { - err = bpStorage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), environment.NewAccountStatus().ToBytes()) + err = bpStorage.SetValue(rootAddr[:], []byte(flow.AccountStatusKey), + environment.NewAccountStatus().ToBytes()) if err != nil { return err } @@ -166,7 +166,6 @@ func OffchainReplayBackwardCompatibilityTest( } actualUpdates[k] = v - } err = verifyRegisterUpdates(expectedUpdates, actualUpdates) @@ -181,7 +180,8 @@ func OffchainReplayBackwardCompatibilityTest( } func verifiableKeys(key flow.RegisterID) bool { - return handler.IsBlockHashListBucketKeyFormat(key) || handler.IsBlockHashListMetaKey(key) + return false + // return handler.IsBlockHashListBucketKeyFormat(key) || handler.IsBlockHashListMetaKey(key) } func parseEVMEvents(evts flow.EventsList) (*events.BlockEventPayload, []events.TransactionEventPayload, error) { From 20487a351260eefa63a6652cc52ea314a11ca430 Mon Sep 17 00:00:00 2001 From: "Leo Zhang (zhangchiqing)" Date: Fri, 22 Nov 2024 22:32:42 -0800 Subject: [PATCH 36/36] fix regresion --- fvm/evm/offchain/blocks/block_context.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fvm/evm/offchain/blocks/block_context.go b/fvm/evm/offchain/blocks/block_context.go index a18c7077378..1247253dcd0 100644 --- 
a/fvm/evm/offchain/blocks/block_context.go +++ b/fvm/evm/offchain/blocks/block_context.go @@ -67,7 +67,7 @@ func UseBlockHashCorrection(chainID flow.ChainID, evmHeightOfCurrentBlock uint64 // array of hashes. if chainID == flow.Mainnet && evmHeightOfCurrentBlock < blockHashListFixHCUEVMHeightMainnet { return fixedHashes[flow.Mainnet][queriedEVMHeight%256], true - } else if chainID == flow.Testnet && blockHashListBugIntroducedHCUEVMHeightTestnet <= evmHeightOfCurrentBlock && evmHeightOfCurrentBlock < blockHashListFixHCUEVMHeightTestnet { + } else if chainID == flow.Testnet && evmHeightOfCurrentBlock < blockHashListFixHCUEVMHeightTestnet { return fixedHashes[flow.Testnet][queriedEVMHeight%256], true } return gethCommon.Hash{}, false
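The sketch below is illustrative only and is not part of the patch series above. It reduces the register-update verification added in PATCH 29 (verifyRegisterUpdates in fvm/evm/offchain/utils/verify.go) to plain string keys and byte-slice values standing in for flow.RegisterID and flow.RegisterValue, to show the three-way comparison (missing, additional, mismatching) and the single combined error it produces, so one verification pass surfaces every discrepancy at once rather than failing on the first one.

// Illustrative sketch only; simplified types are stand-ins, not the real
// flow.RegisterID / flow.RegisterValue used by the patches above.
package main

import (
	"bytes"
	"errors"
	"fmt"
	"strings"
)

// compareUpdates reports registers that are missing from, extra in, or
// mismatching between the expected and actual update sets, combined into a
// single error message.
func compareUpdates(expected, actual map[string][]byte) error {
	var msg strings.Builder

	for k, want := range expected {
		got, ok := actual[k]
		switch {
		case !ok:
			msg.WriteString(fmt.Sprintf("missing update: key=%v expected=%x\n", k, want))
		case !bytes.Equal(want, got):
			msg.WriteString(fmt.Sprintf("mismatching update: key=%v expected=%x actual=%x\n", k, want, got))
		}
	}

	for k, got := range actual {
		if _, ok := expected[k]; !ok {
			msg.WriteString(fmt.Sprintf("additional update: key=%v actual=%x\n", k, got))
		}
	}

	if msg.Len() > 0 {
		return errors.New(msg.String())
	}
	return nil
}

func main() {
	expected := map[string][]byte{"a": {0x01}, "b": {0x02}}
	actual := map[string][]byte{"a": {0x01}, "b": {0xff}, "c": {0x03}}

	// Prints one combined report covering the mismatch on "b" and the
	// additional update for "c".
	if err := compareUpdates(expected, actual); err != nil {
		fmt.Println(err)
	}
}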