From 639917a578af3af680b43776c112e341c7d0bd61 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Sat, 25 Oct 2025 11:57:01 -0400 Subject: [PATCH 1/9] chore: minor improvements --- crates/cli/src/cli/mod.rs | 3 - crates/cli/src/cli/simnet/mod.rs | 24 ++- crates/cli/src/http/mod.rs | 35 ++++- crates/cli/src/tui/simnet.rs | 9 +- crates/core/src/rpc/full.rs | 4 +- crates/core/src/rpc/mod.rs | 18 ++- crates/core/src/rpc/surfnet_cheatcodes.rs | 61 +++++++- crates/core/src/rpc/ws.rs | 172 ++++++++++++++++++++++ crates/core/src/runloops/mod.rs | 2 +- crates/types/src/types.rs | 27 +++- 10 files changed, 317 insertions(+), 38 deletions(-) diff --git a/crates/cli/src/cli/mod.rs b/crates/cli/src/cli/mod.rs index 5a6f12fa..8151cd4d 100644 --- a/crates/cli/src/cli/mod.rs +++ b/crates/cli/src/cli/mod.rs @@ -163,9 +163,6 @@ pub struct StartSimnet { /// Display streams of logs instead of terminal UI dashboard(eg. surfpool start --no-tui) #[clap(long = "no-tui", default_value = "false")] pub no_tui: bool, - /// Include debug logs (eg. surfpool start --debug) - #[clap(long = "debug", action=ArgAction::SetTrue, default_value = "false")] - pub debug: bool, /// Disable auto deployments (eg. 
surfpool start --no-deploy) #[clap(long = "no-deploy", default_value = "false")] pub no_deploy: bool, diff --git a/crates/cli/src/cli/simnet/mod.rs b/crates/cli/src/cli/simnet/mod.rs index 1cee8e06..b5474a57 100644 --- a/crates/cli/src/cli/simnet/mod.rs +++ b/crates/cli/src/cli/simnet/mod.rs @@ -180,7 +180,10 @@ pub async fn handle_start_local_surfnet_command( // Non blocking check for new versions #[cfg(feature = "version_check")] { - let local_version = env!("CARGO_PKG_VERSION"); + let mut local_version = env!("CARGO_PKG_VERSION"); + if cmd.ci { + local_version = format!("{}-ci", local_version); + } let response = txtx_gql::kit::reqwest::get(format!( "{}/api/versions?v=/{}", super::DEFAULT_CLOUD_URL, @@ -237,13 +240,14 @@ async fn start_service( } else { DisplayedUrl::Studio(sanitized_config) }; + let include_debug_logs = cmd.log_level.to_lowercase().eq("debug"); // Start frontend - kept on main thread if cmd.daemon || cmd.no_tui { log_events( simnet_events_rx, subgraph_events_rx, - cmd.debug, + include_debug_logs, deploy_progress_rx, simnet_commands_tx, runloop_terminator.unwrap(), @@ -252,7 +256,7 @@ async fn start_service( tui::simnet::start_app( simnet_events_rx, simnet_commands_tx, - cmd.debug, + include_debug_logs, deploy_progress_rx, displayed_url, breaker, @@ -317,11 +321,7 @@ fn log_events( SimnetEvent::EpochInfoUpdate(_) => { info!("{}", event.epoch_info_update_msg()); } - SimnetEvent::SystemClockUpdated(_) => { - if include_debug_logs { - info!("{}", event.clock_update_msg()); - } - } + SimnetEvent::SystemClockUpdated(_) => {} SimnetEvent::ClockUpdate(_) => {} SimnetEvent::ErrorLog(_dt, log) => { error!("{}", log); @@ -330,9 +330,7 @@ fn log_events( info!("{}", log); } SimnetEvent::DebugLog(_dt, log) => { - if include_debug_logs { - debug!("{}", log); - } + debug!("{}", log); } SimnetEvent::WarnLog(_dt, log) => { warn!("{}", log); @@ -396,9 +394,7 @@ fn log_events( info!("{}", log); } SubgraphEvent::DebugLog(_dt, log) => { - if include_debug_logs { - 
info!("{}", log); - } + debug!("{}", log); } SubgraphEvent::WarnLog(_dt, log) => { warn!("{}", log); diff --git a/crates/cli/src/http/mod.rs b/crates/cli/src/http/mod.rs index 158193a6..603e4fa5 100644 --- a/crates/cli/src/http/mod.rs +++ b/crates/cli/src/http/mod.rs @@ -62,13 +62,14 @@ pub async fn start_subgraph_and_explorer_server( let schema_wrapped = Data::new(schema); let context_wrapped = Data::new(RwLock::new(context)); let config_wrapped = Data::new(RwLock::new(config.clone())); + let collections_metadata_lookup_wrapped = Data::new(RwLock::new(collections_metadata_lookup)); let subgraph_handle = start_subgraph_runloop( subgraph_events_tx, subgraph_commands_rx, context_wrapped.clone(), schema_wrapped.clone(), - collections_metadata_lookup, + collections_metadata_lookup_wrapped.clone(), config, ctx, )?; @@ -78,6 +79,7 @@ pub async fn start_subgraph_and_explorer_server( .app_data(schema_wrapped.clone()) .app_data(context_wrapped.clone()) .app_data(config_wrapped.clone()) + .app_data(collections_metadata_lookup_wrapped.clone()) .wrap( Cors::default() .allow_any_origin() @@ -90,8 +92,10 @@ pub async fn start_subgraph_and_explorer_server( .wrap(middleware::Compress::default()) .wrap(middleware::Logger::default()) .service(get_config) + .service(get_indexers) .service( web::scope("/workspace") + .route("/v1/indexers", web::post().to(post_graphql)) .route("/v1/graphql?", web::get().to(get_graphql)) .route("/v1/graphql", web::post().to(post_graphql)) .route("/v1/subscriptions", web::get().to(subscriptions)), @@ -145,6 +149,23 @@ async fn get_config( .body(api_config.to_string())) } +#[actix_web::get("/workspace/v1/indexers")] +async fn get_indexers( + collections_metadata_lookup: Data>, +) -> Result { + let lookup = collections_metadata_lookup + .read() + .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to read collections metadata"))?; + + let collections: Vec<&CollectionMetadata> = lookup.entries.values().collect(); + let response = 
serde_json::to_string(&collections) + .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to serialize collections"))?; + + Ok(HttpResponse::Ok() + .content_type("application/json") + .body(response)) +} + #[cfg(not(feature = "explorer"))] fn handle_embedded_file(_path: &str) -> HttpResponse { HttpResponse::NotFound().body("404 Not Found") @@ -221,7 +242,7 @@ fn start_subgraph_runloop( subgraph_commands_rx: Receiver, gql_context: Data>, gql_schema: Data>>, - mut collections_metadata_lookup: CollectionsMetadataLookup, + collections_metadata_lookup: Data>, config: SanitizedConfig, ctx: &Context, ) -> Result>, String> { @@ -263,10 +284,14 @@ fn start_subgraph_runloop( if let Err(e) = gql_context.pool.register_collection(&metadata, &request, &worker_id) { error!("{}", e); } - collections_metadata_lookup.add_collection(metadata); - gql_schema.replace(new_dynamic_schema(collections_metadata_lookup.clone())); - let console_url = format!("{}/subgraphs", config.studio_url.clone()); + let mut lookup = collections_metadata_lookup.write().map_err(|_| { + format!("{err_ctx}: Failed to acquire write lock on collections metadata lookup") + })?; + lookup.add_collection(metadata); + gql_schema.replace(new_dynamic_schema(lookup.clone())); + + let console_url = format!("{}/accounts", config.studio_url.clone()); let _ = sender.send(console_url); } SubgraphCommand::ObserveCollection(subgraph_observer_rx) => { diff --git a/crates/cli/src/tui/simnet.rs b/crates/cli/src/tui/simnet.rs index 75787ec7..3e95d577 100644 --- a/crates/cli/src/tui/simnet.rs +++ b/crates/cli/src/tui/simnet.rs @@ -50,7 +50,7 @@ const SUPPORTS_256_COLOR_INDICATOR: &str = "256"; const SUPPORTS_TRUECOLOR_INDICATOR: &str = "24bit"; /// Legacy VT100 terminal type - basic 16-color support only const LEGACY_VT100_TERMINAL: &str = "vt100"; -/// ANSI terminal type - basic 16-color support only +/// ANSI terminal type - basic 16-color support only const LEGACY_ANSI_TERMINAL: &str = "ansi"; /// Terminal detection 
and color capability analysis @@ -461,13 +461,6 @@ fn run_app(terminal: &mut Terminal, mut app: App) -> io::Result<( } SimnetEvent::SystemClockUpdated(clock) => { app.clock = clock.clone(); - if app.include_debug_logs { - new_events.push(( - EventType::Debug, - Local::now(), - event.clock_update_msg(), - )); - } } SimnetEvent::ClockUpdate(ClockCommand::Pause) => { app.paused = true; diff --git a/crates/core/src/rpc/full.rs b/crates/core/src/rpc/full.rs index 72a524bb..39c54a57 100644 --- a/crates/core/src/rpc/full.rs +++ b/crates/core/src/rpc/full.rs @@ -1550,7 +1550,7 @@ impl Full for SurfpoolFullRpc { let (status_update_tx, status_update_rx) = crossbeam_channel::bounded(1); ctx.simnet_commands_tx - .send(SimnetCommand::TransactionReceived( + .send(SimnetCommand::ProcessTransaction( ctx.id, unsanitized_tx, status_update_tx, @@ -2486,7 +2486,7 @@ mod tests { .unwrap(); match mempool_rx.recv() { - Ok(SimnetCommand::TransactionReceived(_, tx, status_tx, _)) => { + Ok(SimnetCommand::ProcessTransaction(_, tx, status_tx, _)) => { let mut writer = setup.context.svm_locker.0.write().await; let slot = writer.get_latest_absolute_slot(); writer.transactions_queued_for_confirmation.push_back(( diff --git a/crates/core/src/rpc/mod.rs b/crates/core/src/rpc/mod.rs index 3e38cfdc..0ea50d1b 100644 --- a/crates/core/src/rpc/mod.rs +++ b/crates/core/src/rpc/mod.rs @@ -9,7 +9,7 @@ use jsonrpc_core::{ }; use jsonrpc_pubsub::{PubSubMetadata, Session}; use solana_clock::Slot; -use surfpool_types::{SimnetCommand, types::RpcConfig}; +use surfpool_types::{SimnetCommand, SimnetEvent, types::RpcConfig}; use crate::{ PluginManagerCommand, @@ -215,6 +215,22 @@ impl SurfpoolWebsocketMeta { session, } } + + pub fn log_debug(&self, msg: &str) { + let _ = self + .runloop_context + .svm_locker + .simnet_events_tx() + .send(SimnetEvent::debug(msg)); + } + + pub fn log_warn(&self, msg: &str) { + let _ = self + .runloop_context + .svm_locker + .simnet_events_tx() + .send(SimnetEvent::warn(msg)); + } } 
impl State for Option { diff --git a/crates/core/src/rpc/surfnet_cheatcodes.rs b/crates/core/src/rpc/surfnet_cheatcodes.rs index 5806c502..c2bf825c 100644 --- a/crates/core/src/rpc/surfnet_cheatcodes.rs +++ b/crates/core/src/rpc/surfnet_cheatcodes.rs @@ -14,9 +14,9 @@ use solana_system_interface::program as system_program; use solana_transaction::versioned::VersionedTransaction; use spl_associated_token_account_interface::address::get_associated_token_address_with_program_id; use surfpool_types::{ - AccountSnapshot, ClockCommand, ExportSnapshotConfig, GetSurfnetInfoResponse, Idl, - ResetAccountConfig, RpcProfileResultConfig, SimnetCommand, SimnetEvent, StreamAccountConfig, - UiKeyedProfileResult, + AccountSnapshot, ClockCommand, ExportSnapshotConfig, GetStreamedAccountsResponse, + GetSurfnetInfoResponse, Idl, ResetAccountConfig, RpcProfileResultConfig, SimnetCommand, + SimnetEvent, StreamAccountConfig, UiKeyedProfileResult, types::{AccountUpdate, SetSomeAccount, SupplyUpdate, TokenAccountUpdate, UuidOrSignature}, }; @@ -883,6 +883,45 @@ pub trait SurfnetCheatcodes { config: Option, ) -> Result>; + /// A cheat code to retrieve the streamed accounts. + /// When a transaction is processed, the accounts that are accessed are downloaded from the datasource and cached in the SVM. + /// With this method, you can simulate the streaming of accounts by providing a pubkey. + /// + /// ## Parameters + /// + /// ## Returns + /// An `RpcResponse<()>` indicating whether the account stream registration was successful. 
+ /// + /// ## Example Request + /// ```json + /// { + /// "jsonrpc": "2.0", + /// "id": 1, + /// "method": "surfnet_streamAccount", + /// "params": [ "4EXSeLGxVBpAZwq7vm6evLdewpcvE2H56fpqL2pPiLFa", { "includeOwnedAccounts": true } ] + /// } + /// ``` + /// + /// ## Example Response + /// ```json + /// { + /// "jsonrpc": "2.0", + /// "result": { + /// "context": { + /// "slot": 123456789, + /// "apiVersion": "2.3.8" + /// }, + /// "value": null + /// }, + /// "id": 1 + /// } + /// ``` + #[rpc(meta, name = "surfnet_getStreamedAccounts")] + fn get_streamed_accounts( + &self, + meta: Self::Metadata, + ) -> Result>; + /// A cheat code to get Surfnet network information. /// /// ## Returns @@ -1479,6 +1518,22 @@ impl SurfnetCheatcodes for SurfnetCheatcodesRpc { }) } + fn get_streamed_accounts( + &self, + meta: Self::Metadata, + ) -> Result> { + let svm_locker = meta.get_svm_locker()?; + + let value = svm_locker.with_svm_reader(|svm_reader| { + GetStreamedAccountsResponse::new(&svm_reader.streamed_accounts) + }); + + Ok(RpcResponse { + context: RpcResponseContext::new(svm_locker.get_latest_absolute_slot()), + value, + }) + } + fn get_surfnet_info( &self, meta: Self::Metadata, diff --git a/crates/core/src/rpc/ws.rs b/crates/core/src/rpc/ws.rs index c0a2ef08..cec896c2 100644 --- a/crates/core/src/rpc/ws.rs +++ b/crates/core/src/rpc/ws.rs @@ -577,6 +577,88 @@ pub trait Rpc { meta: Option, subscription: SubscriptionId, ) -> Result; + + #[pubsub(subscription = "rootNotification", subscribe, name = "rootSubscribe")] + fn root_subscribe(&self, meta: Self::Metadata, subscriber: Subscriber>); + + #[pubsub( + subscription = "rootNotification", + unsubscribe, + name = "rootUnsubscribe" + )] + fn root_unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; + + #[pubsub( + subscription = "programNotification", + subscribe, + name = "programSubscribe" + )] + fn program_subscribe(&self, meta: Self::Metadata, subscriber: Subscriber>); + + #[pubsub( + 
subscription = "programNotification", + unsubscribe, + name = "ProgramUnsubscribe" + )] + fn program_unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; + + #[pubsub( + subscription = "slotsUpdatesNotification", + subscribe, + name = "slotsUpdatesSubscribe" + )] + fn slots_updates_subscribe( + &self, + meta: Self::Metadata, + subscriber: Subscriber>, + ); + + #[pubsub( + subscription = "slotsUpdatesNotification", + unsubscribe, + name = "slotsUpdatesUnsubscribe" + )] + fn slots_updates_unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; + + #[pubsub(subscription = "blockNotification", subscribe, name = "blockSubscribe")] + fn block_subscribe(&self, meta: Self::Metadata, subscriber: Subscriber>); + + #[pubsub( + subscription = "blockNotification", + unsubscribe, + name = "blockUnsubscribe" + )] + fn block_unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; + + #[pubsub(subscription = "voteNotification", subscribe, name = "voteSubscribe")] + fn vote_subscribe(&self, meta: Self::Metadata, subscriber: Subscriber>); + + #[pubsub( + subscription = "voteNotification", + unsubscribe, + name = "voteUnsubscribe" + )] + fn vote_unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; } /// WebSocket RPC server implementation for Surfpool. 
@@ -652,6 +734,10 @@ impl Rpc for SurfpoolWsRpc { signature_str: String, config: Option, ) { + let _ = meta + .as_ref() + .map(|m| m.log_debug("Websocket 'signature_subscribe' connection established")); + let signature = match Signature::from_str(&signature_str) { Ok(sig) => sig, Err(_) => { @@ -905,6 +991,10 @@ impl Rpc for SurfpoolWsRpc { pubkey_str: String, config: Option, ) { + let _ = meta + .as_ref() + .map(|m| m.log_debug("Websocket 'account_subscribe' connection established")); + let pubkey = match Pubkey::from_str(&pubkey_str) { Ok(pk) => pk, Err(_) => { @@ -1035,6 +1125,10 @@ impl Rpc for SurfpoolWsRpc { } fn slot_subscribe(&self, meta: Self::Metadata, subscriber: Subscriber) { + let _ = meta + .as_ref() + .map(|m| m.log_debug("Websocket 'slot_subscribe' connection established")); + let id = self.uid.fetch_add(1, atomic::Ordering::SeqCst); let sub_id = SubscriptionId::Number(id as u64); let sink = match subscriber.assign_id(sub_id.clone()) { @@ -1130,6 +1224,10 @@ impl Rpc for SurfpoolWsRpc { mentions: Option, commitment: Option, ) { + let _ = meta + .as_ref() + .map(|m| m.log_debug("Websocket 'logs_subscribe' connection established")); + let id = self.uid.fetch_add(1, atomic::Ordering::SeqCst); let sub_id = SubscriptionId::Number(id as u64); let sink = match subscriber.assign_id(sub_id.clone()) { @@ -1223,4 +1321,78 @@ impl Rpc for SurfpoolWsRpc { }; Ok(true) } + + fn root_subscribe(&self, meta: Self::Metadata, _subscriber: Subscriber>) { + let _ = meta + .as_ref() + .map(|m| m.log_warn("Websocket method 'root_subscribe' is uninmplemented")); + } + + fn root_unsubscribe( + &self, + _meta: Option, + _subscription: SubscriptionId, + ) -> Result { + Ok(true) + } + + fn program_subscribe(&self, meta: Self::Metadata, _subscriber: Subscriber>) { + let _ = meta + .as_ref() + .map(|m| m.log_warn("Websocket method 'program_subscribe' is uninmplemented")); + } + + fn program_unsubscribe( + &self, + _meta: Option, + _subscription: SubscriptionId, + ) -> Result { + 
Ok(true) + } + + fn slots_updates_subscribe( + &self, + meta: Self::Metadata, + _subscriber: Subscriber>, + ) { + let _ = meta + .as_ref() + .map(|m| m.log_warn("Websocket method 'slots_updates_subscribe' is uninmplemented")); + } + + fn slots_updates_unsubscribe( + &self, + _meta: Option, + _subscription: SubscriptionId, + ) -> Result { + Ok(true) + } + + fn block_subscribe(&self, meta: Self::Metadata, _subscriber: Subscriber>) { + let _ = meta + .as_ref() + .map(|m| m.log_warn("Websocket method 'block_subscribe' is uninmplemented")); + } + + fn block_unsubscribe( + &self, + _meta: Option, + _subscription: SubscriptionId, + ) -> Result { + Ok(true) + } + + fn vote_subscribe(&self, meta: Self::Metadata, _subscriber: Subscriber>) { + let _ = meta + .as_ref() + .map(|m| m.log_warn("Websocket method 'vote_subscribe' is uninmplemented")); + } + + fn vote_unsubscribe( + &self, + _meta: Option, + _subscription: SubscriptionId, + ) -> Result { + Ok(true) + } } diff --git a/crates/core/src/runloops/mod.rs b/crates/core/src/runloops/mod.rs index 8bb2f0d3..8ac71e5b 100644 --- a/crates/core/src/runloops/mod.rs +++ b/crates/core/src/runloops/mod.rs @@ -214,7 +214,7 @@ pub async fn start_block_production_runloop( block_production_mode = update; continue } - SimnetCommand::TransactionReceived(_key, transaction, status_tx, skip_preflight) => { + SimnetCommand::ProcessTransaction(_key, transaction, status_tx, skip_preflight) => { if let Err(e) = svm_locker.process_transaction(&remote_client_with_commitment, transaction, status_tx, skip_preflight, sigverify).await { let _ = svm_locker.simnet_events_tx().send(SimnetEvent::error(format!("Failed to process transaction: {}", e))); } diff --git a/crates/types/src/types.rs b/crates/types/src/types.rs index 9dc885bd..a1db06e2 100644 --- a/crates/types/src/types.rs +++ b/crates/types/src/types.rs @@ -482,7 +482,7 @@ pub enum SimnetCommand { CommandClock(Option<(Hash, String)>, ClockCommand), UpdateInternalClock(Option<(Hash, String)>, 
Clock), UpdateBlockProductionMode(BlockProductionMode), - TransactionReceived( + ProcessTransaction( Option<(Hash, String)>, VersionedTransaction, Sender, @@ -1034,6 +1034,13 @@ impl Default for StreamAccountConfig { } } +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct StreamedAccountInfo { + pub pubkey: String, + pub include_owned_accounts: bool, +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct GetSurfnetInfoResponse { @@ -1045,6 +1052,24 @@ impl GetSurfnetInfoResponse { } } +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GetStreamedAccountsResponse { + accounts: Vec, +} +impl GetStreamedAccountsResponse { + pub fn new(streamed_accounts: &HashMap) -> Self { + let mut accounts = vec![]; + for (pubkey, include_owned_accounts) in streamed_accounts { + accounts.push(StreamedAccountInfo { + pubkey: pubkey.to_string(), + include_owned_accounts: *include_owned_accounts, + }); + } + Self { accounts } + } +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct RunbookExecutionStatusReport { From d86237f4043e726ae274ecf7a62df7f76cb455f4 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Sat, 25 Oct 2025 11:57:42 -0400 Subject: [PATCH 2/9] fix: simplifying account mapping in partial transactions --- crates/core/src/surfnet/locker.rs | 1150 ++++++++++++++++++++--------- 1 file changed, 797 insertions(+), 353 deletions(-) diff --git a/crates/core/src/surfnet/locker.rs b/crates/core/src/surfnet/locker.rs index 12884ba4..a8a9201f 100644 --- a/crates/core/src/surfnet/locker.rs +++ b/crates/core/src/surfnet/locker.rs @@ -18,6 +18,7 @@ use solana_account_decoder::{ }; use solana_address_lookup_table_interface::state::AddressLookupTable; use solana_client::{ + rpc_client::SerializableTransaction, rpc_config::{ RpcAccountInfoConfig, RpcBlockConfig, 
RpcLargestAccountsConfig, RpcLargestAccountsFilter, RpcSignaturesForAddressConfig, RpcTransactionConfig, RpcTransactionLogsFilter, @@ -116,6 +117,48 @@ impl SvmAccessContext { pub type SurfpoolContextualizedResult = SurfpoolResult>; +/// Helper function to apply an override to a JSON value using dot notation path +/// +/// # Arguments +/// * `json` - The JSON value to modify +/// * `path` - Dot-separated path to the field (e.g., "price_message.price") +/// * `value` - The new value to set +/// +/// # Returns +/// Result indicating success or error +fn apply_override_to_json( + json: &mut serde_json::Value, + path: &str, + value: &serde_json::Value, +) -> SurfpoolResult<()> { + let parts: Vec<&str> = path.split('.').collect(); + + if parts.is_empty() { + return Err(SurfpoolError::internal("Empty path provided for override")); + } + + // Navigate to the parent of the target field + let mut current = json; + for part in &parts[..parts.len() - 1] { + current = current.get_mut(part).ok_or_else(|| { + SurfpoolError::internal(format!("Path segment '{}' not found in JSON", part)) + })?; + } + + // Set the final field + let final_key = parts[parts.len() - 1]; + match current { + serde_json::Value::Object(map) => { + map.insert(final_key.to_string(), value.clone()); + Ok(()) + } + _ => Err(SurfpoolError::internal(format!( + "Cannot set field '{}' - parent is not an object", + final_key + ))), + } +} + pub struct SurfnetSvmLocker(pub Arc>); impl Clone for SurfnetSvmLocker { @@ -351,6 +394,10 @@ impl SurfnetSvmLocker { local_results, )); } + debug!( + "Missing accounts will be fetched: {}", + missing_accounts.iter().join(", ") + ); // Fetch missing accounts from remote let remote_results = client @@ -377,7 +424,10 @@ impl SurfnetSvmLocker { .cloned() .unwrap_or(GetAccountResult::None(*pubkey)) } - found => found, // Keep found accounts (no clone, just move) + found => { + debug!("Keeping local account: {}", pubkey); + found + } // Keep found accounts (no clone, just move) } }) 
.collect(); @@ -862,6 +912,7 @@ impl SurfnetSvmLocker { ) -> SurfpoolResult { let signature = transaction.signatures[0]; + // Can we avoid this write? let latest_absolute_slot = self.with_svm_writer(|svm_writer| { let latest_absolute_slot = svm_writer.get_latest_absolute_slot(); svm_writer.notify_signature_subscribers( @@ -890,12 +941,18 @@ impl SurfnetSvmLocker { .map(|l| l.all_loaded_addresses()), ) .clone(); + debug!( + "Transaction {} accounts inputs: {}", + transaction.get_signature(), + transaction_accounts.iter().join(", ") + ); let account_updates = self .get_multiple_accounts(remote_ctx, &transaction_accounts, None) .await? .inner; + // I don't think this code is required // We also need the pubkeys of the ALTs to be pulled from the remote, so we'll do a fetch for them let alt_account_updates = self .get_multiple_accounts( @@ -1048,137 +1105,129 @@ impl SurfnetSvmLocker { if ix_count == 0 { return Ok(None); } - // Extract account categories from original transaction - let mutable_loaded_addresses = loaded_addresses - .as_ref() - .map(|l| l.writable_len()) - .unwrap_or(0); - let readonly_loaded_addresses = loaded_addresses - .as_ref() - .map(|l| l.readonly_len()) - .unwrap_or(0); - let loaded_address_count = mutable_loaded_addresses + readonly_loaded_addresses; - let last_signer_index = transaction.message.header().num_required_signatures as usize; - let last_mutable_signer_index = - last_signer_index - transaction.message.header().num_readonly_signed_accounts as usize; - let last_mutable_non_signer_index = transaction_accounts.len() - - transaction.message.header().num_readonly_unsigned_accounts as usize - - loaded_address_count; - let last_readonly_non_signer_index = transaction_accounts.len() - loaded_address_count; - let last_mutable_loaded_index = transaction_accounts.len() - readonly_loaded_addresses; - - let mutable_signers = &transaction_accounts[0..last_mutable_signer_index]; - let readonly_signers = 
&transaction_accounts[last_mutable_signer_index..last_signer_index]; - let mutable_non_signers = - &transaction_accounts[last_signer_index..last_mutable_non_signer_index]; - let readonly_non_signers = - &transaction_accounts[last_mutable_non_signer_index..last_readonly_non_signer_index]; - let mutable_loaded = - &transaction_accounts[last_readonly_non_signer_index..last_mutable_loaded_index]; - let readonly_loaded = &transaction_accounts[last_mutable_loaded_index..]; let mut ix_profile_results: Vec = vec![]; for idx in 1..=ix_count { - if let Some((partial_tx, all_required_accounts_for_last_ix)) = self - .create_partial_transaction( - instructions, - transaction_accounts, - mutable_signers, - readonly_signers, - mutable_non_signers, - readonly_non_signers, - mutable_loaded, - readonly_loaded, - transaction, - idx, - loaded_addresses, - ) - { - let (mut previous_execution_capture, previous_cus, previous_log_count) = { - let mut previous_execution_captures = ExecutionCapture::new(); - let mut previous_cus = 0; - let mut previous_log_count = 0; - for result in ix_profile_results.iter() { - previous_execution_captures.extend(result.post_execution_capture.clone()); - previous_cus += result.compute_units_consumed; - previous_log_count += - result.log_messages.as_ref().map(|m| m.len()).unwrap_or(0); - } - ( - previous_execution_captures, - previous_cus, - previous_log_count, - ) - }; + let partial_transaction_res = self.create_partial_transaction( + instructions, + transaction_accounts, + transaction, + idx, + loaded_addresses, + ); - let skip_preflight = true; - let sigverify = false; - let do_propagate = false; + let mut ix_required_accounts = IndexSet::new(); + for &account_idx in &instructions[idx - 1].accounts { + ix_required_accounts.insert(transaction_accounts[account_idx as usize]); + } + ix_required_accounts + .insert(transaction_accounts[instructions[idx - 1].program_id_index as usize]); - let pre_execution_capture = { - let mut capture = 
pre_execution_capture.clone(); + let Some(partial_tx) = partial_transaction_res else { + debug!("Unable to create partial transaction"); + return Ok(None); + }; - // If a pre-execution capture was provided, any pubkeys that are in the capture - // that we just took should be replaced with those from the pre-execution capture. - let capture_keys: Vec<_> = pre_execution_capture.keys().cloned().collect(); - for pubkey in capture_keys.into_iter() { - if let Some(pre_account) = previous_execution_capture.remove(&pubkey) { - // Replace the account with the pre-execution one - capture.insert(pubkey, pre_account); - } - } - capture - }; + let mut previous_execution_captures = ExecutionCapture::new(); + let mut previous_cus = 0; + let mut previous_log_count = 0; + for result in ix_profile_results.iter() { + previous_execution_captures.extend(result.post_execution_capture.clone()); + previous_cus += result.compute_units_consumed; + previous_log_count += result.log_messages.as_ref().map(|m| m.len()).unwrap_or(0); + } - let mut profile_result = { - let mut svm_clone = self.with_svm_reader(|svm_reader| svm_reader.clone()); - - let (dummy_simnet_tx, _) = crossbeam_channel::bounded(1); - let (dummy_geyser_tx, _) = crossbeam_channel::bounded(1); - svm_clone.simnet_events_tx = dummy_simnet_tx; - svm_clone.geyser_events_tx = dummy_geyser_tx; - - let svm_locker = SurfnetSvmLocker::new(svm_clone); - svm_locker - .process_transaction_internal( - partial_tx, - skip_preflight, - sigverify, - transaction_accounts, - &loaded_addresses.as_ref().map(|l| l.loaded_addresses()), - accounts_before, - token_accounts_before, - token_programs, - pre_execution_capture, - status_tx, - do_propagate, - ) - .await? 
- }; + let skip_preflight = true; + let sigverify = false; + let do_propagate = false; + + let mut pre_execution_capture_cursor = pre_execution_capture.clone(); + // If a pre-execution capture was provided, any pubkeys that are in the capture + // that we just took should be replaced with those from the pre-execution capture. + let capture_keys: Vec<_> = pre_execution_capture_cursor.keys().cloned().collect(); + for pubkey in capture_keys.into_iter() { + if let Some(pre_account) = previous_execution_captures.remove(&pubkey) { + // Replace the account with the pre-execution one + pre_execution_capture_cursor.insert(pubkey, pre_account); + } + } + let mut svm_clone = self.with_svm_reader(|svm_reader| svm_reader.clone()); + + let (dummy_simnet_tx, _) = crossbeam_channel::bounded(1); + let (dummy_geyser_tx, _) = crossbeam_channel::bounded(1); + svm_clone.simnet_events_tx = dummy_simnet_tx; + svm_clone.geyser_events_tx = dummy_geyser_tx; + + let svm_locker = SurfnetSvmLocker::new(svm_clone); + let mut profile_result = svm_locker + .process_transaction_internal( + partial_tx, + skip_preflight, + sigverify, + transaction_accounts, + &loaded_addresses.as_ref().map(|l| l.loaded_addresses()), + accounts_before, + token_accounts_before, + token_programs, + pre_execution_capture_cursor, + status_tx, + do_propagate, + ) + .await?; + eprintln!( + "DEBUG: pre_execution_capture keys = {:?}", profile_result .pre_execution_capture - .retain(|pubkey, _| all_required_accounts_for_last_ix.contains(pubkey)); + .keys() + .map(|p| p.to_string()) + .collect::>() + ); + eprintln!( + "DEBUG: post_execution_capture keys = {:?}", profile_result .post_execution_capture - .retain(|pubkey, _| all_required_accounts_for_last_ix.contains(pubkey)); - - profile_result.compute_units_consumed = profile_result - .compute_units_consumed - .saturating_sub(previous_cus); - profile_result.log_messages = profile_result.log_messages.map(|logs| { - logs.into_iter() - .skip(previous_log_count) - .collect::>() - 
}); + .keys() + .map(|p| p.to_string()) + .collect::>() + ); - ix_profile_results.push(profile_result); - } else { - return Ok(None); - // panic!("No partial transaction created for instruction {}", idx); - } + profile_result + .pre_execution_capture + .retain(|pubkey, _| ix_required_accounts.contains(pubkey)); + profile_result + .post_execution_capture + .retain(|pubkey, _| ix_required_accounts.contains(pubkey)); + + eprintln!( + "DEBUG: After retain - pre keys = {:?}", + profile_result + .pre_execution_capture + .keys() + .map(|p| p.to_string()) + .collect::>() + ); + eprintln!( + "DEBUG: After retain - post keys = {:?}", + profile_result + .post_execution_capture + .keys() + .map(|p| p.to_string()) + .collect::>() + ); + + profile_result.compute_units_consumed = profile_result + .compute_units_consumed + .saturating_sub(previous_cus); + profile_result.log_messages = profile_result.log_messages.map(|logs| { + logs.into_iter() + .skip(previous_log_count) + .collect::>() + }); + + ix_profile_results.push(profile_result); } Ok(Some(ix_profile_results)) @@ -1617,7 +1666,10 @@ impl SurfnetSvmLocker { match self.with_svm_writer(|svm_writer| { svm_writer .send_transaction(transaction, false /* cu_analysis_enabled */, sigverify) - .map_err(ProcessTransactionResult::ExecutionFailure) + .map_err(|e| { + debug!("Transaction execution failure: {:?}", e.meta); + ProcessTransactionResult::ExecutionFailure(e) + }) .map(ProcessTransactionResult::Success) }) { Ok(res) => res, @@ -1702,6 +1754,10 @@ impl SurfnetSvmLocker { }); Ok(()) } + + pub fn get_streamed_accounts(&self) -> HashMap { + self.with_svm_reader(|svm_reader| svm_reader.streamed_accounts.clone()) + } } /// Token account related functions @@ -2023,7 +2079,6 @@ impl SurfnetSvmLocker { VersionedMessage::V0(message) => { let mut acc_keys = message.account_keys.clone(); - // acc_keys.append(&mut alt_pubkeys); if let Some(loaded_addresses) = all_transaction_lookup_table_addresses { acc_keys.extend(loaded_addresses); } @@ 
-2041,13 +2096,12 @@ impl SurfnetSvmLocker { match message { VersionedMessage::Legacy(_) => Ok(None), VersionedMessage::V0(message) => { - let alts = message.address_table_lookups.clone(); - if alts.is_empty() { + if message.address_table_lookups.is_empty() { return Ok(None); } let mut loaded = TransactionLoadedAddresses::new(); - for alt in alts { - self.get_lookup_table_addresses(remote_ctx, &alt, &mut loaded) + for alt in message.address_table_lookups.iter() { + self.get_lookup_table_addresses(remote_ctx, alt, &mut loaded) .await?; } @@ -2177,249 +2231,45 @@ impl SurfnetSvmLocker { &self, instructions: &[CompiledInstruction], message_accounts: &[Pubkey], - mutable_signers: &[Pubkey], - readonly_signers: &[Pubkey], - mutable_non_signers: &[Pubkey], - readonly_non_signers: &[Pubkey], - mutable_loaded_addresses: &[Pubkey], - readonly_loaded_addresses: &[Pubkey], transaction: &VersionedTransaction, idx: usize, loaded_addresses: &Option, - ) -> Option<(VersionedTransaction, IndexSet)> { + ) -> Option { + // Keep the full account map from the original transaction for every partial pass. + // This simplifies remapping: we only keep the first `idx` instructions, but retain + // the original `message_accounts` ordering and address table lookups. let ixs_for_tx = instructions[0..idx].to_vec(); - // Collect all required accounts for the partial transaction - let mut all_required_accounts = IndexSet::new(); - for ix in &ixs_for_tx { - // Add instruction accounts - for &account_idx in &ix.accounts { - all_required_accounts.insert(message_accounts[account_idx as usize]); - } - // If we still don't have any accounts, it means our instruction doesn't have any accounts. - // Some instructions don't need to read/write any accounts, so the account list is empty. - // However, we're still using an account to sign, so we add that here. 
- { - if all_required_accounts.is_empty() { - let mut mutable_signers = - mutable_signers.iter().cloned().collect::>(); - all_required_accounts.append(&mut mutable_signers); - } - if all_required_accounts.is_empty() { - let mut readonly_signers = - readonly_signers.iter().cloned().collect::>(); - all_required_accounts.append(&mut readonly_signers); - } - } - // Add program ID - all_required_accounts.insert(message_accounts[ix.program_id_index as usize]); - } - - // Profiling our partial transaction is really about knowing the impacts of the _last_ - // instruction, so we want to have a separate list of all of the accounts that are - // used in the last instruction. - let mut all_required_accounts_for_last_ix = IndexSet::new(); - let last = ixs_for_tx.last().unwrap(); - for &account_idx in &last.accounts { - all_required_accounts_for_last_ix.insert(message_accounts[account_idx as usize]); - } - { - if all_required_accounts_for_last_ix.is_empty() { - let mut mutable_signers = mutable_signers.iter().cloned().collect::>(); - all_required_accounts_for_last_ix.append(&mut mutable_signers); - } - if all_required_accounts_for_last_ix.is_empty() { - let mut readonly_signers = - readonly_signers.iter().cloned().collect::>(); - all_required_accounts_for_last_ix.append(&mut readonly_signers); - } - } - all_required_accounts_for_last_ix.insert(message_accounts[last.program_id_index as usize]); - - // Categorize accounts based on their original positions - let mut new_mutable_signers = HashSet::new(); - let mut new_readonly_signers = HashSet::new(); - let mut new_mutable_non_signers = HashSet::new(); - let mut new_readonly_non_signers = HashSet::new(); - let mut new_mutable_loaded = HashSet::new(); - let mut new_readonly_loaded = HashSet::new(); - - for &account in &all_required_accounts { - if let Some(idx) = message_accounts.iter().position(|pk| pk == &account) { - match idx { - i if i < mutable_signers.len() => { - new_mutable_signers.insert(account); - } - i if i < 
mutable_signers.len() + readonly_signers.len() => { - new_readonly_signers.insert(account); - } - i if i < mutable_signers.len() - + readonly_signers.len() - + mutable_non_signers.len() => - { - new_mutable_non_signers.insert(account); - } - i if i < mutable_signers.len() - + readonly_signers.len() - + mutable_non_signers.len() - + readonly_non_signers.len() => - { - new_readonly_non_signers.insert(account); - } - i if i < mutable_signers.len() - + readonly_signers.len() - + mutable_non_signers.len() - + readonly_non_signers.len() - + mutable_loaded_addresses.len() => - { - new_mutable_loaded.insert(account); - } - i if i < mutable_signers.len() - + readonly_signers.len() - + mutable_non_signers.len() - + readonly_non_signers.len() - + mutable_loaded_addresses.len() - + readonly_loaded_addresses.len() => - { - new_readonly_loaded.insert(account); - } - _ => {} - }; - } - } - - // Build account keys in correct order: signers first, then non-signers - let mut new_account_keys = Vec::new(); - for &account in &all_required_accounts { - if new_mutable_signers.contains(&account) { - new_account_keys.push(account); - } - } - for &account in &all_required_accounts { - if new_readonly_signers.contains(&account) { - new_account_keys.push(account); - } - } - for &account in &all_required_accounts { - if new_mutable_non_signers.contains(&account) { - new_account_keys.push(account); - } - } - for &account in &all_required_accounts { - if new_readonly_non_signers.contains(&account) { - new_account_keys.push(account); - } - } - - // Create account index mapping - let mut account_index_mapping = HashMap::new(); - { - for (new_idx, pubkey) in new_account_keys.iter().enumerate() { - if let Some(old_idx) = message_accounts.iter().position(|pk| pk == pubkey) { - account_index_mapping.insert(old_idx, new_idx); - } - } - - // Accounts loaded from an ALT shouldn't be in the overall message accounts, but their - // indices should be remapped correctly. 
- let non_loaded_address_len = mutable_signers.len() - + readonly_signers.len() - + mutable_non_signers.len() - + readonly_non_signers.len(); - for pubkey in new_mutable_loaded.iter() { - if let Some(old_idx) = message_accounts.iter().position(|pk| pk == pubkey) { - let placement = old_idx - non_loaded_address_len; - account_index_mapping.insert(old_idx, new_account_keys.len() + placement); - } - } - - for pubkey in new_readonly_loaded.iter() { - if let Some(old_idx) = message_accounts.iter().position(|pk| pk == pubkey) { - let placement = - old_idx - non_loaded_address_len - mutable_loaded_addresses.len(); - account_index_mapping.insert( - old_idx, - new_account_keys.len() + new_mutable_loaded.len() + placement, - ); - } - } - } - - // Remap instructions - let mut remapped_instructions = Vec::new(); - for ix in ixs_for_tx { - let mut remapped_accounts = Vec::new(); - for &account_idx in &ix.accounts { - if let Some(&new_idx) = account_index_mapping.get(&(account_idx as usize)) { - remapped_accounts.push(new_idx as u8); - } else { - continue; // Skip instructions with unmappable accounts, this should be an unreachable path - } - } - - if remapped_accounts.len() == ix.accounts.len() { - let new_program_id_idx = account_index_mapping - .get(&(ix.program_id_index as usize)) - .copied() - .unwrap_or(0) as u8; - - remapped_instructions.push(CompiledInstruction { - program_id_index: new_program_id_idx, - accounts: remapped_accounts, - data: ix.data.clone(), - }); - } - } - - if remapped_instructions.is_empty() { - // panic!("No valid instructions after remapping, skipping partial transaction creation."); - return None; - } - - // Create new message - let num_required_signatures = new_mutable_signers.len() + new_readonly_signers.len(); - let num_readonly_signed_accounts = new_readonly_signers.len(); - let num_readonly_unsigned_accounts = new_readonly_non_signers.len(); - - let new_message = match transaction.version() { - TransactionVersion::Legacy(_) => 
VersionedMessage::Legacy(Message { - header: MessageHeader { - num_required_signatures: num_required_signatures as u8, - num_readonly_signed_accounts: num_readonly_signed_accounts as u8, - num_readonly_unsigned_accounts: num_readonly_unsigned_accounts as u8, - }, - account_keys: new_account_keys, + // Build a new message that keeps the original account map and address table lookups, + // but only contains the first `idx` instructions. + let new_message = match transaction.message { + VersionedMessage::Legacy(ref message) => VersionedMessage::Legacy(Message { + account_keys: message_accounts[..message.account_keys.len()].to_vec(), + header: message.header, recent_blockhash: *transaction.message.recent_blockhash(), - instructions: remapped_instructions, - }), - TransactionVersion::Number(_) => VersionedMessage::V0(solana_message::v0::Message { - header: MessageHeader { - num_required_signatures: num_required_signatures as u8, - num_readonly_signed_accounts: num_readonly_signed_accounts as u8, - num_readonly_unsigned_accounts: num_readonly_unsigned_accounts as u8, - }, - account_keys: new_account_keys, - recent_blockhash: *transaction.message.recent_blockhash(), - instructions: remapped_instructions, - address_table_lookups: loaded_addresses - .as_ref() - .map(|l| { - l.filter_from_members(&new_mutable_loaded, &new_readonly_loaded) - .to_address_table_lookups() - }) - .unwrap_or_default(), + instructions: ixs_for_tx.clone(), }), + VersionedMessage::V0(ref message) => { + VersionedMessage::V0(solana_message::v0::Message { + account_keys: message_accounts[..message.account_keys.len()].to_vec(), + header: message.header, + recent_blockhash: *transaction.message.recent_blockhash(), + instructions: ixs_for_tx.clone(), + // Preserve the original address table lookups when available. 
+ address_table_lookups: loaded_addresses + .as_ref() + .map(|l| l.to_address_table_lookups()) + .unwrap_or_default(), + }) + } }; - // Create partial transaction with appropriate signatures - let signatures_to_use = transaction.signatures[0..num_required_signatures].to_vec(); - let tx = VersionedTransaction { - signatures: signatures_to_use, + signatures: transaction.signatures.clone(), message: new_message, }; - Some((tx, all_required_accounts_for_last_ix)) + Some(tx) } /// Returns the profile result for a given signature or UUID, and whether it exists in the SVM. @@ -2486,6 +2336,81 @@ impl SurfnetSvmLocker { }) }) } + + /// Forges account data by decoding with IDL, applying overrides, and re-encoding. + /// + /// # Arguments + /// * `account_pubkey` - The public key of the account (used for error messages) + /// * `account_data` - The raw account data bytes + /// * `idl` - The IDL for decoding/encoding the account data + /// * `overrides` - HashMap of field paths (dot notation) to values to override + /// + /// # Returns + /// The modified account data bytes with discriminator + pub fn get_forged_account_data( + &self, + account_pubkey: &Pubkey, + account_data: &[u8], + idl: &Idl, + overrides: &HashMap, + ) -> SurfpoolResult> { + // Step 1: Validate account data size + if account_data.len() < 8 { + return Err(SurfpoolError::invalid_account_data( + account_pubkey, + "Account data too small to be an Anchor account (need at least 8 bytes for discriminator)", + Some("Data length too small"), + )); + } + + // Step 3: Split discriminator and data + let discriminator = &account_data[..8]; + let serialized_data = &account_data[8..]; + + // Step 4: Find the account type using the discriminator + let _account_def = idl + .accounts + .iter() + .find(|acc| acc.discriminator.eq(discriminator)) + .ok_or_else(|| { + SurfpoolError::internal(format!( + "Account with discriminator '{:?}' not found in IDL", + discriminator + )) + })?; + + // Step 5: Deserialize the account data 
to JSON + // For now, we'll use a simple approach: deserialize as raw JSON + let mut account_json: serde_json::Value = serde_json::from_slice(serialized_data) + .map_err(|e| { + SurfpoolError::deserialize_error( + "account data", + format!( + "Failed to deserialize account data as JSON: {}. \ + Note: This is a simplified implementation that expects JSON-serialized data. \ + For Anchor accounts, proper Borsh deserialization should be implemented.", + e + ) + ) + })?; + + // Step 6: Apply overrides using dot notation + for (path, value) in overrides { + apply_override_to_json(&mut account_json, path, value)?; + } + + // Step 7: Re-serialize the modified data + let modified_data = serde_json::to_vec(&account_json).map_err(|e| { + SurfpoolError::internal(format!("Failed to serialize modified account data: {}", e)) + })?; + + // Step 8: Reconstruct the account data with discriminator + let mut new_account_data = Vec::with_capacity(8 + modified_data.len()); + new_account_data.extend_from_slice(discriminator); + new_account_data.extend_from_slice(&modified_data); + + Ok(new_account_data) + } } /// Program account related functions impl SurfnetSvmLocker { @@ -3126,3 +3051,522 @@ pub fn format_ui_amount(amount: u64, decimals: u8) -> f64 { amount as f64 } } + +#[cfg(test)] +mod tests { + use crate::scenarios::registry::PYTH_V2_IDL_CONTENT; + use crate::surfnet::SurfnetSvm; + + use super::*; + use solana_account::Account; + use solana_account_decoder::UiAccountEncoding; + use std::collections::HashMap; + + #[test] + fn test_get_forged_account_data_with_pyth_fixture() { + use borsh::{BorshDeserialize, BorshSerialize}; + + // Define local structures matching Pyth IDL + #[derive(BorshSerialize, BorshDeserialize, Debug, Clone, PartialEq)] + pub enum VerificationLevel { + Partial { num_signatures: u8 }, + Full, + } + + #[derive(BorshSerialize, BorshDeserialize, Debug, Clone, PartialEq)] + pub struct PriceFeedMessage { + pub feed_id: [u8; 32], + pub price: i64, + pub conf: u64, + 
pub exponent: i32, + pub publish_time: i64, + pub prev_publish_time: i64, + pub ema_price: i64, + pub ema_conf: u64, + } + + #[derive(BorshSerialize, BorshDeserialize, Debug, Clone, PartialEq)] + pub struct PriceUpdateV2 { + pub write_authority: Pubkey, + pub verification_level: VerificationLevel, + pub price_message: PriceFeedMessage, + pub posted_slot: u64, + } + + // Pyth price feed account data fixture + let account_data_hex = vec![ + 0x22, 0xf1, 0x23, 0x63, 0x9d, 0x7e, 0xf4, 0xcd, // Discriminator + 0x35, 0xa7, 0x0c, 0x11, 0x16, 0x2f, 0xbf, 0x5a, 0x0e, 0x7f, 0x7d, 0x2f, 0x96, 0xe1, + 0x9f, 0x97, 0xb0, 0x22, 0x46, 0xa1, 0x56, 0x87, 0xee, 0x67, 0x27, 0x94, 0x89, 0x74, + 0x48, 0xe6, 0x58, 0xde, 0x01, 0xe6, 0x2d, 0xf6, 0xc8, 0xb4, 0xa8, 0x5f, 0xe1, 0xa6, + 0x7d, 0xb4, 0x4d, 0xc1, 0x2d, 0xe5, 0xdb, 0x33, 0x0f, 0x7a, 0xc6, 0x6b, 0x72, 0xdc, + 0x65, 0x8a, 0xfe, 0xdf, 0x0f, 0x4a, 0x41, 0x5b, 0x43, 0xd7, 0x1f, 0x18, 0x64, 0x5f, + 0x0a, 0x00, 0x00, 0x96, 0x67, 0xea, 0xc5, 0x00, 0x00, 0x00, 0x00, 0xf8, 0xff, 0xff, + 0xff, 0x5f, 0x2b, 0x00, 0x69, 0x00, 0x00, 0x00, 0x00, 0x5e, 0x2b, 0x00, 0x69, 0x00, + 0x00, 0x00, 0x00, 0xa0, 0x7c, 0x1a, 0x38, 0x63, 0x0a, 0x00, 0x00, 0x94, 0xa6, 0xb9, + 0xb5, 0x00, 0x00, 0x00, 0x00, 0x8c, 0x5e, 0x6d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, + ]; + + // Create a minimal Pyth IDL for testing + let idl: Idl = serde_json::from_str(PYTH_V2_IDL_CONTENT).expect("Failed to load IDL"); + + // Create overrides - note: this won't actually work with the JSON deserialization + // since the account data is Borsh-encoded, but we're testing the structure + let mut overrides: HashMap = HashMap::new(); + + // Verify IDL has matching discriminator + let account_def = idl + .accounts + .iter() + .find(|acc| acc.discriminator.eq(&account_data_hex[..8])); + + assert!( + account_def.is_some(), + "Should find PriceUpdateV2 account by discriminator" + ); + assert_eq!(account_def.unwrap().name, "PriceUpdateV2"); + + // Step 1: Instantiate an offline Svm instance + let 
(surfnet_svm, _simnet_events_rx, _geyser_events_rx) = SurfnetSvm::new(); + let svm_locker = SurfnetSvmLocker::new(surfnet_svm); + + // Step 2: Register the IDL for this account + let account_pubkey = Pubkey::from_str_const("rec5EKMGg6MxZYaMdyBfgwp4d5rB9T1VQH5pJv5LtFJ"); + svm_locker.register_idl(idl.clone(), None); + + // Step 3: Create an account with the Pyth data + let pyth_account = Account { + lamports: 1_000_000, + data: account_data_hex.clone(), + owner: account_pubkey, + executable: false, + rent_epoch: 0, + }; + + // Step 4: Use encode_ui_account to decode/encode the account data + let ui_account = svm_locker.encode_ui_account( + &account_pubkey, + &pyth_account, + UiAccountEncoding::JsonParsed, + None, + None, // data_slice + ); + + // Step 5: Verify the UI account has parsed data + println!("UI Account lamports: {}", ui_account.lamports); + println!("UI Account owner: {}", ui_account.owner); + + // Assert on parsed account data + use solana_account_decoder::UiAccountData; + match &ui_account.data { + UiAccountData::Json(parsed_account) => { + let parsed_obj = &parsed_account.parsed; + + // Extract price_message object + let price_message = parsed_obj + .get("price_message") + .expect("Should have price_message field") + .as_object() + .expect("price_message should be an object"); + + // Assert on price + let price = price_message + .get("price") + .expect("Should have price field") + .as_i64() + .expect("price should be a number"); + assert_eq!(price, 11404817473495, "Price should match expected value"); + + // Assert on exponent + let exponent = price_message + .get("exponent") + .expect("Should have exponent field") + .as_i64() + .expect("exponent should be a number"); + assert_eq!(exponent, -8, "Exponent should be -8"); + + // Assert on ema_price + let ema_price = price_message + .get("ema_price") + .expect("Should have ema_price field") + .as_i64() + .expect("ema_price should be a number"); + assert_eq!( + ema_price, 11421259300000, + "EMA price 
should match expected value" + ); + + // Assert on publish_time + let publish_time = price_message + .get("publish_time") + .expect("Should have publish_time field") + .as_i64() + .expect("publish_time should be a number"); + assert_eq!( + publish_time, 1761618783, + "Publish time should match expected value" + ); + + println!("✓ All price assertions passed!"); + } + _ => panic!("Expected JSON parsed account data"), + } + + // Step 6: Test get_forged_account_data without overrides (should return same data) + println!("\n--- Testing get_forged_account_data without overrides ---"); + let forged_data_no_overrides = svm_locker.get_forged_account_data( + &account_pubkey, + &account_data_hex, + &idl, + &overrides, + ); + + match forged_data_no_overrides { + Ok(data) => { + // If it succeeds, verify the data is unchanged + assert_eq!( + data, account_data_hex, + "Data without overrides should match original" + ); + println!("✓ Forged data without overrides matches original!"); + } + Err(e) => { + // If it fails, it's due to Borsh/JSON mismatch (expected for now) + println!("Expected error (Borsh vs JSON): {:?}", e); + println!("Note: This documents the need for proper Borsh implementation"); + } + } + + // Step 7: Test get_forged_account_data with overrides + println!("\n--- Testing get_forged_account_data with overrides ---"); + + // Set new values for price and publish_time + let new_price = 999999999999i64; + let new_publish_time = 1234567890i64; + let new_ema_price = 888888888888i64; + + overrides.insert("price_message.price".into(), json!(new_price)); + overrides.insert("price_message.publish_time".into(), json!(new_publish_time)); + overrides.insert("price_message.ema_price".into(), json!(new_ema_price)); + + let forged_data_with_overrides = svm_locker.get_forged_account_data( + &account_pubkey, + &account_data_hex, + &idl, + &overrides, + ); + + match forged_data_with_overrides { + Ok(modified_data) => { + // Verify the data is different from original + assert_ne!( 
+ modified_data, account_data_hex, + "Modified data should be different from original" + ); + println!("✓ Modified data is different from original!"); + + // Create a modified account to verify the changes + let modified_account = Account { + lamports: 1_000_000, + data: modified_data.clone(), + owner: account_pubkey, + executable: false, + rent_epoch: 0, + }; + + // Re-encode the modified account to verify the changes + let modified_ui_account = svm_locker.encode_ui_account( + &account_pubkey, + &modified_account, + UiAccountEncoding::JsonParsed, + None, + None, + ); + + // Verify the modified values in the re-encoded account + match &modified_ui_account.data { + UiAccountData::Json(parsed_account) => { + let parsed_obj = &parsed_account.parsed; + let price_message = parsed_obj + .get("price_message") + .expect("Should have price_message field") + .as_object() + .expect("price_message should be an object"); + + // Verify new price + let modified_price = price_message + .get("price") + .expect("Should have price field") + .as_i64() + .expect("price should be a number"); + assert_eq!( + modified_price, new_price, + "Modified price should match override value" + ); + + // Verify new publish_time + let modified_publish_time = price_message + .get("publish_time") + .expect("Should have publish_time field") + .as_i64() + .expect("publish_time should be a number"); + assert_eq!( + modified_publish_time, new_publish_time, + "Modified publish_time should match override value" + ); + + // Verify new ema_price + let modified_ema_price = price_message + .get("ema_price") + .expect("Should have ema_price field") + .as_i64() + .expect("ema_price should be a number"); + assert_eq!( + modified_ema_price, new_ema_price, + "Modified ema_price should match override value" + ); + + // Verify exponent is unchanged + let exponent = price_message + .get("exponent") + .expect("Should have exponent field") + .as_i64() + .expect("exponent should be a number"); + assert_eq!(exponent, -8, 
"Exponent should remain unchanged"); + + println!("✓ All override assertions passed!"); + println!(" - Price changed: 11404817473495 → {}", new_price); + println!( + " - Publish time changed: 1761618783 → {}", + new_publish_time + ); + println!(" - EMA price changed: 11421259300000 → {}", new_ema_price); + println!(" - Exponent unchanged: -8"); + } + _ => panic!("Expected JSON parsed account data for modified account"), + } + } + Err(e) => { + // If it fails, it's due to Borsh/JSON mismatch (expected for now) + println!("Expected error (Borsh vs JSON): {:?}", e); + println!("Note: Once Borsh serialization is implemented, this test will:"); + println!(" 1. Successfully modify the account data"); + println!(" 2. Verify price changed to: {}", new_price); + println!(" 3. Verify publish_time changed to: {}", new_publish_time); + println!(" 4. Verify ema_price changed to: {}", new_ema_price); + println!(" 5. Verify other fields remain unchanged"); + } + } + + // Step 8: Demonstrate proper Borsh deserialization/serialization + println!("\n--- Step 8: Testing with Borsh structures ---"); + + // Deserialize the original account data using Borsh + let account_bytes = &account_data_hex[8..]; + println!( + "Account data length (without discriminator): {} bytes", + account_bytes.len() + ); + + let mut reader = std::io::Cursor::new(account_bytes); + let original_price_update = PriceUpdateV2::deserialize_reader(&mut reader) + .expect("Should deserialize Pyth account data with Borsh"); + + let bytes_read = reader.position() as usize; + println!("Bytes read by Borsh: {}", bytes_read); + if bytes_read < account_bytes.len() { + println!( + "Note: {} extra bytes at end (likely padding)", + account_bytes.len() - bytes_read + ); + } + + println!("Original Borsh-deserialized data:"); + println!(" - Price: {}", original_price_update.price_message.price); + println!( + " - Exponent: {}", + original_price_update.price_message.exponent + ); + println!( + " - EMA Price: {}", + 
original_price_update.price_message.ema_price + ); + println!( + " - Publish time: {}", + original_price_update.price_message.publish_time + ); + + // Assert original values match what we saw in JSON parsing + assert_eq!( + original_price_update.price_message.price, 11404817473495, + "Borsh price should match JSON parsed value" + ); + assert_eq!( + original_price_update.price_message.exponent, -8, + "Borsh exponent should match JSON parsed value" + ); + assert_eq!( + original_price_update.price_message.ema_price, 11421259300000, + "Borsh ema_price should match JSON parsed value" + ); + assert_eq!( + original_price_update.price_message.publish_time, 1761618783, + "Borsh publish_time should match JSON parsed value" + ); + + println!("✓ Borsh deserialization matches JSON parsing!"); + + // Step 9: Modify and re-serialize with Borsh + println!("\n--- Step 9: Modifying account data with Borsh ---"); + + let mut modified_price_update = original_price_update.clone(); + modified_price_update.price_message.price = new_price; + modified_price_update.price_message.publish_time = new_publish_time; + modified_price_update.price_message.ema_price = new_ema_price; + + // Serialize back to bytes + let modified_account_data = + borsh::to_vec(&modified_price_update).expect("Should serialize modified data"); + + // Prepend the discriminator + let mut full_modified_data = account_data_hex[..8].to_vec(); + full_modified_data.extend_from_slice(&modified_account_data); + + println!("Modified Borsh-serialized data:"); + println!( + " - Price: {} → {}", + original_price_update.price_message.price, new_price + ); + println!( + " - Publish time: {} → {}", + original_price_update.price_message.publish_time, new_publish_time + ); + println!( + " - EMA Price: {} → {}", + original_price_update.price_message.ema_price, new_ema_price + ); + println!( + " - Exponent: {} (unchanged)", + modified_price_update.price_message.exponent + ); + + // Verify the modified data is different + assert_ne!( + 
full_modified_data, account_data_hex, + "Modified data should differ from original" + ); + + // Verify we can deserialize the modified data back + let mut modified_reader = std::io::Cursor::new(&full_modified_data[8..]); + let reloaded_price_update = PriceUpdateV2::deserialize_reader(&mut modified_reader) + .expect("Should deserialize modified data"); + + assert_eq!( + reloaded_price_update.price_message.price, new_price, + "Reloaded price should match modified value" + ); + assert_eq!( + reloaded_price_update.price_message.publish_time, new_publish_time, + "Reloaded publish_time should match modified value" + ); + assert_eq!( + reloaded_price_update.price_message.ema_price, new_ema_price, + "Reloaded ema_price should match modified value" + ); + assert_eq!( + reloaded_price_update.price_message.exponent, + original_price_update.price_message.exponent, + "Exponent should remain unchanged" + ); + + println!("✓ Borsh round-trip successful!"); + + // Step 10: Verify with encode_ui_account + println!("\n--- Step 10: Verify modified data with encode_ui_account ---"); + + let modified_test_account = Account { + lamports: 1_000_000, + data: full_modified_data, + owner: account_pubkey, + executable: false, + rent_epoch: 0, + }; + + let modified_ui_account = svm_locker.encode_ui_account( + &account_pubkey, + &modified_test_account, + UiAccountEncoding::JsonParsed, + None, + None, + ); + + // Verify through JSON encoding as well + match &modified_ui_account.data { + UiAccountData::Json(parsed_account) => { + let parsed_obj = &parsed_account.parsed; + let price_message = parsed_obj + .get("price_message") + .expect("Should have price_message") + .as_object() + .expect("Should be object"); + + let final_price = price_message + .get("price") + .expect("Should have price") + .as_i64() + .expect("Should be i64"); + let final_publish_time = price_message + .get("publish_time") + .expect("Should have publish_time") + .as_i64() + .expect("Should be i64"); + let final_ema_price = 
price_message + .get("ema_price") + .expect("Should have ema_price") + .as_i64() + .expect("Should be i64"); + + assert_eq!( + final_price, new_price, + "JSON-parsed price should match Borsh value" + ); + assert_eq!( + final_publish_time, new_publish_time, + "JSON-parsed publish_time should match Borsh value" + ); + assert_eq!( + final_ema_price, new_ema_price, + "JSON-parsed ema_price should match Borsh value" + ); + } + _ => panic!("Expected JSON parsed data"), + } + } + + #[test] + fn test_apply_override_to_json() { + let mut json = serde_json::json!({ + "price_message": { + "price": 100, + "publish_time": 1234567890 + }, + "expo": -8 + }); + + // Test simple override + let result = apply_override_to_json(&mut json, "expo", &serde_json::json!(-6)); + assert!(result.is_ok()); + assert_eq!(json["expo"], -6); + + // Test nested override + let result = + apply_override_to_json(&mut json, "price_message.price", &serde_json::json!(200)); + assert!(result.is_ok()); + assert_eq!(json["price_message"]["price"], 200); + + // Test invalid path + let result = + apply_override_to_json(&mut json, "nonexistent.field", &serde_json::json!(999)); + assert!(result.is_err()); + } +} From bd48ff4635bbd170903f0a88755f4f4a58434042 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 11:53:06 -0400 Subject: [PATCH 3/9] feat: draft scenarios implementation --- .gitignore | 1 - Cargo.lock | 1 + crates/cli/src/http/mod.rs | 41 +- crates/core/Cargo.toml | 1 + crates/core/src/lib.rs | 1 + crates/core/src/rpc/surfnet_cheatcodes.rs | 132 ++++++- crates/core/src/scenarios/mod.rs | 3 + .../src/scenarios/protocols/pyth/v2/idl.json | 130 ++++++ .../protocols/pyth/v2/overrides.yaml | 37 ++ crates/core/src/scenarios/registry.rs | 85 ++++ crates/core/src/surfnet/locker.rs | 15 +- crates/types/Cargo.toml | 2 +- crates/types/src/lib.rs | 2 + crates/types/src/scenarios.rs | 370 ++++++++++++++++++ 14 files changed, 799 insertions(+), 22 deletions(-) create mode 100644 
crates/core/src/scenarios/mod.rs create mode 100644 crates/core/src/scenarios/protocols/pyth/v2/idl.json create mode 100644 crates/core/src/scenarios/protocols/pyth/v2/overrides.yaml create mode 100644 crates/core/src/scenarios/registry.rs create mode 100644 crates/types/src/scenarios.rs diff --git a/.gitignore b/.gitignore index cd69a8b4..57dd8073 100644 --- a/.gitignore +++ b/.gitignore @@ -10,5 +10,4 @@ test-ledger .surfpool # local -runbooks txtx.yml diff --git a/Cargo.lock b/Cargo.lock index a48009d9..232f4c39 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12103,6 +12103,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", + "serde_yaml", "solana-account 3.1.0", "solana-account-decoder", "solana-address-lookup-table-interface 3.0.0", diff --git a/crates/cli/src/http/mod.rs b/crates/cli/src/http/mod.rs index 603e4fa5..77a9d704 100644 --- a/crates/cli/src/http/mod.rs +++ b/crates/cli/src/http/mod.rs @@ -19,6 +19,7 @@ use juniper_graphql_ws::ConnectionConfig; use log::{debug, error, info, trace, warn}; #[cfg(feature = "explorer")] use rust_embed::RustEmbed; +use surfpool_core::scenarios::TemplateRegistry; use surfpool_gql::{ DynamicSchema, db::schema::collections, @@ -28,7 +29,8 @@ use surfpool_gql::{ }; use surfpool_studio_ui::serve_studio_static_files; use surfpool_types::{ - DataIndexingCommand, SanitizedConfig, SubgraphCommand, SubgraphEvent, SurfpoolConfig, + DataIndexingCommand, OverrideTemplate, SanitizedConfig, SubgraphCommand, SubgraphEvent, + SurfpoolConfig, }; use txtx_core::kit::types::types::Value; use txtx_gql::kit::uuid::Uuid; @@ -64,6 +66,12 @@ pub async fn start_subgraph_and_explorer_server( let config_wrapped = Data::new(RwLock::new(config.clone())); let collections_metadata_lookup_wrapped = Data::new(RwLock::new(collections_metadata_lookup)); + // Initialize template registry and load templates + let mut template_registry = TemplateRegistry::new(); + template_registry.load_pyth_overrides(); + + let template_registry_wrapped = 
Data::new(RwLock::new(template_registry)); + let subgraph_handle = start_subgraph_runloop( subgraph_events_tx, subgraph_commands_rx, @@ -80,6 +88,7 @@ pub async fn start_subgraph_and_explorer_server( .app_data(context_wrapped.clone()) .app_data(config_wrapped.clone()) .app_data(collections_metadata_lookup_wrapped.clone()) + .app_data(template_registry_wrapped.clone()) .wrap( Cors::default() .allow_any_origin() @@ -93,6 +102,7 @@ pub async fn start_subgraph_and_explorer_server( .wrap(middleware::Logger::default()) .service(get_config) .service(get_indexers) + .service(get_scenario_templates) .service( web::scope("/workspace") .route("/v1/indexers", web::post().to(post_graphql)) @@ -153,19 +163,38 @@ async fn get_config( async fn get_indexers( collections_metadata_lookup: Data>, ) -> Result { - let lookup = collections_metadata_lookup - .read() - .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to read collections metadata"))?; + let lookup = collections_metadata_lookup.read().map_err(|_| { + actix_web::error::ErrorInternalServerError("Failed to read collections metadata") + })?; let collections: Vec<&CollectionMetadata> = lookup.entries.values().collect(); - let response = serde_json::to_string(&collections) - .map_err(|_| actix_web::error::ErrorInternalServerError("Failed to serialize collections"))?; + let response = serde_json::to_string(&collections).map_err(|_| { + actix_web::error::ErrorInternalServerError("Failed to serialize collections") + })?; + + Ok(HttpResponse::Ok() + .content_type("application/json") + .body(response)) +} + +#[actix_web::get("/v1/scenarios/templates")] +async fn get_scenario_templates( + template_registry: Data>, +) -> Result { + let registry = template_registry.read().map_err(|_| { + actix_web::error::ErrorInternalServerError("Failed to read template registry") + })?; + + let templates: Vec<&OverrideTemplate> = registry.all(); + let response = serde_json::to_string(&templates) + .map_err(|_| 
actix_web::error::ErrorInternalServerError("Failed to serialize templates"))?; Ok(HttpResponse::Ok() .content_type("application/json") .body(response)) } +#[allow(dead_code)] #[cfg(not(feature = "explorer"))] fn handle_embedded_file(_path: &str) -> HttpResponse { HttpResponse::NotFound().body("404 Not Found") diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 79dffcb3..6e0e45fa 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -44,6 +44,7 @@ reqwest = { workspace = true } serde = { workspace = true } serde_derive = { workspace = true } # must match the serde version, see https://github.com/serde-rs/serde/issues/2584#issuecomment-1685252251 serde_json = { workspace = true } +serde_yaml = "0.9" solana-account = { workspace = true } solana-account-decoder = { workspace = true } solana-address-lookup-table-interface = { workspace = true } diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 33d70364..61825312 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -14,6 +14,7 @@ pub mod error; pub mod helpers; pub mod rpc; pub mod runloops; +pub mod scenarios; pub mod surfnet; pub mod types; diff --git a/crates/core/src/rpc/surfnet_cheatcodes.rs b/crates/core/src/rpc/surfnet_cheatcodes.rs index c2bf825c..67d437aa 100644 --- a/crates/core/src/rpc/surfnet_cheatcodes.rs +++ b/crates/core/src/rpc/surfnet_cheatcodes.rs @@ -15,8 +15,8 @@ use solana_transaction::versioned::VersionedTransaction; use spl_associated_token_account_interface::address::get_associated_token_address_with_program_id; use surfpool_types::{ AccountSnapshot, ClockCommand, ExportSnapshotConfig, GetStreamedAccountsResponse, - GetSurfnetInfoResponse, Idl, ResetAccountConfig, RpcProfileResultConfig, SimnetCommand, - SimnetEvent, StreamAccountConfig, UiKeyedProfileResult, + GetSurfnetInfoResponse, Idl, ResetAccountConfig, RpcProfileResultConfig, Scenario, + SimnetCommand, SimnetEvent, StreamAccountConfig, UiKeyedProfileResult, 
types::{AccountUpdate, SetSomeAccount, SupplyUpdate, TokenAccountUpdate, UuidOrSignature}, }; @@ -962,6 +962,116 @@ pub trait SurfnetCheatcodes { #[rpc(meta, name = "surfnet_getSurfnetInfo")] fn get_surfnet_info(&self, meta: Self::Metadata) -> Result>; + + /// A cheat code to register a scenario with account overrides. + /// + /// ## Parameters + /// - `scenario`: The Scenario object containing: + /// - `id`: Unique identifier for the scenario + /// - `name`: Human-readable name + /// - `description`: Description of the scenario + /// - `overrides`: Array of OverrideInstance objects, each containing: + /// - `id`: Unique identifier for this override instance + /// - `templateId`: Reference to the override template + /// - `values`: HashMap of field paths to override values + /// - `slotHeight`: The slot at which this override should be applied + /// - `label`: Optional label for this override + /// - `enabled`: Whether this override is active + /// - `fetchBeforeUse`: If true, fetch fresh account data just before transaction execution (useful for price feeds, oracle updates, and dynamic balances) + /// - `account`: Account address (either `{ "pubkey": "..." }` or `{ "pda": { "programId": "...", "seeds": [...] } }`) + /// - `tags`: Array of tags for categorization + /// - `slot` (optional): The slot at which the scenario should start. If omitted, uses the current slot. + /// + /// ## Returns + /// A `RpcResponse<()>` indicating whether the Scenario registration was successful. 
+ /// + /// ## Example Request (with slot) + /// ```json + /// { + /// "jsonrpc": "2.0", + /// "id": 1, + /// "method": "surfnet_registerScenario", + /// "params": [ + /// { + /// "id": "scenario-1", + /// "name": "Price Feed Override", + /// "description": "Override Pyth BTC/USD price at specific slots", + /// "overrides": [ + /// { + /// "id": "override-1", + /// "templateId": "pyth_btcusd", + /// "values": { + /// "price_message.price_value": 67500, + /// "price_message.conf": 100, + /// "price_message.expo": -8 + /// }, + /// "slotHeight": 100, + /// "label": "Set BTC price to $67,500", + /// "enabled": true, + /// "fetchBeforeUse": false, + /// "account": { + /// "pubkey": "H6ARHf6YXhGYeQfUzQNGk6rDNnLBQKrenN712K4QJNYH" + /// } + /// } + /// ], + /// "tags": ["defi", "price-feed"] + /// }, + /// 355684457 + /// ] + /// } + /// ``` + /// ## Example Request (without slot) + /// ```json + /// { + /// "jsonrpc": "2.0", + /// "id": 1, + /// "method": "surfnet_registerScenario", + /// "params": [ + /// { + /// "id": "scenario-1", + /// "name": "Price Feed Override", + /// "description": "Override Pyth BTC/USD price", + /// "overrides": [ + /// { + /// "id": "override-1", + /// "templateId": "pyth_btcusd", + /// "values": { + /// "price_message.price_value": 67500 + /// }, + /// "slotHeight": 100, + /// "label": "Set BTC price", + /// "enabled": true, + /// "fetchBeforeUse": true, + /// "account": { + /// "pubkey": "H6ARHf6YXhGYeQfUzQNGk6rDNnLBQKrenN712K4QJNYH" + /// } + /// } + /// ], + /// "tags": [] + /// } + /// ] + /// } + /// ``` + /// + /// ## Example Response + /// ```json + /// { + /// "jsonrpc": "2.0", + /// "context": { + /// "slot": 355684457, + /// "apiVersion": "2.2.2" + /// }, + /// "value": null, + /// "id": 1 + /// } + /// ``` + #[rpc(meta, name = "surfnet_registerScenario")] + fn register_scenario( + &self, + meta: Self::Metadata, + scenario: Scenario, + slot: Option, + ) -> Result>; } #[derive(Clone)] @@ -1559,6 +1669,18 @@ impl SurfnetCheatcodes 
for SurfnetCheatcodesRpc { value: snapshot, }) } + + fn register_scenario( + &self, + meta: Self::Metadata, + scenario: Scenario, + slot: Option, + ) -> Result> { + Ok(RpcResponse { + context: RpcResponseContext::new(0), + value: (), + }) + } } #[cfg(test)] @@ -1677,7 +1799,7 @@ mod tests { ); // Amount of tokens to mint (100 tokens with 2 decimal places) - let amount = 100_00; + let amount = 10_000; // Create mint_to instruction to mint tokens to the source token account let mint_to_instruction = mint_to( @@ -1736,7 +1858,7 @@ mod tests { .instruction_profiles .as_ref() .unwrap() - .get(0) + .first() .expect("instruction profile should exist"); assert!( ix_profile.error_message.is_none(), @@ -2230,7 +2352,7 @@ mod tests { .instruction_profiles .as_ref() .unwrap() - .get(0) + .first() .expect("instruction profile should exist"); assert!( ix_profile.error_message.is_none(), diff --git a/crates/core/src/scenarios/mod.rs b/crates/core/src/scenarios/mod.rs new file mode 100644 index 00000000..b258bb5b --- /dev/null +++ b/crates/core/src/scenarios/mod.rs @@ -0,0 +1,3 @@ +pub mod registry; + +pub use registry::TemplateRegistry; diff --git a/crates/core/src/scenarios/protocols/pyth/v2/idl.json b/crates/core/src/scenarios/protocols/pyth/v2/idl.json new file mode 100644 index 00000000..d791ed86 --- /dev/null +++ b/crates/core/src/scenarios/protocols/pyth/v2/idl.json @@ -0,0 +1,130 @@ +{ + "address": "rec5EKMGg6MxZYaMdyBfgwp4d5rB9T1VQH5pJv5LtFJ", + "metadata": { + "name": "price_feed", + "version": "0.1.0", + "spec": "0.1.0", + "description": "Created with Anchor" + }, + "instructions": [], + "accounts": [ + { + "name": "PriceUpdateV2", + "discriminator": [34, 241, 35, 99, 157, 126, 244, 205] + } + ], + "types": [ + { + "name": "PriceFeedMessage", + "type": { + "kind": "struct", + "fields": [ + { + "name": "feed_id", + "docs": [ + "`FeedId` but avoid the type alias because of compatibility issues with Anchor's `idl-build` feature." 
+ ], + "type": { + "array": ["u8", 32] + } + }, + { + "name": "price", + "type": "i64" + }, + { + "name": "conf", + "type": "u64" + }, + { + "name": "exponent", + "type": "i32" + }, + { + "name": "publish_time", + "docs": ["The timestamp of this price update in seconds"], + "type": "i64" + }, + { + "name": "prev_publish_time", + "docs": [ + "The timestamp of the previous price update. This field is intended to allow users to", + "identify the single unique price update for any moment in time:", + "for any time t, the unique update is the one such that prev_publish_time < t <= publish_time.", + "", + "Note that there may not be such an update while we are migrating to the new message-sending logic,", + "as some price updates on pythnet may not be sent to other chains (because the message-sending", + "logic may not have triggered). We can solve this problem by making the message-sending mandatory", + "(which we can do once publishers have migrated over).", + "", + "Additionally, this field may be equal to publish_time if the message is sent on a slot where", + "the aggregation was unsuccessful. This problem will go away once all publishers have", + "migrated over to a recent version of pyth-agent."
+ ], + "type": "i64" + }, + { + "name": "ema_price", + "type": "i64" + }, + { + "name": "ema_conf", + "type": "u64" + } + ] + } + }, + { + "name": "PriceUpdateV2", + "type": { + "kind": "struct", + "fields": [ + { + "name": "write_authority", + "type": "pubkey" + }, + { + "name": "verification_level", + "type": { + "defined": { + "name": "VerificationLevel" + } + } + }, + { + "name": "price_message", + "type": { + "defined": { + "name": "PriceFeedMessage" + } + } + }, + { + "name": "posted_slot", + "type": "u64" + } + ] + } + }, + { + "name": "VerificationLevel", + "type": { + "kind": "enum", + "variants": [ + { + "name": "Partial", + "fields": [ + { + "name": "num_signatures", + "type": "u8" + } + ] + }, + { + "name": "Full" + } + ] + } + } + ] +} diff --git a/crates/core/src/scenarios/protocols/pyth/v2/overrides.yaml b/crates/core/src/scenarios/protocols/pyth/v2/overrides.yaml new file mode 100644 index 00000000..a99626ae --- /dev/null +++ b/crates/core/src/scenarios/protocols/pyth/v2/overrides.yaml @@ -0,0 +1,37 @@ +protocol: Pyth +version: v2 +account_type: PriceUpdateV2 +idl_file_path: idl.json + +tags: + - oracle + - price-feed + - defi + +templates: + - id: pyth-btc-usd-v2 + name: Override BTC/USD Price Feed + description: Override Pyth BTC/USD price feed with custom price data + idl_account_name: PriceUpdateV2 + properties: ["price_message.price", "price_message.publish_time"] + address: + type: pubkey + value: 4cSM2e6rvbGQUFiJbqytoVMi5GgghSMr8LwVrT9VPSPo + + - id: pyth-eth-btc-v2 + name: Override ETH/BTC Price Feed + description: Override Pyth ETH/BTC price feed with custom price data + idl_account_name: PriceUpdateV2 + properties: ["price_message.price", "price_message.publish_time"] + address: + type: pubkey + value: 5JwbqPPMNpzE2jVAdobWo6m5gkhsDhRdGBo3FYbSfmaK + + - id: pyth-eth-usd-v2 + name: Override ETH/USD Price Feed + description: Override Pyth ETH/USD price feed with custom price data + idl_account_name: PriceUpdateV2 + properties:
["price_message.price", "price_message.publish_time"] + address: + type: pubkey + value: 42amVS4KgzR9rA28tkVYqVXjq9Qa8dcZQMbH5EYFX6XC diff --git a/crates/core/src/scenarios/registry.rs b/crates/core/src/scenarios/registry.rs new file mode 100644 index 00000000..82d71d6a --- /dev/null +++ b/crates/core/src/scenarios/registry.rs @@ -0,0 +1,85 @@ +use std::collections::BTreeMap; + +use log::debug; +use surfpool_types::{OverrideTemplate, YamlOverrideTemplateCollection}; + +pub const PYTH_V2_IDL_CONTENT: &str = include_str!("./protocols/pyth/v2/idl.json"); +pub const PYTH_V2_OVERRIDES_CONTENT: &str = include_str!("./protocols/pyth/v2/overrides.yaml"); + +/// Registry for managing override templates loaded from YAML files +#[derive(Clone, Debug, Default)] +pub struct TemplateRegistry { + /// Map of template ID to template + templates: BTreeMap, +} + +impl TemplateRegistry { + /// Create a new template registry + pub fn new() -> Self { + Self::default() + } + + pub fn load_pyth_overrides(&mut self) { + let idl = match serde_json::from_str(PYTH_V2_IDL_CONTENT) { + Ok(idl) => idl, + Err(e) => panic!("unable to load pyth idl: {}", e), + }; + + let Ok(collection) = + serde_yaml::from_str::(PYTH_V2_OVERRIDES_CONTENT) + else { + panic!("unable to load pyth overrides"); + }; + + // Convert all templates in the collection + let templates = collection.to_override_templates(idl); + + // Register each template + for template in templates { + let template_id = template.id.clone(); + self.templates.insert(template_id.clone(), template); + debug!(" Registered template: {}", template_id); + } + } + + /// Get a template by ID + pub fn get(&self, template_id: &str) -> Option<&OverrideTemplate> { + self.templates.get(template_id) + } + + /// Get all templates + pub fn all(&self) -> Vec<&OverrideTemplate> { + self.templates.values().collect() + } + + /// Get templates for a specific protocol + pub fn by_protocol(&self, protocol: &str) -> Vec<&OverrideTemplate> { + self.templates + .values() 
+ .filter(|t| t.protocol.eq_ignore_ascii_case(protocol)) + .collect() + } + + /// Get templates matching any of the given tags + pub fn by_tags(&self, tags: &[String]) -> Vec<&OverrideTemplate> { + self.templates + .values() + .filter(|t| t.tags.iter().any(|tag| tags.contains(tag))) + .collect() + } + + /// Get the number of loaded templates + pub fn count(&self) -> usize { + self.templates.len() + } + + /// Check if a template exists + pub fn contains(&self, template_id: &str) -> bool { + self.templates.contains_key(template_id) + } + + /// List all template IDs + pub fn list_ids(&self) -> Vec { + self.templates.keys().cloned().collect() + } +} diff --git a/crates/core/src/surfnet/locker.rs b/crates/core/src/surfnet/locker.rs index 74c9617a..f302d3b3 100644 --- a/crates/core/src/surfnet/locker.rs +++ b/crates/core/src/surfnet/locker.rs @@ -36,17 +36,14 @@ use solana_epoch_info::EpochInfo; use solana_hash::Hash; use solana_loader_v3_interface::{get_program_data_address, state::UpgradeableLoaderState}; use solana_message::{ - Message, MessageHeader, SimpleAddressLoader, VersionedMessage, + Message, SimpleAddressLoader, VersionedMessage, compiled_instruction::CompiledInstruction, v0::{LoadedAddresses, MessageAddressTableLookup}, }; use solana_pubkey::Pubkey; use solana_rpc_client_api::response::SlotInfo; use solana_signature::Signature; -use solana_transaction::{ - sanitized::SanitizedTransaction, - versioned::{TransactionVersion, VersionedTransaction}, -}; +use solana_transaction::{sanitized::SanitizedTransaction, versioned::VersionedTransaction}; use solana_transaction_error::TransactionError; use solana_transaction_status::{ EncodedConfirmedTransactionWithStatusMeta, @@ -3049,13 +3046,13 @@ pub fn format_ui_amount(amount: u64, decimals: u8) -> f64 { #[cfg(test)] mod tests { - use crate::scenarios::registry::PYTH_V2_IDL_CONTENT; - use crate::surfnet::SurfnetSvm; + use std::collections::HashMap; - use super::*; use solana_account::Account; use 
solana_account_decoder::UiAccountEncoding; - use std::collections::HashMap; + + use super::*; + use crate::{scenarios::registry::PYTH_V2_IDL_CONTENT, surfnet::SurfnetSvm}; #[test] fn test_get_forged_account_data_with_pyth_fixture() { diff --git a/crates/types/Cargo.toml b/crates/types/Cargo.toml index 0180eb19..fecfd699 100644 --- a/crates/types/Cargo.toml +++ b/crates/types/Cargo.toml @@ -20,7 +20,7 @@ crossbeam-channel = { workspace = true } once_cell = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -serde_with = { workspace = true, features = ["alloc"] } +serde_with = { workspace = true, features = ["alloc", "macros"] } solana-account = { workspace = true } solana-account-decoder-client-types = { workspace = true } solana-clock = { workspace = true } diff --git a/crates/types/src/lib.rs b/crates/types/src/lib.rs index e196ceff..f6951265 100644 --- a/crates/types/src/lib.rs +++ b/crates/types/src/lib.rs @@ -1,8 +1,10 @@ pub use txtx_addon_network_svm_types as txtx_svm_types; +pub mod scenarios; pub mod types; pub mod verified_tokens; pub use crossbeam_channel as channel; +pub use scenarios::*; pub use types::*; pub use verified_tokens::{TokenInfo, VERIFIED_TOKENS_BY_SYMBOL}; diff --git a/crates/types/src/scenarios.rs b/crates/types/src/scenarios.rs new file mode 100644 index 00000000..57b24c94 --- /dev/null +++ b/crates/types/src/scenarios.rs @@ -0,0 +1,370 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; +use solana_clock::Slot; +use uuid::Uuid; + +use crate::Idl; + +// ======================================== +// Core Scenarios Types +// ======================================== + +/// Defines how an account address should be determined +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum AccountAddress { + /// A specific public key + Pubkey(String), + /// A Program Derived Address with seeds + Pda { + program_id: String, + seeds: Vec, + }, +} + 
+/// Seeds used for PDA derivation +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum PdaSeed { + String(String), + Bytes(Vec), + Pubkey(String), + /// Reference to a property value + PropertyRef(String), +} + +/// A reusable template for creating account overrides +/// Values are mapped directly to IDL fields using dot notation (e.g., "agg.price", "expo") +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverrideTemplate { + /// Unique identifier for the template + pub id: String, + /// Human-readable name + pub name: String, + /// Description of what this template does + pub description: String, + /// Protocol this template is for (e.g., "Pyth", "Switchboard") + pub protocol: String, + /// IDL for the account structure - defines all available fields and types + pub idl: Idl, + /// How to determine the account address + pub address: AccountAddress, + /// Account type name from the IDL (e.g., "PriceAccount") + /// This specifies which account struct in the IDL to use + pub account_type: String, + pub properties: Vec, + /// Tags for categorization and search + pub tags: Vec, +} + +impl OverrideTemplate { + pub fn new( + id: String, + name: String, + description: String, + protocol: String, + idl: Idl, + address: AccountAddress, + properties: Vec, + account_type: String, + ) -> Self { + Self { + id, + name, + description, + protocol, + idl, + address, + account_type, + properties, + tags: Vec::new(), + } + } +} + +/// A concrete instance of an override template with specific values +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct OverrideInstance { + /// Unique identifier for this instance + pub id: String, + /// Reference to the template being used + pub template_id: String, + /// Values for the template properties (flat key-value map with dot notation, e.g., "price_message.price_value") + pub 
values: HashMap, + /// Slot height when this override should be applied + pub slot_height: Slot, + /// Optional label for this instance + pub label: Option, + /// Whether this override is enabled + pub enabled: bool, + /// Whether to fetch fresh account data just before transaction execution + /// Useful for time-sensitive data like price feeds, oracle updates, and dynamic balances + #[serde(default)] + pub fetch_before_use: bool, + /// Account address to override + pub account: AccountAddress, +} + +impl OverrideInstance { + pub fn new(template_id: String, slot_height: Slot, account: AccountAddress) -> Self { + Self { + id: Uuid::new_v4().to_string(), + template_id, + values: HashMap::new(), + slot_height, + label: None, + enabled: true, + fetch_before_use: false, + account, + } + } + + pub fn with_values(mut self, values: HashMap) -> Self { + self.values = values; + self + } + + pub fn with_label(mut self, label: String) -> Self { + self.label = Some(label); + self + } +} + +/// A scenario containing a timeline of overrides +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Scenario { + /// Unique identifier for the scenario + pub id: String, + /// Human-readable name + pub name: String, + /// Description of this scenario + pub description: String, + /// List of override instances in this scenario + pub overrides: Vec, + /// Tags for categorization + pub tags: Vec, +} + +impl Scenario { + pub fn new(name: String, description: String) -> Self { + Self { + id: Uuid::new_v4().to_string(), + name, + description, + overrides: Vec::new(), + tags: Vec::new(), + } + } + + pub fn add_override(&mut self, override_instance: OverrideInstance) { + self.overrides.push(override_instance); + // Sort by slot height for efficient lookup + self.overrides.sort_by_key(|o| o.slot_height); + } + + pub fn remove_override(&mut self, override_id: &str) { + self.overrides.retain(|o| o.id != override_id); + } + + pub fn 
get_overrides_for_slot(&self, slot: Slot) -> Vec<&OverrideInstance> { + self.overrides + .iter() + .filter(|o| o.enabled && o.slot_height == slot) + .collect() + } +} + +/// Configuration for scenario execution +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ScenarioConfig { + /// Whether scenarios are enabled + pub enabled: bool, + /// Currently active scenario + pub active_scenario: Option, + /// Whether to auto-save scenario changes + pub auto_save: bool, +} + +impl Default for ScenarioConfig { + fn default() -> Self { + Self { + enabled: false, + active_scenario: None, + auto_save: true, + } + } +} + +// ======================================== +// YAML Template File Types +// ======================================== + +/// YAML representation of an override template loaded from file +/// References an external IDL file via idl_file_path +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct YamlOverrideTemplateFile { + pub id: String, + pub name: String, + pub description: String, + pub protocol: String, + pub version: String, + pub account_type: String, + pub properties: Vec, + pub idl_file_path: String, + pub address: YamlAccountAddress, + #[serde(default)] + pub tags: Vec, +} + +impl YamlOverrideTemplateFile { + /// Convert file-based template to runtime OverrideTemplate with loaded IDL + pub fn to_override_template(self, idl: Idl) -> OverrideTemplate { + OverrideTemplate { + id: self.id, + name: self.name, + description: self.description, + protocol: self.protocol, + idl, + address: self.address.into(), + account_type: self.account_type, + properties: self.properties, + tags: self.tags, + } + } +} + +/// Collection of override templates sharing the same IDL +/// Used when one YAML file defines multiple templates (e.g., multiple Pyth price feeds) +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct YamlOverrideTemplateCollection { + /// Protocol these templates 
are for + pub protocol: String, + /// Version identifier + pub version: String, + /// Path to shared IDL file + pub idl_file_path: String, + /// Common tags for all templates + #[serde(default)] + pub tags: Vec, + /// The templates + pub templates: Vec, +} + +/// Individual template entry in a collection +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct YamlOverrideTemplateEntry { + pub id: String, + pub name: String, + pub description: String, + pub idl_account_name: String, + pub properties: Vec, + pub address: YamlAccountAddress, +} + +impl YamlOverrideTemplateCollection { + /// Convert collection to runtime OverrideTemplates with loaded IDL + pub fn to_override_templates(self, idl: Idl) -> Vec { + self.templates + .into_iter() + .map(|entry| OverrideTemplate { + id: entry.id, + name: entry.name, + description: entry.description, + protocol: self.protocol.clone(), + idl: idl.clone(), + address: entry.address.into(), + account_type: entry.idl_account_name, + properties: entry.properties, + tags: self.tags.clone(), + }) + .collect() + } +} + +/// YAML representation of an override template with embedded IDL +/// Used for RPC methods where file access is not available +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct YamlOverrideTemplate { + pub id: String, + pub name: String, + pub description: String, + pub protocol: String, + pub version: String, + pub account_type: String, + pub idl: Idl, + pub address: YamlAccountAddress, + pub properties: Vec, + #[serde(default)] + pub tags: Vec, +} + +impl YamlOverrideTemplate { + /// Convert to runtime OverrideTemplate + pub fn to_override_template(self) -> OverrideTemplate { + OverrideTemplate { + id: self.id, + name: self.name, + description: self.description, + protocol: self.protocol, + idl: self.idl, + address: self.address.into(), + account_type: self.account_type, + properties: self.properties, + tags: self.tags, + } + } +} + +/// YAML representation of account address 
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum YamlAccountAddress { + Pubkey { + #[serde(default)] + value: Option, + }, + Pda { + program_id: String, + seeds: Vec, + }, +} + +impl From for AccountAddress { + fn from(yaml: YamlAccountAddress) -> Self { + match yaml { + YamlAccountAddress::Pubkey { value } => { + AccountAddress::Pubkey(value.unwrap_or_default()) + } + YamlAccountAddress::Pda { program_id, seeds } => AccountAddress::Pda { + program_id, + seeds: seeds.into_iter().map(|s| s.into()).collect(), + }, + } + } +} + +/// YAML representation of PDA seeds +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum YamlPdaSeed { + String { value: String }, + Bytes { value: Vec }, + Pubkey { value: String }, + PropertyRef { value: String }, +} + +impl From for PdaSeed { + fn from(yaml: YamlPdaSeed) -> Self { + match yaml { + YamlPdaSeed::String { value } => PdaSeed::String(value), + YamlPdaSeed::Bytes { value } => PdaSeed::Bytes(value), + YamlPdaSeed::Pubkey { value } => PdaSeed::Pubkey(value), + YamlPdaSeed::PropertyRef { value } => PdaSeed::PropertyRef(value), + } + } +} From 3256c73c3a7ae1be2479459aa239b32b42484897 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 15:56:24 -0400 Subject: [PATCH 4/9] fix: build warning --- crates/cli/src/tui/simnet.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/cli/src/tui/simnet.rs b/crates/cli/src/tui/simnet.rs index 3e95d577..2c3c9b50 100644 --- a/crates/cli/src/tui/simnet.rs +++ b/crates/cli/src/tui/simnet.rs @@ -746,7 +746,7 @@ fn ui(f: &mut Frame, app: &mut App) { render_footer(f, app, rects[2].inner(Margin::new(2, 1))); } -fn title_block(title: &str, alignment: Alignment) -> Block { +fn title_block<'a>(title: &'a str, alignment: Alignment) -> Block<'a> { let title = Line::from(title).alignment(alignment); 
Block::new().borders(Borders::NONE).title(title) } From 9a09d964fd63204e193a97d4c3f1224f948b1483 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 15:57:46 -0400 Subject: [PATCH 5/9] feat: scenarios code complete --- crates/cli/src/http/mod.rs | 5 +- crates/cli/src/tui/simnet.rs | 2 +- crates/core/Cargo.toml | 5 +- crates/core/src/rpc/full.rs | 21 +- crates/core/src/rpc/surfnet_cheatcodes.rs | 43 +- crates/core/src/runloops/mod.rs | 82 +++- crates/core/src/scenarios/registry.rs | 8 +- crates/core/src/surfnet/locker.rs | 254 +++++------- crates/core/src/surfnet/svm.rs | 463 +++++++++++++++++++++- crates/core/src/tests/integration.rs | 72 ++-- crates/types/src/scenarios.rs | 14 +- crates/types/src/types.rs | 3 + 12 files changed, 746 insertions(+), 226 deletions(-) diff --git a/crates/cli/src/http/mod.rs b/crates/cli/src/http/mod.rs index 77a9d704..a2c48ca2 100644 --- a/crates/cli/src/http/mod.rs +++ b/crates/cli/src/http/mod.rs @@ -67,10 +67,7 @@ pub async fn start_subgraph_and_explorer_server( let collections_metadata_lookup_wrapped = Data::new(RwLock::new(collections_metadata_lookup)); // Initialize template registry and load templates - let mut template_registry = TemplateRegistry::new(); - template_registry.load_pyth_overrides(); - - let template_registry_wrapped = Data::new(RwLock::new(template_registry)); + let template_registry_wrapped = Data::new(RwLock::new(TemplateRegistry::new())); let subgraph_handle = start_subgraph_runloop( subgraph_events_tx, diff --git a/crates/cli/src/tui/simnet.rs b/crates/cli/src/tui/simnet.rs index 2c3c9b50..e0f7c670 100644 --- a/crates/cli/src/tui/simnet.rs +++ b/crates/cli/src/tui/simnet.rs @@ -462,7 +462,7 @@ fn run_app(terminal: &mut Terminal, mut app: App) -> io::Result<( SimnetEvent::SystemClockUpdated(clock) => { app.clock = clock.clone(); } - SimnetEvent::ClockUpdate(ClockCommand::Pause) => { + SimnetEvent::ClockUpdate(ClockCommand::PauseWithConfirmation(_)) => { app.paused = true; } 
SimnetEvent::ClockUpdate(ClockCommand::Resume) => { diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 6e0e45fa..d91458d8 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -71,7 +71,7 @@ solana-rpc-client = { workspace = true } solana-rpc-client-api = { workspace = true } solana-runtime = { workspace = true } solana-sdk-ids = { workspace = true } -solana-signature = { workspace = true, features = ["rand"]} +solana-signature = { workspace = true, features = ["rand"] } solana-signer = { workspace = true } solana-slot-hashes = { workspace = true } solana-system-interface = { workspace = true } @@ -89,8 +89,11 @@ uuid = { workspace = true } surfpool-subgraph = { workspace = true, optional = true } surfpool-types = { workspace = true } +anchor-lang-idl = { workspace = true } txtx-addon-kit = { workspace = true } txtx-addon-network-svm-types = { workspace = true } +txtx-addon-network-svm = { workspace = true } + [dev-dependencies] test-case = { workspace = true } diff --git a/crates/core/src/rpc/full.rs b/crates/core/src/rpc/full.rs index 7296e3c9..29578579 100644 --- a/crates/core/src/rpc/full.rs +++ b/crates/core/src/rpc/full.rs @@ -2615,7 +2615,12 @@ mod tests { } // confirm a block to move transactions to confirmed status - setup.context.svm_locker.confirm_current_block().unwrap(); + setup + .context + .svm_locker + .confirm_current_block(&None) + .await + .unwrap(); let res = setup .rpc .get_signature_statuses( @@ -3363,7 +3368,12 @@ mod tests { ) .unwrap(); - setup.context.svm_locker.confirm_current_block().unwrap(); + setup + .context + .svm_locker + .confirm_current_block(&None) + .await + .unwrap(); } // send two transactions that include a compute budget instruction @@ -3403,7 +3413,12 @@ mod tests { .await .join() .unwrap(); - setup.context.svm_locker.confirm_current_block().unwrap(); + setup + .context + .svm_locker + .confirm_current_block(&None) + .await + .unwrap(); } // sending the get_recent_prioritization_fees request 
with an account diff --git a/crates/core/src/rpc/surfnet_cheatcodes.rs b/crates/core/src/rpc/surfnet_cheatcodes.rs index 67d437aa..2d9276b4 100644 --- a/crates/core/src/rpc/surfnet_cheatcodes.rs +++ b/crates/core/src/rpc/surfnet_cheatcodes.rs @@ -897,8 +897,8 @@ pub trait SurfnetCheatcodes { /// { /// "jsonrpc": "2.0", /// "id": 1, - /// "method": "surfnet_streamAccount", - /// "params": [ "4EXSeLGxVBpAZwq7vm6evLdewpcvE2H56fpqL2pPiLFa", { "includeOwnedAccounts": true } ] + /// "method": "surfnet_getStreamedAccounts", + /// "params": [] /// } /// ``` /// @@ -911,8 +911,10 @@ pub trait SurfnetCheatcodes { /// "slot": 123456789, /// "apiVersion": "2.3.8" /// }, - /// "value": null - /// }, + /// "value": [ + /// "4EXSeLGxVBpAZwq7vm6evLdewpcvE2H56fpqL2pPiLFa" + /// ] + /// }, /// "id": 1 /// } /// ``` @@ -974,13 +976,13 @@ pub trait SurfnetCheatcodes { /// - `id`: Unique identifier for this override instance /// - `templateId`: Reference to the override template /// - `values`: HashMap of field paths to override values - /// - `slotHeight`: The slot at which this override should be applied + /// - `scenarioRelativeSlot`: The relative slot offset (from base slot) when this override should be applied /// - `label`: Optional label for this override /// - `enabled`: Whether this override is active /// - `fetchBeforeUse`: If true, fetch fresh account data just before transaction execution (useful for price feeds, oracle updates, and dynamic balances) /// - `account`: Account address (either `{ "pubkey": "..." }` or `{ "pda": { "programId": "...", "seeds": [...] } }`) /// - `tags`: Array of tags for categorization - /// - `slot` (optional): The slot at which the scenario should start. If omitted, uses the current slot. + /// - `slot` (optional): The base slot from which relative slot offsets are calculated. If omitted, uses the current slot. /// /// ## Returns /// A `RpcResponse<()>` indicating whether the Scenario registration was successful. 
@@ -1005,7 +1007,7 @@ pub trait SurfnetCheatcodes { /// "price_message.conf": 100, /// "price_message.expo": -8 /// }, - /// "slotHeight": 100, + /// "scenarioRelativeSlot": 100, /// "label": "Set BTC price to $67,500", /// "enabled": true, /// "fetchBeforeUse": false, @@ -1038,7 +1040,7 @@ pub trait SurfnetCheatcodes { /// "values": { /// "price_message.price_value": 67500 /// }, - /// "slotHeight": 100, + /// "scenarioRelativeSlot": 100, /// "label": "Set BTC price", /// "enabled": true, /// "fetchBeforeUse": true, @@ -1556,9 +1558,24 @@ impl SurfnetCheatcodes for SurfnetCheatcodesRpc { let key = meta.as_ref().map(|ctx| ctx.id.clone()).unwrap_or_default(); let surfnet_command_tx: crossbeam_channel::Sender = meta.get_surfnet_command_tx()?; - let _ = surfnet_command_tx.send(SimnetCommand::CommandClock(key, ClockCommand::Pause)); - meta.with_svm_reader(|svm_reader| svm_reader.latest_epoch_info.clone()) - .map_err(Into::into) + + // Create a channel to receive confirmation + let (response_tx, response_rx) = crossbeam_channel::bounded(1); + + // Send pause command with confirmation + let _ = surfnet_command_tx.send(SimnetCommand::CommandClock( + key, + ClockCommand::PauseWithConfirmation(response_tx), + )); + + // Wait for confirmation with timeout + response_rx + .recv_timeout(std::time::Duration::from_secs(2)) + .map_err(|e| jsonrpc_core::Error { + code: jsonrpc_core::ErrorCode::InternalError, + message: format!("Failed to confirm clock pause: {}", e), + data: None, + }) } fn resume_clock(&self, meta: Self::Metadata) -> Result { @@ -1676,8 +1693,10 @@ impl SurfnetCheatcodes for SurfnetCheatcodesRpc { scenario: Scenario, slot: Option, ) -> Result> { + let svm_locker = meta.get_svm_locker()?; + svm_locker.register_scenario(scenario, slot)?; Ok(RpcResponse { - context: RpcResponseContext::new(0), + context: RpcResponseContext::new(svm_locker.get_latest_absolute_slot()), value: (), }) } diff --git a/crates/core/src/runloops/mod.rs b/crates/core/src/runloops/mod.rs index 
9bf893a7..77e8a1b5 100644 --- a/crates/core/src/runloops/mod.rs +++ b/crates/core/src/runloops/mod.rs @@ -195,10 +195,54 @@ pub async fn start_block_production_runloop( svm_writer.slot_time = updated_slot_time; }); } - let _ = clock_command_tx.send(update); + + // Handle PauseWithConfirmation specially + if let ClockCommand::PauseWithConfirmation(response_tx) = update { + // Get current slot and slot_time before pausing + let (current_slot, slot_time) = svm_locker.with_svm_reader(|svm_reader| { + (svm_reader.latest_epoch_info.absolute_slot, svm_reader.slot_time) + }); + + // Send Pause to clock runloop + let _ = clock_command_tx.send(ClockCommand::Pause); + + // Loop and check if the slot has stopped advancing + let max_attempts = 10; + let mut attempts = 0; + loop { + tokio::time::sleep(tokio::time::Duration::from_millis(slot_time)).await; + + let new_slot = svm_locker.with_svm_reader(|svm_reader| { + svm_reader.latest_epoch_info.absolute_slot + }); + + // If slot hasn't changed, clock has stopped + if new_slot == current_slot || attempts >= max_attempts { + break; + } + + attempts += 1; + } + + // Read epoch info after clock has stopped + let epoch_info = svm_locker.with_svm_reader(|svm_reader| { + svm_reader.latest_epoch_info.clone() + }); + // Send response + let _ = response_tx.send(epoch_info); + } else { + let _ = clock_command_tx.send(update); + } continue } SimnetCommand::UpdateInternalClock(_, clock) => { + // Confirm the current block to materialize any scheduled overrides for this slot + if let Err(e) = svm_locker.confirm_current_block(&remote_client_with_commitment).await { + let _ = svm_locker.simnet_events_tx().send(SimnetEvent::error(format!( + "Failed to confirm block after time travel: {}", e + ))); + } + svm_locker.with_svm_writer(|svm_writer| { svm_writer.inner.set_sysvar(&clock); svm_writer.updated_at = clock.unix_timestamp as u64 * 1_000; @@ -210,6 +254,29 @@ pub async fn start_block_production_runloop( let _ = 
svm_writer.simnet_events_tx.send(SimnetEvent::SystemClockUpdated(clock)); }); } + SimnetCommand::UpdateInternalClockWithConfirmation(_, clock, response_tx) => { + // Confirm the current block to materialize any scheduled overrides for this slot + if let Err(e) = svm_locker.confirm_current_block(&remote_client_with_commitment).await { + let _ = svm_locker.simnet_events_tx().send(SimnetEvent::error(format!( + "Failed to confirm block after time travel: {}", e + ))); + } + + let epoch_info = svm_locker.with_svm_writer(|svm_writer| { + svm_writer.inner.set_sysvar(&clock); + svm_writer.updated_at = clock.unix_timestamp as u64 * 1_000; + svm_writer.latest_epoch_info.absolute_slot = clock.slot; + svm_writer.latest_epoch_info.epoch = clock.epoch; + svm_writer.latest_epoch_info.slot_index = clock.slot; + svm_writer.latest_epoch_info.epoch = clock.epoch; + svm_writer.latest_epoch_info.absolute_slot = clock.slot + clock.epoch * svm_writer.latest_epoch_info.slots_in_epoch; + let _ = svm_writer.simnet_events_tx.send(SimnetEvent::SystemClockUpdated(clock)); + svm_writer.latest_epoch_info.clone() + }); + + // Send confirmation back + let _ = response_tx.send(epoch_info); + } SimnetCommand::UpdateBlockProductionMode(update) => { block_production_mode = update; continue @@ -256,7 +323,9 @@ pub async fn start_block_production_runloop( { if do_produce_block { - svm_locker.confirm_current_block()?; + svm_locker + .confirm_current_block(&remote_client_with_commitment) + .await?; } } } @@ -300,6 +369,15 @@ pub fn start_clock_runloop( Ok(ClockCommand::UpdateSlotInterval(updated_slot_time)) => { slot_time = updated_slot_time; } + Ok(ClockCommand::PauseWithConfirmation(_)) => { + // This should be handled in the block production runloop, not here + // If it reaches here, just treat it as a regular Pause + enabled = false; + if let Some(ref simnet_events_tx) = simnet_events_tx { + let _ = + simnet_events_tx.send(SimnetEvent::ClockUpdate(ClockCommand::Pause)); + } + } Err(_e) => {} } 
sleep(Duration::from_millis(slot_time)); diff --git a/crates/core/src/scenarios/registry.rs b/crates/core/src/scenarios/registry.rs index 82d71d6a..9d2fd16e 100644 --- a/crates/core/src/scenarios/registry.rs +++ b/crates/core/src/scenarios/registry.rs @@ -1,6 +1,5 @@ use std::collections::BTreeMap; -use log::debug; use surfpool_types::{OverrideTemplate, YamlOverrideTemplateCollection}; pub const PYTH_V2_IDL_CONTENT: &str = include_str!("./protocols/pyth/v2/idl.json"); @@ -10,13 +9,15 @@ pub const PYTH_V2_OVERRIDES_CONTENT: &str = include_str!("./protocols/pyth/v2/ov #[derive(Clone, Debug, Default)] pub struct TemplateRegistry { /// Map of template ID to template - templates: BTreeMap, + pub templates: BTreeMap, } impl TemplateRegistry { /// Create a new template registry pub fn new() -> Self { - Self::default() + let mut default = Self::default(); + default.load_pyth_overrides(); + default } pub fn load_pyth_overrides(&mut self) { @@ -38,7 +39,6 @@ impl TemplateRegistry { for template in templates { let template_id = template.id.clone(); self.templates.insert(template_id.clone(), template); - debug!(" Registered template: {}", template_id); } } diff --git a/crates/core/src/surfnet/locker.rs b/crates/core/src/surfnet/locker.rs index f302d3b3..00764f29 100644 --- a/crates/core/src/surfnet/locker.rs +++ b/crates/core/src/surfnet/locker.rs @@ -123,39 +123,6 @@ pub type SurfpoolContextualizedResult = SurfpoolResult>; /// /// # Returns /// Result indicating success or error -fn apply_override_to_json( - json: &mut serde_json::Value, - path: &str, - value: &serde_json::Value, -) -> SurfpoolResult<()> { - let parts: Vec<&str> = path.split('.').collect(); - - if parts.is_empty() { - return Err(SurfpoolError::internal("Empty path provided for override")); - } - - // Navigate to the parent of the target field - let mut current = json; - for part in &parts[..parts.len() - 1] { - current = current.get_mut(part).ok_or_else(|| { - SurfpoolError::internal(format!("Path segment 
'{}' not found in JSON", part)) - })?; - } - - // Set the final field - let final_key = parts[parts.len() - 1]; - match current { - serde_json::Value::Object(map) => { - map.insert(final_key.to_string(), value.clone()); - Ok(()) - } - _ => Err(SurfpoolError::internal(format!( - "Cannot set field '{}' - parent is not an object", - final_key - ))), - } -} - pub struct SurfnetSvmLocker(pub Arc>); impl Clone for SurfnetSvmLocker { @@ -909,7 +876,6 @@ impl SurfnetSvmLocker { ) -> SurfpoolResult { let signature = transaction.signatures[0]; - // Can we avoid this write? let latest_absolute_slot = self.with_svm_writer(|svm_writer| { let latest_absolute_slot = svm_writer.get_latest_absolute_slot(); svm_writer.notify_signature_subscribers( @@ -949,7 +915,6 @@ impl SurfnetSvmLocker { .await? .inner; - // I don't think this code is required // We also need the pubkeys of the ALTs to be pulled from the remote, so we'll do a fetch for them let alt_account_updates = self .get_multiple_accounts( @@ -1174,23 +1139,6 @@ impl SurfnetSvmLocker { ) .await?; - eprintln!( - "DEBUG: pre_execution_capture keys = {:?}", - profile_result - .pre_execution_capture - .keys() - .map(|p| p.to_string()) - .collect::>() - ); - eprintln!( - "DEBUG: post_execution_capture keys = {:?}", - profile_result - .post_execution_capture - .keys() - .map(|p| p.to_string()) - .collect::>() - ); - profile_result .pre_execution_capture .retain(|pubkey, _| ix_required_accounts.contains(pubkey)); @@ -1198,23 +1146,6 @@ impl SurfnetSvmLocker { .post_execution_capture .retain(|pubkey, _| ix_required_accounts.contains(pubkey)); - eprintln!( - "DEBUG: After retain - pre keys = {:?}", - profile_result - .pre_execution_capture - .keys() - .map(|p| p.to_string()) - .collect::>() - ); - eprintln!( - "DEBUG: After retain - post keys = {:?}", - profile_result - .post_execution_capture - .keys() - .map(|p| p.to_string()) - .collect::>() - ); - profile_result.compute_units_consumed = profile_result .compute_units_consumed 
.saturating_sub(previous_cus); @@ -1750,6 +1681,15 @@ impl SurfnetSvmLocker { pub fn get_streamed_accounts(&self) -> HashMap { self.with_svm_reader(|svm_reader| svm_reader.streamed_accounts.clone()) } + + /// Registers a scenario for execution + pub fn register_scenario( + &self, + scenario: surfpool_types::Scenario, + slot: Option, + ) -> SurfpoolResult<()> { + self.with_svm_writer(move |svm_writer| svm_writer.register_scenario(scenario, slot)) + } } /// Token account related functions @@ -2339,6 +2279,18 @@ impl SurfnetSvmLocker { /// /// # Returns /// The modified account data bytes with discriminator + /// Forges account data by applying overrides to existing account data + /// + /// This delegates to the SurfnetSvm implementation. + /// + /// # Arguments + /// * `account_pubkey` - The account address (for error messages) + /// * `account_data` - The original account data bytes + /// * `idl` - The IDL for the account's program + /// * `overrides` - Map of field paths to new values + /// + /// # Returns + /// The forged account data as bytes, or an error pub fn get_forged_account_data( &self, account_pubkey: &Pubkey, @@ -2346,62 +2298,9 @@ impl SurfnetSvmLocker { idl: &Idl, overrides: &HashMap, ) -> SurfpoolResult> { - // Step 1: Validate account data size - if account_data.len() < 8 { - return Err(SurfpoolError::invalid_account_data( - account_pubkey, - "Account data too small to be an Anchor account (need at least 8 bytes for discriminator)", - Some("Data length too small"), - )); - } - - // Step 3: Split discriminator and data - let discriminator = &account_data[..8]; - let serialized_data = &account_data[8..]; - - // Step 4: Find the account type using the discriminator - let _account_def = idl - .accounts - .iter() - .find(|acc| acc.discriminator.eq(discriminator)) - .ok_or_else(|| { - SurfpoolError::internal(format!( - "Account with discriminator '{:?}' not found in IDL", - discriminator - )) - })?; - - // Step 5: Deserialize the account data to JSON - // 
For now, we'll use a simple approach: deserialize as raw JSON - let mut account_json: serde_json::Value = serde_json::from_slice(serialized_data) - .map_err(|e| { - SurfpoolError::deserialize_error( - "account data", - format!( - "Failed to deserialize account data as JSON: {}. \ - Note: This is a simplified implementation that expects JSON-serialized data. \ - For Anchor accounts, proper Borsh deserialization should be implemented.", - e - ) - ) - })?; - - // Step 6: Apply overrides using dot notation - for (path, value) in overrides { - apply_override_to_json(&mut account_json, path, value)?; - } - - // Step 7: Re-serialize the modified data - let modified_data = serde_json::to_vec(&account_json).map_err(|e| { - SurfpoolError::internal(format!("Failed to serialize modified account data: {}", e)) - })?; - - // Step 8: Reconstruct the account data with discriminator - let mut new_account_data = Vec::with_capacity(8 + modified_data.len()); - new_account_data.extend_from_slice(discriminator); - new_account_data.extend_from_slice(&modified_data); - - Ok(new_account_data) + self.with_svm_reader(|svm_reader| { + svm_reader.get_forged_account_data(account_pubkey, account_data, idl, overrides) + }) } } /// Program account related functions @@ -2802,7 +2701,7 @@ impl SurfnetSvmLocker { simnet_command_tx: Sender, config: TimeTravelConfig, ) -> SurfpoolResult { - let (mut epoch_info, slot_time, updated_at) = self.with_svm_reader(|svm_reader| { + let (epoch_info, slot_time, updated_at) = self.with_svm_reader(|svm_reader| { ( svm_reader.latest_epoch_info.clone(), svm_reader.slot_time, @@ -2818,17 +2717,30 @@ impl SurfnetSvmLocker { .unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap()) .format("%Y-%m-%d %H:%M:%S") .to_string(); - epoch_info.slot_index = clock_update.slot; - epoch_info.epoch = clock_update.epoch; - epoch_info.absolute_slot = - clock_update.slot + clock_update.epoch * epoch_info.slots_in_epoch; - let _ = 
simnet_command_tx.send(SimnetCommand::UpdateInternalClock(key, clock_update)); + + // Create a channel for confirmation + let (response_tx, response_rx) = crossbeam_channel::bounded(1); + + // Send the command with confirmation + let _ = simnet_command_tx.send(SimnetCommand::UpdateInternalClockWithConfirmation( + key, + clock_update, + response_tx, + )); + + // Wait for confirmation with timeout + let updated_epoch_info = response_rx + .recv_timeout(std::time::Duration::from_secs(2)) + .map_err(|e| { + SurfpoolError::internal(format!("Failed to confirm clock update: {}", e)) + })?; + let _ = self.simnet_events_tx().send(SimnetEvent::info(format!( "Time travel to {} successful (epoch {} / slot {})", - formated_time, epoch_info.epoch, epoch_info.absolute_slot + formated_time, updated_epoch_info.epoch, updated_epoch_info.absolute_slot ))); - Ok(epoch_info) + Ok(updated_epoch_info) } /// Retrieves the latest absolute slot from the underlying SVM. @@ -2869,8 +2781,17 @@ impl SurfnetSvmLocker { } /// Confirms the current block on the underlying SVM, returning `Ok(())` or an error. - pub fn confirm_current_block(&self) -> SurfpoolResult<()> { - self.with_svm_writer(|svm_writer| svm_writer.confirm_current_block()) + pub async fn confirm_current_block( + &self, + remote_ctx: &Option<(SurfnetRemoteClient, CommitmentConfig)>, + ) -> SurfpoolResult<()> { + // First, confirm the block synchronously + self.with_svm_writer(|svm_writer| svm_writer.confirm_current_block())?; + + // Then, materialize any scheduled overrides for the new slot + // TODO: Pass remote client when available for fetch_before_use support + let mut svm_writer = self.0.write().await; + svm_writer.materialize_overrides(remote_ctx).await } /// Subscribes for signature updates (confirmed/finalized) and returns a receiver of events. 
@@ -3052,7 +2973,10 @@ mod tests { use solana_account_decoder::UiAccountEncoding; use super::*; - use crate::{scenarios::registry::PYTH_V2_IDL_CONTENT, surfnet::SurfnetSvm}; + use crate::{ + scenarios::registry::PYTH_V2_IDL_CONTENT, + surfnet::{SurfnetSvm, svm::apply_override_to_decoded_account}, + }; #[test] fn test_get_forged_account_data_with_pyth_fixture() { @@ -3536,29 +3460,57 @@ mod tests { } #[test] - fn test_apply_override_to_json() { - let mut json = serde_json::json!({ - "price_message": { - "price": 100, - "publish_time": 1234567890 - }, - "expo": -8 - }); + fn test_apply_override_to_decoded_account() { + use txtx_addon_kit::{indexmap::IndexMap, types::types::Value}; + + // Create a txtx Value object + let mut price_message_obj = IndexMap::new(); + price_message_obj.insert("price".to_string(), Value::Integer(100)); + price_message_obj.insert("publish_time".to_string(), Value::Integer(1234567890)); + + let mut decoded_value = IndexMap::new(); + decoded_value.insert( + "price_message".to_string(), + Value::Object(price_message_obj), + ); + decoded_value.insert("expo".to_string(), Value::Integer(-8)); + + let mut decoded_value = Value::Object(decoded_value); // Test simple override - let result = apply_override_to_json(&mut json, "expo", &serde_json::json!(-6)); + let result = + apply_override_to_decoded_account(&mut decoded_value, "expo", &serde_json::json!(-6)); assert!(result.is_ok()); - assert_eq!(json["expo"], -6); + match &decoded_value { + Value::Object(map) => { + assert_eq!(map.get("expo"), Some(&Value::Integer(-6))); + } + _ => panic!("Expected Object"), + } // Test nested override - let result = - apply_override_to_json(&mut json, "price_message.price", &serde_json::json!(200)); + let result = apply_override_to_decoded_account( + &mut decoded_value, + "price_message.price", + &serde_json::json!(200), + ); assert!(result.is_ok()); - assert_eq!(json["price_message"]["price"], 200); + match &decoded_value { + Value::Object(map) => match 
map.get("price_message") { + Some(Value::Object(price_msg)) => { + assert_eq!(price_msg.get("price"), Some(&Value::Integer(200))); + } + _ => panic!("Expected price_message to be Object"), + }, + _ => panic!("Expected Object"), + } // Test invalid path - let result = - apply_override_to_json(&mut json, "nonexistent.field", &serde_json::json!(999)); + let result = apply_override_to_decoded_account( + &mut decoded_value, + "nonexistent.field", + &serde_json::json!(999), + ); assert!(result.is_err()); } } diff --git a/crates/core/src/surfnet/svm.rs b/crates/core/src/surfnet/svm.rs index 9a5df117..05ecda2e 100644 --- a/crates/core/src/surfnet/svm.rs +++ b/crates/core/src/surfnet/svm.rs @@ -27,7 +27,7 @@ use solana_client::{ rpc_response::{RpcKeyedAccount, RpcLogsResponse, RpcPerfSample}, }; use solana_clock::{Clock, Slot}; -use solana_commitment_config::CommitmentLevel; +use solana_commitment_config::{CommitmentConfig, CommitmentLevel}; use solana_epoch_info::EpochInfo; use solana_feature_gate_interface::Feature; use solana_genesis_config::GenesisConfig; @@ -52,16 +52,20 @@ use spl_token_2022_interface::extension::{ }; use surfpool_types::{ AccountChange, AccountProfileState, AccountSnapshot, DEFAULT_PROFILING_MAP_CAPACITY, - DEFAULT_SLOT_TIME_MS, ExportSnapshotConfig, FifoMap, Idl, ProfileResult, RpcProfileDepth, - RpcProfileResultConfig, RunbookExecutionStatusReport, SimnetEvent, + DEFAULT_SLOT_TIME_MS, ExportSnapshotConfig, FifoMap, Idl, OverrideInstance, ProfileResult, + RpcProfileDepth, RpcProfileResultConfig, RunbookExecutionStatusReport, SimnetEvent, TransactionConfirmationStatus, TransactionStatusEvent, UiAccountChange, UiAccountProfileState, UiProfileResult, VersionedIdl, types::{ ComputeUnitsEstimationResult, KeyedProfileResult, UiKeyedProfileResult, UuidOrSignature, }, }; -use txtx_addon_kit::{indexmap::IndexMap, types::types::AddonJsonConverter}; -use txtx_addon_network_svm_types::subgraph::idl::parse_bytes_to_value_with_expected_idl_type_def_ty; +use 
txtx_addon_kit::{indexmap::IndexMap, types::types::{AddonJsonConverter, Value}}; +use txtx_addon_network_svm::codec::idl::borsh_encode_value_to_idl_type; +use txtx_addon_network_svm_types::subgraph::idl::{ + parse_bytes_to_value_with_expected_idl_type_def_ty, + parse_bytes_to_value_with_expected_idl_type_def_ty_with_leftover_bytes, +}; use uuid::Uuid; use super::{ @@ -72,6 +76,7 @@ use super::{ use crate::{ error::{SurfpoolError, SurfpoolResult}, rpc::utils::convert_transaction_metadata_from_canonical, + scenarios::TemplateRegistry, surfnet::{LogsSubscriptionData, locker::is_supported_token_program}, types::{ GeyserAccountUpdate, MintAccount, SurfnetTransactionStatus, SyntheticBlockhash, @@ -79,6 +84,84 @@ use crate::{ }, }; +/// Helper function to apply an override to a decoded account value using dot notation +pub fn apply_override_to_decoded_account( + decoded_value: &mut Value, + path: &str, + value: &serde_json::Value, +) -> SurfpoolResult<()> { + let parts: Vec<&str> = path.split('.').collect(); + + if parts.is_empty() { + return Err(SurfpoolError::internal("Empty path provided for override")); + } + + // Navigate to the parent of the target field + let mut current = decoded_value; + for part in &parts[..parts.len() - 1] { + match current { + Value::Object(map) => { + current = map.get_mut(&part.to_string()).ok_or_else(|| { + SurfpoolError::internal(format!("Path segment '{}' not found in decoded account", part)) + })?; + } + _ => { + return Err(SurfpoolError::internal(format!( + "Cannot navigate through field '{}' - not an object", + part + ))); + } + } + } + + // Set the final field + let final_key = parts[parts.len() - 1]; + match current { + Value::Object(map) => { + // Convert serde_json::Value to txtx Value + let txtx_value = json_to_txtx_value(value)?; + map.insert(final_key.to_string(), txtx_value); + Ok(()) + } + _ => Err(SurfpoolError::internal(format!( + "Cannot set field '{}' - parent is not an object", + final_key + ))), + } +} + +/// Helper 
function to convert serde_json::Value to txtx Value +fn json_to_txtx_value(json: &serde_json::Value) -> SurfpoolResult { + match json { + serde_json::Value::Null => Ok(Value::Null), + serde_json::Value::Bool(b) => Ok(Value::Bool(*b)), + serde_json::Value::Number(n) => { + if let Some(i) = n.as_i64() { + Ok(Value::Integer(i as i128)) + } else if let Some(u) = n.as_u64() { + Ok(Value::Integer(u as i128)) + } else if let Some(f) = n.as_f64() { + Ok(Value::Float(f)) + } else { + Err(SurfpoolError::internal(format!("Unable to convert number: {}", n))) + } + } + serde_json::Value::String(s) => Ok(Value::String(s.clone())), + serde_json::Value::Array(arr) => { + let txtx_arr: Result, _> = + arr.iter().map(json_to_txtx_value).collect(); + Ok(Value::Array(Box::new(txtx_arr?))) + } + serde_json::Value::Object(obj) => { + let mut txtx_obj = IndexMap::new(); + for (k, v) in obj.iter() { + txtx_obj.insert(k.clone(), json_to_txtx_value(v)?); + } + Ok(Value::Object(txtx_obj)) + } + } +} + pub type AccountOwner = Pubkey; #[allow(deprecated)] @@ -151,6 +234,7 @@ pub struct SurfnetSvm { /// the update with higher write_version should supersede the one with lower write_version. 
pub write_version: u64, pub registered_idls: HashMap>, + // pub registered_idls: HashMap<[u8; 8], BinaryHeap>, pub feature_set: FeatureSet, pub instruction_profiling_enabled: bool, pub max_profiles: usize, @@ -158,6 +242,7 @@ pub struct SurfnetSvm { pub account_update_slots: HashMap, pub streamed_accounts: HashMap, pub recent_blockhashes: VecDeque<(SyntheticBlockhash, i64)>, + pub scheduled_overrides: HashMap>, } pub const FEATURE: Feature = Feature { @@ -249,6 +334,7 @@ impl SurfnetSvm { account_update_slots: HashMap::new(), streamed_accounts: HashMap::new(), recent_blockhashes: VecDeque::new(), + scheduled_overrides: HashMap::new(), }; // Generate the initial synthetic blockhash @@ -286,6 +372,11 @@ impl SurfnetSvm { self.set_profiling_map_capacity(self.max_profiles); self.inner.set_log_bytes_limit(log_bytes_limit); + let registry = TemplateRegistry::new(); + for (_, template) in registry.templates.into_iter() { + self.register_idl(template.idl, None); + } + if let Some(remote_client) = remote_ctx { let _ = self .simnet_events_tx @@ -724,6 +815,7 @@ impl SurfnetSvm { self.registered_idls.clear(); self.runbook_executions.clear(); self.streamed_accounts.clear(); + self.scheduled_overrides.clear(); Ok(()) } @@ -1219,6 +1311,328 @@ impl SurfnetSvm { Ok(()) } + /// Materializes scheduled overrides for the current slot + /// + /// This function: + /// 1. Dequeues overrides scheduled for the current slot + /// 2. Resolves account addresses (Pubkey or PDA) + /// 3. Optionally fetches fresh account data from remote if `fetch_before_use` is enabled + /// 4. Applies the overrides to the account data + /// 5. 
Updates the SVM state + pub async fn materialize_overrides( + &mut self, + remote_ctx: &Option<(SurfnetRemoteClient, CommitmentConfig)>, + ) -> SurfpoolResult<()> { + let current_slot = self.latest_epoch_info.absolute_slot; + + // Remove and get overrides for this slot + let Some(overrides) = self.scheduled_overrides.remove(¤t_slot) else { + // No overrides for this slot + return Ok(()); + }; + + debug!( + "Materializing {} override(s) for slot {}", + overrides.len(), + current_slot + ); + + for override_instance in overrides { + if !override_instance.enabled { + debug!("Skipping disabled override: {}", override_instance.id); + continue; + } + + // Resolve account address + let account_pubkey = match &override_instance.account { + surfpool_types::AccountAddress::Pubkey(pubkey_str) => { + match Pubkey::from_str(pubkey_str) { + Ok(pubkey) => pubkey, + Err(e) => { + warn!( + "Failed to parse pubkey '{}' for override {}: {}", + pubkey_str, override_instance.id, e + ); + continue; + } + } + } + surfpool_types::AccountAddress::Pda { + program_id: _, + seeds: _, + } => unimplemented!(), + }; + + debug!( + "Processing override {} for account {} (label: {:?})", + override_instance.id, account_pubkey, override_instance.label + ); + + // Fetch fresh account data from remote if requested + if override_instance.fetch_before_use { + if let Some((client, _)) = remote_ctx { + debug!( + "Fetching fresh account data for {} from remote", + account_pubkey + ); + + match client + .get_account(&account_pubkey, CommitmentConfig::confirmed()) + .await + { + Ok(GetAccountResult::FoundAccount(_pubkey, remote_account, _)) => { + debug!( + "Fetched account {} from remote: {} lamports, {} bytes", + account_pubkey, + remote_account.lamports(), + remote_account.data().len() + ); + + // Set the fresh account data in the SVM + if let Err(e) = self.inner.set_account(account_pubkey, remote_account) { + warn!( + "Failed to set account {} from remote: {}", + account_pubkey, e + ); + } + } + 
Ok(GetAccountResult::None(_)) => { + debug!("Account {} not found on remote", account_pubkey); + } + Ok(_) => { + debug!("Account {} fetched (other variant)", account_pubkey); + } + Err(e) => { + warn!( + "Failed to fetch account {} from remote: {}", + account_pubkey, e + ); + } + } + } else { + debug!( + "fetch_before_use enabled but no remote client available for override {}", + override_instance.id + ); + } + } + + // Apply the override values to the account data + if !override_instance.values.is_empty() { + debug!( + "Override {} applying {} field modification(s) to account {}", + override_instance.id, + override_instance.values.len(), + account_pubkey + ); + + // Get the account from the SVM + let Some(account) = self.inner.get_account(&account_pubkey) else { + warn!( + "Account {} not found in SVM for override {}, skipping modifications", + account_pubkey, override_instance.id + ); + continue; + }; + + // Get the account owner (program ID) + let owner_program_id = account.owner(); + + // Look up the IDL for the owner program + let Some(idl_versions) = self.registered_idls.get(owner_program_id) else { + warn!( + "No IDL registered for program {} (owner of account {}), skipping override {}", + owner_program_id, account_pubkey, override_instance.id + ); + continue; + }; + + // Get the latest IDL version + let Some(versioned_idl) = idl_versions.peek() else { + warn!( + "IDL versions empty for program {}, skipping override {}", + owner_program_id, override_instance.id + ); + continue; + }; + + let idl = &versioned_idl.1; + + // Get account data + let account_data = account.data(); + + // Use get_forged_account_data to apply the overrides + let new_account_data = match self.get_forged_account_data( + &account_pubkey, + account_data, + idl, + &override_instance.values, + ) { + Ok(data) => data, + Err(e) => { + warn!( + "Failed to forge account data for {} (override {}): {}", + account_pubkey, override_instance.id, e + ); + continue; + } + }; + + // Create a new 
account with modified data + let modified_account = Account { + lamports: account.lamports(), + data: new_account_data, + owner: *account.owner(), + executable: account.executable(), + rent_epoch: account.rent_epoch(), + }; + + // Update the account in the SVM + if let Err(e) = self.inner.set_account(account_pubkey, modified_account) { + warn!( + "Failed to set modified account {} in SVM: {}", + account_pubkey, e + ); + } else { + debug!( + "Successfully applied {} override(s) to account {} (override {})", + override_instance.values.len(), + account_pubkey, + override_instance.id + ); + } + } + } + + Ok(()) + } + + /// Forges account data by applying overrides to existing account data + /// + /// This function: + /// 1. Validates account data size (must be at least 8 bytes for discriminator) + /// 2. Splits discriminator and serialized data + /// 3. Finds the account type in the IDL using the discriminator + /// 4. Deserializes the account data + /// 5. Applies field overrides using dot notation + /// 6. Re-serializes the modified data + /// 7. 
Reconstructs the account data with the original discriminator + /// + /// # Arguments + /// * `account_pubkey` - The account address (for error messages) + /// * `account_data` - The original account data bytes + /// * `idl` - The IDL for the account's program + /// * `overrides` - Map of field paths to new values + /// + /// # Returns + /// The forged account data as bytes, or an error + pub fn get_forged_account_data( + &self, + account_pubkey: &Pubkey, + account_data: &[u8], + idl: &Idl, + overrides: &HashMap, + ) -> SurfpoolResult> { + // Validate account data size + if account_data.len() < 8 { + return Err(SurfpoolError::invalid_account_data( + account_pubkey, + "Account data too small to be an Anchor account (need at least 8 bytes for discriminator)", + Some("Data length too small"), + )); + } + + // Split discriminator and data + let discriminator = &account_data[..8]; + let serialized_data = &account_data[8..]; + + // Find the account type using the discriminator + let account_def = idl + .accounts + .iter() + .find(|acc| acc.discriminator.eq(discriminator)) + .ok_or_else(|| { + SurfpoolError::internal(format!( + "Account with discriminator '{:?}' not found in IDL", + discriminator + )) + })?; + + // Find the corresponding type definition + let account_type = idl + .types + .iter() + .find(|t| t.name == account_def.name) + .ok_or_else(|| { + SurfpoolError::internal(format!( + "Type definition for account '{}' not found in IDL", + account_def.name + )) + })?; + + // Set up generics for parsing + let empty_vec = vec![]; + let idl_type_def_generics = idl + .types + .iter() + .find(|t| t.name == account_type.name) + .map(|t| &t.generics); + + // Deserialize the account data using proper Borsh deserialization + // Use the version that returns leftover bytes to preserve any trailing padding + let (mut parsed_value, leftover_bytes) = + parse_bytes_to_value_with_expected_idl_type_def_ty_with_leftover_bytes( + serialized_data, + &account_type.ty, + &idl.types, + 
&vec![], + idl_type_def_generics.unwrap_or(&empty_vec), + ) + .map_err(|e| { + SurfpoolError::deserialize_error( + "account data", + format!("Failed to deserialize account data using Borsh: {}", e), + ) + })?; + + // Apply overrides to the decoded value + for (path, value) in overrides { + apply_override_to_decoded_account(&mut parsed_value, path, value)?; + } + + // Construct an IdlType::Defined that references the account type + // This is needed because borsh_encode_value_to_idl_type expects IdlType, not IdlTypeDefTy + use anchor_lang_idl::types::{IdlGenericArg, IdlType}; + let defined_type = IdlType::Defined { + name: account_type.name.clone(), + generics: account_type + .generics + .iter() + .map(|_| IdlGenericArg::Type { ty: IdlType::String }) + .collect(), + }; + + // Re-encode the value using Borsh + let re_encoded_data = borsh_encode_value_to_idl_type( + &parsed_value, + &defined_type, + &idl.types, + None, + ) + .map_err(|e| { + SurfpoolError::internal(format!("Failed to re-encode account data using Borsh: {}", e)) + })?; + + // Reconstruct the account data with discriminator and preserve any trailing bytes + let mut new_account_data = + Vec::with_capacity(8 + re_encoded_data.len() + leftover_bytes.len()); + new_account_data.extend_from_slice(discriminator); + new_account_data.extend_from_slice(&re_encoded_data); + new_account_data.extend_from_slice(leftover_bytes); + + Ok(new_account_data) + } + /// Subscribes for updates on a transaction signature for a given subscription type. /// /// # Arguments @@ -1950,6 +2364,45 @@ impl SurfnetSvm { } fixtures } + + /// Registers a scenario for execution by scheduling its overrides + /// + /// The `slot` parameter is the base slot from which relative override slot heights are calculated. + /// If not provided, uses the current slot. 
+ pub fn register_scenario( + &mut self, + scenario: surfpool_types::Scenario, + slot: Option<Slot>, + ) -> SurfpoolResult<()> { + // Use provided slot or current slot as the base for relative slot heights + let base_slot = slot.unwrap_or(self.latest_epoch_info.absolute_slot); + + info!( + "Registering scenario: {} ({}) with {} overrides at base slot {}", + scenario.name, + scenario.id, + scenario.overrides.len(), + base_slot + ); + + // Schedule overrides by adding base slot to their scenario-relative slots + for override_instance in scenario.overrides { + let scenario_relative_slot = override_instance.scenario_relative_slot; + let absolute_slot = base_slot + scenario_relative_slot; + + debug!( + "Scheduling override at absolute slot {} (base {} + relative {})", + absolute_slot, base_slot, scenario_relative_slot + ); + + self.scheduled_overrides + .entry(absolute_slot) + .or_insert_with(Vec::new) + .push(override_instance); + } + + Ok(()) + } } #[cfg(test)] diff --git a/crates/core/src/tests/integration.rs b/crates/core/src/tests/integration.rs index 2a15b8a2..632b8bb5 100644 --- a/crates/core/src/tests/integration.rs +++ b/crates/core/src/tests/integration.rs @@ -428,6 +428,9 @@ async fn test_add_alt_entries_fetching() { Ok(SimnetEvent::ClockUpdate(_)) => { // do nothing } + Ok(SimnetEvent::SystemClockUpdated(_)) => { + // do nothing - clock ticks from time travel or normal progression + } other => println!("Unexpected event: {:?}", other), } @@ -2780,7 +2783,7 @@ async fn test_profile_transaction_versioned_message() { .airdrop(&payer.pubkey(), 2 * lamports_to_send) .unwrap(); - svm_locker.confirm_current_block().unwrap(); + svm_locker.confirm_current_block(&None).await.unwrap(); // Create a transfer instruction let instruction = transfer(&payer.pubkey(), &recipient, lamports_to_send); @@ -2851,7 +2854,10 @@ async fn test_get_local_signatures_without_limit() { .airdrop(&payer.pubkey(), lamports_to_send * 2) .unwrap(); - 
svm_locker_for_context.confirm_current_block().unwrap(); + svm_locker_for_context + .confirm_current_block(&None) + .await + .unwrap(); let create_account_instruction = system_instruction::create_account( &payer.pubkey(), @@ -2880,7 +2886,10 @@ async fn test_get_local_signatures_without_limit() { .await .unwrap(); // Confirm the block after creating the account - svm_locker_for_context.confirm_current_block().unwrap(); + svm_locker_for_context + .confirm_current_block(&None) + .await + .unwrap(); // Now create the transfer transaction let instruction = transfer(&payer.pubkey(), &recipient.pubkey(), lamports_to_send); @@ -2898,7 +2907,10 @@ async fn test_get_local_signatures_without_limit() { .unwrap(); // Confirm the current block to create a block with the transaction signature - svm_locker_for_context.confirm_current_block().unwrap(); + svm_locker_for_context + .confirm_current_block(&None) + .await + .unwrap(); let get_local_signatures_response: JsonRpcResult>> = rpc_server @@ -2941,7 +2953,10 @@ async fn test_get_local_signatures_with_limit() { .airdrop(&payer.pubkey(), lamports_to_send * 10) .unwrap(); - svm_locker_for_context.confirm_current_block().unwrap(); + svm_locker_for_context + .confirm_current_block(&None) + .await + .unwrap(); // Get the initial number of signatures to establish a baseline let initial_signatures_response: JsonRpcResult>> = rpc_server @@ -2981,7 +2996,10 @@ async fn test_get_local_signatures_with_limit() { transaction_signatures.push(tx.signatures[0]); // Confirm the current block to create a new block with this transaction - svm_locker_for_context.confirm_current_block().unwrap(); + svm_locker_for_context + .confirm_current_block(&None) + .await + .unwrap(); } // Test with different limit values @@ -3109,7 +3127,7 @@ fn boot_simnet( #[test] fn test_time_travel_resume_paused_clock() { let rpc_server = SurfnetCheatcodesRpc; - let (svm_locker, simnet_cmd_tx, _) = boot_simnet(BlockProductionMode::Clock, Some(20)); + let (svm_locker, 
simnet_cmd_tx, _) = boot_simnet(BlockProductionMode::Clock, Some(100)); let (plugin_cmd_tx, _plugin_cmd_rx) = crossbeam_unbounded::(); let runloop_context = RunloopContext { @@ -3219,17 +3237,11 @@ fn test_time_travel_absolute_timestamp() { let target_timestamp = svm_locker.0.blocking_read().updated_at + seven_days; // Test time travel to absolute timestamp + // Note: time_travel now uses confirmation mechanism, so it waits internally let time_travel_response: JsonRpcResult = rpc_server.time_travel( Some(runloop_context.clone()), Some(TimeTravelConfig::AbsoluteTimestamp(target_timestamp)), ); - loop { - if let Ok(SimnetEvent::SystemClockUpdated(_clock_updated)) = - simnet_events_rx.recv_timeout(Duration::from_millis(5000)) - { - break; - } - } assert!( time_travel_response.is_ok(), @@ -3299,17 +3311,11 @@ fn test_time_travel_absolute_slot() { let target_slot = initial_epoch_info.absolute_slot + 1000000; // A future slot number // Test time travel to absolute slot + // Note: time_travel now uses confirmation mechanism, so it waits internally let time_travel_response: JsonRpcResult = rpc_server.time_travel( Some(runloop_context.clone()), Some(TimeTravelConfig::AbsoluteSlot(target_slot)), ); - loop { - if let Ok(SimnetEvent::SystemClockUpdated(_clock_updated)) = - simnet_events_rx.recv_timeout(Duration::from_millis(5000)) - { - break; - } - } assert!( time_travel_response.is_ok(), @@ -3377,17 +3383,11 @@ fn test_time_travel_absolute_epoch() { let target_epoch = initial_epoch_info.epoch + 100; // A future epoch number // Test time travel to absolute epoch + // Note: time_travel now uses confirmation mechanism, so it waits internally let time_travel_response: JsonRpcResult = rpc_server.time_travel( Some(runloop_context.clone()), Some(TimeTravelConfig::AbsoluteEpoch(target_epoch)), ); - loop { - if let Ok(SimnetEvent::SystemClockUpdated(_clock_updated)) = - simnet_events_rx.recv_timeout(Duration::from_millis(5000)) - { - break; - } - } assert!( 
time_travel_response.is_ok(), @@ -3889,8 +3889,8 @@ fn test_reset_account_cascade() { svm_locker.reset_account(owned, false).unwrap(); } -#[test] -fn test_reset_streamed_account() { +#[tokio::test(flavor = "multi_thread")] +async fn test_reset_streamed_account() { let (svm_instance, _simnet_events_rx, _geyser_events_rx) = SurfnetSvm::new(); let svm_locker = SurfnetSvmLocker::new(svm_instance); let p1 = Keypair::new(); @@ -3898,13 +3898,13 @@ fn test_reset_streamed_account() { svm_locker.airdrop(&p1.pubkey(), LAMPORTS_PER_SOL).unwrap(); // account is created in the SVM println!("Airdropped SOL to p1"); - let _ = svm_locker.confirm_current_block(); + let _ = svm_locker.confirm_current_block(&None).await; // Account still exists assert!(!svm_locker.get_account_local(&p1.pubkey()).inner.is_none()); svm_locker.stream_account(p1.pubkey(), false).unwrap(); - let _ = svm_locker.confirm_current_block(); + let _ = svm_locker.confirm_current_block(&None).await; // Account is cleaned up as soon as the block is processed assert!( svm_locker.get_account_local(&p1.pubkey()).inner.is_none(), @@ -3912,8 +3912,8 @@ fn test_reset_streamed_account() { ); } -#[test] -fn test_reset_streamed_account_cascade() { +#[tokio::test(flavor = "multi_thread")] +async fn test_reset_streamed_account_cascade() { let (svm_instance, _simnet_events_rx, _geyser_events_rx) = SurfnetSvm::new(); let svm_locker = SurfnetSvmLocker::new(svm_instance); @@ -3950,13 +3950,13 @@ fn test_reset_streamed_account_cascade() { assert!(!svm_locker.get_account_local(&owner).inner.is_none()); assert!(!svm_locker.get_account_local(&owned).inner.is_none()); - let _ = svm_locker.confirm_current_block(); + let _ = svm_locker.confirm_current_block(&None).await; // Accounts still exists assert!(!svm_locker.get_account_local(&owner).inner.is_none()); assert!(!svm_locker.get_account_local(&owned).inner.is_none()); svm_locker.stream_account(owner, true).unwrap(); - let _ = svm_locker.confirm_current_block(); + let _ = 
svm_locker.confirm_current_block(&None).await; // Owner is deleted, owned account is deleted assert!(svm_locker.get_account_local(&owner).inner.is_none()); diff --git a/crates/types/src/scenarios.rs b/crates/types/src/scenarios.rs index 57b24c94..3e32f531 100644 --- a/crates/types/src/scenarios.rs +++ b/crates/types/src/scenarios.rs @@ -94,8 +94,8 @@ pub struct OverrideInstance { pub template_id: String, /// Values for the template properties (flat key-value map with dot notation, e.g., "price_message.price_value") pub values: HashMap, - /// Slot height when this override should be applied - pub slot_height: Slot, + /// Relative slot when this override should be applied (relative to scenario registration slot) + pub scenario_relative_slot: Slot, /// Optional label for this instance pub label: Option, /// Whether this override is enabled @@ -109,12 +109,12 @@ pub struct OverrideInstance { } impl OverrideInstance { - pub fn new(template_id: String, slot_height: Slot, account: AccountAddress) -> Self { + pub fn new(template_id: String, scenario_relative_slot: Slot, account: AccountAddress) -> Self { Self { id: Uuid::new_v4().to_string(), template_id, values: HashMap::new(), - slot_height, + scenario_relative_slot, label: None, enabled: true, fetch_before_use: false, @@ -162,8 +162,8 @@ impl Scenario { pub fn add_override(&mut self, override_instance: OverrideInstance) { self.overrides.push(override_instance); - // Sort by slot height for efficient lookup - self.overrides.sort_by_key(|o| o.slot_height); + // Sort by slot for efficient lookup + self.overrides.sort_by_key(|o| o.scenario_relative_slot); } pub fn remove_override(&mut self, override_id: &str) { @@ -173,7 +173,7 @@ impl Scenario { pub fn get_overrides_for_slot(&self, slot: Slot) -> Vec<&OverrideInstance> { self.overrides .iter() - .filter(|o| o.enabled && o.slot_height == slot) + .filter(|o| o.enabled && o.scenario_relative_slot == slot) .collect() } } diff --git a/crates/types/src/types.rs 
b/crates/types/src/types.rs index 47c3f8f4..16473bab 100644 --- a/crates/types/src/types.rs +++ b/crates/types/src/types.rs @@ -481,6 +481,7 @@ pub enum SimnetCommand { SlotBackward(Option), CommandClock(Option<(Hash, String)>, ClockCommand), UpdateInternalClock(Option<(Hash, String)>, Clock), + UpdateInternalClockWithConfirmation(Option<(Hash, String)>, Clock, Sender), UpdateBlockProductionMode(BlockProductionMode), ProcessTransaction( Option<(Hash, String)>, @@ -498,6 +499,8 @@ pub enum SimnetCommand { #[derive(Debug)] pub enum ClockCommand { Pause, + /// Pause with confirmation - sends epoch info back when actually paused + PauseWithConfirmation(Sender), Resume, Toggle, UpdateSlotInterval(u64), From b69cb5c750dbccb902ae3f12d63fcedc68ee71ea Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 22:17:40 -0400 Subject: [PATCH 6/9] chore: update deps --- Cargo.lock | 10 ++++++---- Cargo.toml | 8 ++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ce860598..bcd9b68f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12075,6 +12075,7 @@ dependencies = [ "agave-feature-set", "agave-geyser-plugin-interface", "agave-reserved-account-keys", + "anchor-lang-idl", "base64 0.22.1", "bincode", "blake3", @@ -12147,6 +12148,7 @@ dependencies = [ "test-case", "tokio", "txtx-addon-kit", + "txtx-addon-network-svm", "txtx-addon-network-svm-types", "uuid", ] @@ -13052,9 +13054,9 @@ dependencies = [ [[package]] name = "txtx-addon-network-svm" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f683a2c9f5cd9e4ff30cacf8fdf52a83bed90232c17ece10f11cf200956134" +checksum = "e956b4c0ed49913bb4a08b3e6062a68f9aa71dab66c06e6f3abe88c9878fcb9d" dependencies = [ "async-recursion", "bincode", @@ -13096,9 +13098,9 @@ dependencies = [ [[package]] name = "txtx-addon-network-svm-types" -version = "0.3.14" +version = "0.3.15" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "991c5b1f0484b8fdd21b0c1db68638c5ee3fc0c4f6318a5dcbed7ca66a0cd659" +checksum = "30eaa1527896106eea3fce87bf1aac932c31c45ff94b544c0eabbec2f63ebd01" dependencies = [ "anchor-lang-idl", "borsh 1.5.7", diff --git a/Cargo.toml b/Cargo.toml index cced3167..9af6302a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -159,9 +159,9 @@ surfpool-studio-ui = { path = "crates/studio", default-features = false } surfpool-subgraph = { path = "crates/subgraph", default-features = false } surfpool-types = { path = "crates/types", default-features = false } -txtx-addon-kit = "0.4.11" -txtx-addon-network-svm = { version = "0.3.15" } -txtx-addon-network-svm-types = { version = "0.3.14" } +txtx-addon-kit = "0.4.10" +txtx-addon-network-svm = { version = "0.3.16" } +txtx-addon-network-svm-types = { version = "0.3.15" } txtx-cloud = { version = "0.1.13", features = [ "clap", "toml", @@ -170,7 +170,7 @@ txtx-core = { version = "0.4.13" } txtx-gql = { version = "0.3.7" } txtx-supervisor-ui = { version = "0.2.9", default-features = false } -[patch.crates-io] +# [patch.crates-io] ## Local # txtx-addon-kit = { path = "../txtx/crates/txtx-addon-kit" } # txtx-core = { path = "../txtx/crates/txtx-core" } From dbc2711b4ceba94c7e41b59c4d428ea168e7c3e8 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 22:19:44 -0400 Subject: [PATCH 7/9] chore: cargo fmt --- crates/core/src/surfnet/svm.rs | 39 +++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/crates/core/src/surfnet/svm.rs b/crates/core/src/surfnet/svm.rs index 05ecda2e..acbb734e 100644 --- a/crates/core/src/surfnet/svm.rs +++ b/crates/core/src/surfnet/svm.rs @@ -60,7 +60,10 @@ use surfpool_types::{ ComputeUnitsEstimationResult, KeyedProfileResult, UiKeyedProfileResult, UuidOrSignature, }, }; -use txtx_addon_kit::{indexmap::IndexMap, types::types::{AddonJsonConverter, Value}}; +use txtx_addon_kit::{ + 
indexmap::IndexMap, + types::types::{AddonJsonConverter, Value}, +}; use txtx_addon_network_svm::codec::idl::borsh_encode_value_to_idl_type; use txtx_addon_network_svm_types::subgraph::idl::{ parse_bytes_to_value_with_expected_idl_type_def_ty, @@ -102,7 +105,10 @@ pub fn apply_override_to_decoded_account( match current { Value::Object(map) => { current = map.get_mut(&part.to_string()).ok_or_else(|| { - SurfpoolError::internal(format!("Path segment '{}' not found in decoded account", part)) + SurfpoolError::internal(format!( + "Path segment '{}' not found in decoded account", + part + )) })?; } _ => { @@ -143,13 +149,15 @@ fn json_to_txtx_value(json: &serde_json::Value) -> SurfpoolResult { } else if let Some(f) = n.as_f64() { Ok(Value::Float(f)) } else { - Err(SurfpoolError::internal(format!("Unable to convert number: {}", n))) + Err(SurfpoolError::internal(format!( + "Unable to convert number: {}", + n + ))) } } serde_json::Value::String(s) => Ok(Value::String(s.clone())), serde_json::Value::Array(arr) => { - let txtx_arr: Result, _> = - arr.iter().map(json_to_txtx_value).collect(); + let txtx_arr: Result, _> = arr.iter().map(json_to_txtx_value).collect(); Ok(Value::Array(Box::new(txtx_arr?))) } serde_json::Value::Object(obj) => { @@ -1608,20 +1616,21 @@ impl SurfnetSvm { generics: account_type .generics .iter() - .map(|_| IdlGenericArg::Type { ty: IdlType::String }) + .map(|_| IdlGenericArg::Type { + ty: IdlType::String, + }) .collect(), }; // Re-encode the value using Borsh - let re_encoded_data = borsh_encode_value_to_idl_type( - &parsed_value, - &defined_type, - &idl.types, - None, - ) - .map_err(|e| { - SurfpoolError::internal(format!("Failed to re-encode account data using Borsh: {}", e)) - })?; + let re_encoded_data = + borsh_encode_value_to_idl_type(&parsed_value, &defined_type, &idl.types, None) + .map_err(|e| { + SurfpoolError::internal(format!( + "Failed to re-encode account data using Borsh: {}", + e + )) + })?; // Reconstruct the account data with 
discriminator and preserve any trailing bytes let mut new_account_data = From 5166589fd327d4abd339ac7121874590c2b8f796 Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 22:36:41 -0400 Subject: [PATCH 8/9] feat: easter egg --- crates/cli/src/tui/simnet.rs | 45 ++++++++++++++++++++++++++++++++---- 1 file changed, 41 insertions(+), 4 deletions(-) diff --git a/crates/cli/src/tui/simnet.rs b/crates/cli/src/tui/simnet.rs index e0f7c670..35432296 100644 --- a/crates/cli/src/tui/simnet.rs +++ b/crates/cli/src/tui/simnet.rs @@ -5,7 +5,7 @@ use std::{ time::{Duration, Instant}, }; -use chrono::{DateTime, Local}; +use chrono::{DateTime, Datelike, Local}; use crossbeam::channel::{Select, Sender, unbounded}; use crossterm::{ event::{self, Event, KeyCode, KeyEventKind, KeyModifiers}, @@ -40,6 +40,38 @@ const SURFPOOL_LINK: &str = "Need help? https://docs.surfpool.run/tui"; const ITEM_HEIGHT: usize = 1; +/// Theme variants for the TUI +#[derive(Debug, Clone, Copy, PartialEq)] +enum Theme { + Classic, + Halloween, +} + +impl Theme { + /// Detect the current theme based on the date + fn detect() -> Self { + let now = Local::now(); + if now.month() == 10 && now.day() == 31 { + Theme::Halloween + } else { + Theme::Classic + } + } + + /// Get the color palette for this theme + fn palette(&self) -> &'static tailwind::Palette { + match self { + Theme::Classic => &palette::tailwind::EMERALD, + Theme::Halloween => &palette::tailwind::ORANGE, + } + } + + /// Get the slot symbol for this theme + fn slot_symbol(&self) -> &'static str { + "● " + } +} + // Terminal detection constants const MACOS_TERMINAL: &str = "Apple_Terminal"; /// XTerm-based terminals @@ -242,6 +274,7 @@ struct App { paused: bool, blink_state: bool, last_blink: Instant, + theme: Theme, } impl App { @@ -253,7 +286,8 @@ impl App { displayed_url: DisplayedUrl, breaker: Option, ) -> App { - let palette = palette::tailwind::EMERALD; + let theme = Theme::detect(); + let palette = theme.palette(); let mut events 
= vec![]; let (rpc_url, ws_url, datasource) = match &displayed_url { @@ -282,7 +316,7 @@ impl App { App { state: TableState::default().with_offset(0), scroll_state: ScrollbarState::new(0), - colors: ColorTheme::new(&palette), + colors: ColorTheme::new(palette), simnet_events_rx, simnet_commands_tx, clock: Clock::default(), @@ -304,6 +338,7 @@ impl App { paused: false, blink_state: false, last_blink: Instant::now(), + theme, } } @@ -882,6 +917,8 @@ fn render_slots(f: &mut Frame, app: &mut App, area: Rect) { let total_chars = line_len * 3; let cursor = app.slot() % total_chars; + let symbol = app.theme.slot_symbol(); + let mut lines = Vec::new(); for chunk in (0..total_chars).collect::>().chunks(line_len) { let mut spans = Vec::new(); @@ -895,7 +932,7 @@ fn render_slots(f: &mut Frame, app: &mut App, area: Rect) { } else { app.colors.dark_gray }; - spans.push(Span::styled("● ", color)); + spans.push(Span::styled(symbol, color)); } lines.push(Line::from(spans)); } From f23fe6a80fd19cfc6faa8470c7a02df087b6b19c Mon Sep 17 00:00:00 2001 From: Ludo Galabru Date: Tue, 28 Oct 2025 22:43:12 -0400 Subject: [PATCH 9/9] fix: potential deadlock --- crates/core/src/runloops/mod.rs | 3 +++ crates/core/src/surfnet/locker.rs | 8 +++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/crates/core/src/runloops/mod.rs b/crates/core/src/runloops/mod.rs index 77e8a1b5..06fa50c6 100644 --- a/crates/core/src/runloops/mod.rs +++ b/crates/core/src/runloops/mod.rs @@ -206,6 +206,9 @@ pub async fn start_block_production_runloop( // Send Pause to clock runloop let _ = clock_command_tx.send(ClockCommand::Pause); + // Give the clock time to process the pause command + tokio::time::sleep(tokio::time::Duration::from_millis(slot_time / 2)).await; + // Loop and check if the slot has stopped advancing let max_attempts = 10; let mut attempts = 0; diff --git a/crates/core/src/surfnet/locker.rs b/crates/core/src/surfnet/locker.rs index 00764f29..01c90cd9 100644 --- 
a/crates/core/src/surfnet/locker.rs +++ b/crates/core/src/surfnet/locker.rs @@ -2785,12 +2785,10 @@ impl SurfnetSvmLocker { &self, remote_ctx: &Option<(SurfnetRemoteClient, CommitmentConfig)>, ) -> SurfpoolResult<()> { - // First, confirm the block synchronously - self.with_svm_writer(|svm_writer| svm_writer.confirm_current_block())?; - - // Then, materialize any scheduled overrides for the new slot - // TODO: Pass remote client when available for fetch_before_use support + // Acquire write lock once and do both operations atomically + // This prevents lock contention and potential deadlocks from mixing blocking and async locks let mut svm_writer = self.0.write().await; + svm_writer.confirm_current_block()?; svm_writer.materialize_overrides(remote_ctx).await }