Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ juniper_graphql_ws = { version = "0.4.0", default-features = false }
lazy_static = "1.5.0"
libloading = "0.7.4"
litesvm = { version = "0.8.1", features = ["nodejs-internal"] }
litesvm-token = "0.8.1"
log = "0.4.27"
mime_guess = { version = "2.0.4", default-features = false }
mustache = "0.9.0"
Expand Down Expand Up @@ -137,6 +138,7 @@ toml = { version = "0.8.23", default-features = false }
tracing = { version = "0.1.41", default-features = false }
url = { version = "1.7.2", default-features = false }
uuid = "1.15.1"
walkdir = "2.3.3"
zip = { version = "0.6", features = ["deflate"], default-features = false }

surfpool-core = { path = "crates/core", default-features = false }
Expand Down
1 change: 1 addition & 0 deletions crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ solana-transaction = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true, optional = true }
url = { workspace = true }
walkdir = { workspace = true }

surfpool-core = { workspace = true }
surfpool-gql = { workspace = true }
Expand Down
7 changes: 3 additions & 4 deletions crates/cli/src/cli/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -224,10 +224,9 @@ pub struct StartSimnet {
/// Start surfpool with some CI adequate settings (eg. surfpool start --ci)
#[clap(long = "ci", action=ArgAction::SetTrue, default_value = "false")]
pub ci: bool,
/// Apply suggested defaults for runbook generation and execution.
/// This includes executing any deployment runbooks, and generating in-memory deployment runbooks if none exist. (eg. surfpool start --autopilot)
#[clap(long = "autopilot", action=ArgAction::SetTrue, default_value = "false")]
pub autopilot: bool,
/// Apply suggested defaults for runbook generation and execution when running as part of an anchor test suite (eg. surfpool start --legacy-anchor-compatibility)
#[clap(long = "legacy-anchor-compatibility", action=ArgAction::SetTrue, default_value = "false")]
pub anchor_compat: bool,
}

#[derive(clap::ValueEnum, PartialEq, Clone, Debug)]
Expand Down
12 changes: 8 additions & 4 deletions crates/cli/src/cli/simnet/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -431,14 +431,14 @@ async fn write_and_execute_iac(
.map_err(|e| format!("Failed to detect project framework: {}", e))?;

let (progress_tx, progress_rx) = crossbeam::channel::unbounded();
if let Some((framework, programs)) = deployment {
if let Some((framework, programs, genesis_accounts)) = deployment {
// Is infrastructure-as-code (IaC) already setup?
let base_location =
FileLocation::from_path_string(&cmd.manifest_path)?.get_parent_location()?;
let mut txtx_manifest_location = base_location.clone();
txtx_manifest_location.append_path("txtx.yml")?;
let txtx_manifest_exists = txtx_manifest_location.exists();
let do_write_scaffold = !cmd.autopilot && !txtx_manifest_exists;
let do_write_scaffold = !cmd.anchor_compat && !txtx_manifest_exists;
if do_write_scaffold {
// Scaffold IaC
scaffold_iac_layout(
Expand All @@ -452,12 +452,16 @@ async fn write_and_execute_iac(
// If there were existing on-disk runbooks, we'll execute those instead of in-memory ones
// If there were no existing runbooks and the user requested legacy anchor compatibility, we'll generate and execute in-memory runbooks
// If there were no existing runbooks and the user did not request legacy anchor compatibility, we'll generate and execute on-disk runbooks
let do_execute_in_memory_runbooks = cmd.autopilot && !txtx_manifest_exists;
let do_execute_in_memory_runbooks = cmd.anchor_compat && !txtx_manifest_exists;

let mut on_disk_runbook_data = None;
let mut in_memory_runbook_data = None;
if do_execute_in_memory_runbooks {
in_memory_runbook_data = Some(scaffold_in_memory_iac(&framework, &programs)?);
in_memory_runbook_data = Some(scaffold_in_memory_iac(
&framework,
&programs,
&genesis_accounts,
)?);
} else {
let runbooks_ids_to_execute = cmd.runbooks.clone();
on_disk_runbook_data = Some((txtx_manifest_location.clone(), runbooks_ids_to_execute));
Expand Down
216 changes: 212 additions & 4 deletions crates/cli/src/scaffold/anchor.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,23 @@
#![allow(dead_code)]

use std::{collections::BTreeMap, str::FromStr};
use std::{
collections::{BTreeMap, HashMap},
path::{Path, PathBuf},
str::FromStr,
};

use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
use txtx_core::kit::helpers::fs::FileLocation;
use url::Url;
use walkdir::WalkDir;

use super::ProgramMetadata;
use crate::types::Framework;
use crate::{scaffold::GenesisEntry, types::Framework};

pub fn try_get_programs_from_project(
base_location: FileLocation,
) -> Result<Option<(Framework, Vec<ProgramMetadata>)>, String> {
) -> Result<Option<(Framework, Vec<ProgramMetadata>, Option<Vec<GenesisEntry>>)>, String> {
let mut manifest_location = base_location.clone();
manifest_location.append_path("Anchor.toml")?;
if manifest_location.exists() {
Expand All @@ -30,8 +35,38 @@ pub fn try_get_programs_from_project(
programs.push(ProgramMetadata::new(program_name, &deployment.idl));
}
}
let mut genesis_entries = manifest
.test
.as_ref()
.and_then(|test| test.genesis.as_ref())
.cloned()
.unwrap_or_default();
if let Some(test_configs) = TestConfig::discover_test_toml(&base_location.expect_path_buf())
.map_err(|e| {
format!(
"failed to discover Test.toml files in workspace: {}",
e.to_string()
)
})?
{
for (_, config) in test_configs.test_suite_configs.iter() {
if let Some(test_config) = config.test.as_ref() {
if let Some(genesis) = test_config.genesis.as_ref() {
genesis_entries.extend(genesis.clone());
}
}
}
}

Ok(Some((Framework::Anchor, programs)))
Ok(Some((
Framework::Anchor,
programs,
if genesis_entries.is_empty() {
None
} else {
Some(genesis_entries)
},
)))
} else {
Ok(None)
}
Expand All @@ -46,6 +81,7 @@ pub struct AnchorManifest {
pub programs: ProgramsConfig,
pub scripts: ScriptsConfig,
pub workspace: WorkspaceConfig,
pub test: Option<TestValidatorConfig>,
}

#[derive(Debug, Deserialize)]
Expand All @@ -57,6 +93,7 @@ pub struct AnchorManifestFile {
// provider: Provider,
workspace: Option<WorkspaceConfig>,
scripts: Option<ScriptsConfig>,
test: Option<TestValidatorConfig>,
}

impl AnchorManifest {
Expand All @@ -72,6 +109,7 @@ impl AnchorManifest {
.programs
.map_or(Ok(BTreeMap::new()), |p| deser_programs(p, base_location))?,
workspace: cfg.workspace.unwrap_or_default(),
test: cfg.test,
})
}
}
Expand Down Expand Up @@ -122,6 +160,170 @@ impl Default for RegistryConfig {
}
}

/// Raw, on-disk representation of an Anchor `Test.toml` file, before the
/// `extends` chain has been fully resolved into a [`TestToml`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestTomlFile {
    /// Paths (possibly relative to this file's directory) of base `Test.toml`
    /// files whose contents are merged in before this file's own values.
    pub extends: Option<Vec<String>>,
    /// The `[test]` table, carrying genesis account/program entries.
    pub test: Option<TestValidatorConfig>,
    /// The `[scripts]` table declared by this file, if any.
    pub scripts: Option<ScriptsConfig>,
}

impl TestTomlFile {
    /// Parse the `Test.toml` at `path`, recursively resolving its `extends`
    /// chain. Base files are merged in first and the file itself last, so the
    /// extending file's values take precedence (see [`TestTomlFile::merge`]).
    /// Finally, every genesis `program` path is canonicalized relative to this
    /// file's directory.
    fn from_path(path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
        let s = std::fs::read_to_string(&path)?;
        let parsed_toml: Self = toml::from_str(&s)?;
        // Start from an empty accumulator: bases merge in first, this file last.
        let mut current_toml = TestTomlFile {
            extends: None,
            test: None,
            scripts: None,
        };
        if let Some(bases) = &parsed_toml.extends {
            for base in bases {
                let mut canonical_base = base.clone();
                // Base paths may be relative to the extending file's location.
                canonical_base = canonicalize_filepath_from_origin(&canonical_base, &path)?;
                current_toml.merge(TestTomlFile::from_path(&canonical_base)?);
            }
        }
        current_toml.merge(parsed_toml);

        // Make every genesis program path absolute. Entries inherited from base
        // files were already canonicalized by the recursive call above, so
        // re-canonicalizing them here is a no-op.
        if let Some(test) = &mut current_toml.test {
            if let Some(genesis_programs) = &mut test.genesis {
                for entry in genesis_programs {
                    entry.program = canonicalize_filepath_from_origin(&entry.program, &path)?;
                }
            }
        }
        Ok(current_toml)
    }
}

impl From<TestTomlFile> for TestToml {
fn from(value: TestTomlFile) -> Self {
Self {
test: value.test,
scripts: value.scripts.unwrap_or_default(),
}
}
}

impl TestTomlFile {
    /// Merge `other` into `self`, with `other` taking precedence on conflicts.
    ///
    /// - Scripts: same-named entries from `other` overwrite those in `self`.
    /// - Genesis: entries from `other` replace entries in `self` that share the
    ///   same `address`; entries with new addresses are appended.
    /// - `extends` is kept from `self` untouched.
    ///
    /// Call order matters: `from_path` merges base files first and the current
    /// file last, so the current file's values win.
    fn merge(&mut self, other: Self) {
        let mut my_scripts = self.scripts.take();
        match &mut my_scripts {
            None => my_scripts = other.scripts,
            Some(my_scripts) => {
                if let Some(other_scripts) = other.scripts {
                    for (name, script) in other_scripts {
                        // Later (other) definitions win for duplicate script names.
                        my_scripts.insert(name, script);
                    }
                }
            }
        }

        let mut my_test = self.test.take();
        match &mut my_test {
            Some(my_test) => {
                if let Some(other_test) = other.test {
                    if let Some(other_genesis) = other_test.genesis {
                        match &mut my_test.genesis {
                            Some(my_genesis) => {
                                for other_entry in other_genesis {
                                    // Replace the existing entry with the same
                                    // address, otherwise append.
                                    match my_genesis
                                        .iter()
                                        .position(|g| *g.address == other_entry.address)
                                    {
                                        None => my_genesis.push(other_entry),
                                        Some(i) => my_genesis[i] = other_entry,
                                    }
                                }
                            }
                            None => my_test.genesis = Some(other_genesis),
                        }
                    }
                }
            }
            None => my_test = other.test,
        };

        // Instantiating a new Self object here ensures that
        // this function will fail to compile if new fields get added
        // to Self. This is useful as a reminder if they also require merging
        *self = Self {
            test: my_test,
            scripts: my_scripts,
            extends: self.extends.take(),
        };
    }
}

fn canonicalize_filepath_from_origin(
file_path: impl AsRef<Path>,
origin: impl AsRef<Path>,
) -> Result<String> {
use anyhow::Context;
let previous_dir = std::env::current_dir()?;
std::env::set_current_dir(origin.as_ref().parent().unwrap())?;
let result = std::fs::canonicalize(&file_path)
.with_context(|| {
format!(
"Error reading (possibly relative) path: {}. If relative, this is the path that was used as the current path: {}",
&file_path.as_ref().display(),
&origin.as_ref().display()
)
})?
.display()
.to_string();
std::env::set_current_dir(previous_dir)?;
Ok(result)
}

/// Fully-resolved view of a `Test.toml` file after the `extends` chain has
/// been merged.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestToml {
    /// Merged `[test]` table, if any file in the chain declared one.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub test: Option<TestValidatorConfig>,
    /// Merged `[scripts]` table; empty when no file declared scripts.
    pub scripts: ScriptsConfig,
}
impl TestToml {
    /// Load and fully resolve the `Test.toml` at `p` (including any `extends`
    /// chain), wrapping any failure with the offending path for diagnostics.
    pub fn from_path(p: impl AsRef<Path>) -> Result<Self> {
        let path = p.as_ref();
        match TestTomlFile::from_path(path) {
            Ok(raw) => Ok(raw.into()),
            Err(e) => Err(anyhow!(
                "Unable to read Test.toml at {}: {}",
                path.display(),
                e
            )),
        }
    }
}
/// All `Test.toml` suite configurations discovered under a workspace root,
/// keyed by the path of each discovered `Test.toml` file.
#[derive(Debug, Clone)]
pub struct TestConfig {
    pub test_suite_configs: HashMap<PathBuf, TestToml>,
}
/// Directory-walk filter: returns `true` for entries that should be skipped.
/// Matches hidden entries (file names starting with `.`, except the bare walk
/// root `"."` itself) and — despite the name — also `target` build directories.
///
/// The previous `s.starts_with("./.")` branch was dead code: it is subsumed by
/// `starts_with('.')`, and `DirEntry::file_name` only ever yields the final
/// path component, which can never contain a separator.
fn is_hidden(entry: &walkdir::DirEntry) -> bool {
    entry
        .file_name()
        .to_str()
        .map(|s| (s != "." && s.starts_with('.')) || s == "target")
        .unwrap_or(false)
}
impl TestConfig {
    /// Recursively walk `root` and parse every `Test.toml` found, skipping
    /// hidden directories and `target` (see `is_hidden`).
    ///
    /// Returns `Ok(None)` when no `Test.toml` exists anywhere under `root`,
    /// and an error if the walk fails or any discovered file fails to parse.
    pub fn discover_test_toml(root: impl AsRef<Path>) -> Result<Option<Self>> {
        let mut test_suite_configs = HashMap::new();
        for entry in WalkDir::new(root).into_iter().filter_entry(|e| !is_hidden(e)) {
            let entry = entry?;
            if entry.file_name() == "Test.toml" {
                let entry_path = entry.path();
                let test_toml = TestToml::from_path(entry_path)?;
                // Reuse `entry_path` rather than calling `entry.path()` twice.
                test_suite_configs.insert(entry_path.to_path_buf(), test_toml);
            }
        }

        // `match` on a bool (clippy::match_bool) replaced with plain if/else.
        if test_suite_configs.is_empty() {
            Ok(None)
        } else {
            Ok(Some(Self { test_suite_configs }))
        }
    }
}

#[derive(Debug, Default, Serialize, Deserialize)]
pub struct AnchorProgramDeployment {
pub address: String,
Expand Down Expand Up @@ -241,6 +443,12 @@ pub struct WorkspaceConfig {
pub types: String,
}

/// The `[test]` table of an `Anchor.toml` / `Test.toml`: the validator
/// configuration consumed here is the list of genesis entries to pre-load.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestValidatorConfig {
    /// Genesis account/program entries declared under `[[test.genesis]]`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub genesis: Option<Vec<GenesisEntry>>,
}

fn deser_programs(
programs: BTreeMap<String, BTreeMap<String, serde_json::Value>>,
base_location: &FileLocation,
Expand Down
Loading