Skip to content

Commit

Permalink
refactor(update_metadata) retry flow and async (samuelvanderwaal#146)
Browse files Browse the repository at this point in the history
* refactor(update_metadata) move into dir with submodules

* add update creators-all handler

* refactor: create Action trait

* chore: run cargo fmt

* chore: clean up
  • Loading branch information
samuelvanderwaal authored Jul 10, 2022
1 parent e6fc941 commit 59ee247
Show file tree
Hide file tree
Showing 20 changed files with 1,136 additions and 19 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ license = "Apache-2.0"

[dependencies]
anyhow = "1.0.44"
async-trait = "0.1.56"
base64 = "0.13.0"
borsh = "0.9.1"
bs58 = "0.4.0"
Expand All @@ -34,14 +35,14 @@ serde_yaml = "0.8.21"
shellexpand = "2.1.0"
solana-client = "1.10.17"
solana-account-decoder = "1.10.17"
solana-sdk = "1.10.17"
solana-logger = "1.11.1"
solana-program = "1.10.17"
solana-sdk = "1.10.17"
spl-associated-token-account = "1.0.5"
spl-token = "3.2.0"
structopt = "0.3.23"
thiserror = "1.0.30"
tokio = "1.14.1"
solana-logger = "1.11.1"

[features]

Expand Down
188 changes: 188 additions & 0 deletions src/cache/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,188 @@
use anyhow::{anyhow, Result as AnyResult};
use async_trait::async_trait;
use indexmap::IndexMap;
use log::info;
use serde::{Deserialize, Serialize};
use solana_client::rpc_client::RpcClient;
use solana_sdk::signature::Keypair;
use std::fs::{File, OpenOptions};
use std::path::Path;
use std::sync::Arc;
use std::{io::Write, ops::Deref};

use crate::errors::ActionError;
use crate::spinner::create_alt_spinner;

/// Map of mint address -> cache entry. Uses `IndexMap` so the on-disk
/// JSON preserves insertion order across retry runs.
#[derive(Debug, Deserialize, Serialize)]
pub struct Cache(pub IndexMap<String, CacheItem>);
/// Results of one batch round: one `Result` per attempted mint.
pub type CacheResults = Vec<Result<(), ActionError>>;

impl Default for Cache {
fn default() -> Self {
Self::new()
}
}

// Deref to the inner map so read-only `IndexMap` methods (`keys`, `len`,
// `is_empty`, ...) can be called directly on a `Cache`.
impl Deref for Cache {
    type Target = IndexMap<String, CacheItem>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl Cache {
    /// Creates an empty cache.
    pub fn new() -> Self {
        Cache(IndexMap::new())
    }

    /// Serializes the cache as JSON into `writer`, consuming the cache.
    pub fn write<W: Write>(self, writer: W) -> AnyResult<()> {
        serde_json::to_writer(writer, &self)?;
        Ok(())
    }

    /// Replaces the cache contents with the failures in `errors`.
    ///
    /// Any `Ok` entries are skipped rather than panicking (the original
    /// `unwrap_err` would abort if a success slipped in), so callers may
    /// pass an unpartitioned result list.
    pub fn update_errors(&mut self, errors: Vec<Result<(), ActionError>>) {
        // Clear out old errors so only the latest round's failures remain.
        self.0.clear();

        for error in errors.iter().filter_map(|r| r.as_ref().err()) {
            match error {
                ActionError::ActionFailed(mint_address, _) => {
                    let item = CacheItem {
                        error: Some(error.to_string()),
                    };

                    self.0.insert(mint_address.to_string(), item);
                }
            }
        }
    }
}

/// A single cache entry: the (display-formatted) error for a failed mint,
/// keyed in the cache by the mint address.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct CacheItem {
    pub error: Option<String>,
}

/// Arguments for running an action over a whole mint list (or retrying
/// the failures recorded in a previous run's cache file).
pub struct BatchActionArgs {
    pub client: RpcClient,
    pub keypair: Keypair,
    /// Path to a JSON array of mint addresses; mutually exclusive with `cache_file`.
    pub mint_list: Option<String>,
    /// Path to a previous run's cache file to retry; mutually exclusive with `mint_list`.
    pub cache_file: Option<String>,
    pub new_value: String,
    /// Maximum number of retry rounds for failed items.
    pub retries: u8,
}

/// Arguments for a single-mint action invocation; `client` and `keypair`
/// are `Arc`-wrapped so they can be shared across spawned tasks.
pub struct RunActionArgs {
    pub client: Arc<RpcClient>,
    pub keypair: Arc<Keypair>,
    pub mint_account: String,
    pub new_value: String,
}

#[async_trait]
pub trait Action {
    /// Performs the action for a single mint; implemented per command.
    async fn action(args: RunActionArgs) -> Result<(), ActionError>;

    /// Runs [`Self::action`] concurrently over a mint list (or the items in a
    /// previous run's cache file), retrying failures up to `args.retries`
    /// times and writing any remaining failures back to the cache file.
    async fn run(args: BatchActionArgs) -> AnyResult<()> {
        // A mint list and a cache file are mutually exclusive inputs.
        if args.cache_file.is_some() && args.mint_list.is_some() {
            return Err(anyhow!(
                "Can only specify either a cache or a mint_list file."
            ));
        }

        // Default name, if we don't get an output_file option or a cache file.
        let mut cache_file_name = String::from("mb-cache-update.json");
        let mut cache = Cache::new();

        let mut mint_list: Vec<String> = if let Some(mint_list) = args.mint_list {
            let f = File::open(mint_list)?;
            serde_json::from_reader(f)?
        } else if let Some(cache_path) = args.cache_file {
            println!("Retrying items from cache file. . .");
            cache_file_name = cache_path;

            // Retry the keys (mint addresses) recorded in the cache.
            let f = File::open(&cache_file_name)?;
            let cache: Cache = serde_json::from_reader(f)?;
            cache.0.keys().map(|k| k.to_string()).collect()
        } else {
            return Err(anyhow!(
                "Please specify either a mint_list file or a cache file."
            ));
        };

        // Open (create or truncate) the cache file up front so write
        // permission problems surface before any network work is done.
        let f = if !Path::new(&cache_file_name).exists() {
            File::create(&cache_file_name)?
        } else {
            OpenOptions::new()
                .read(true)
                .write(true)
                .truncate(true)
                .open(&cache_file_name)?
        };

        let mut counter = 0u8;
        let client = Arc::new(args.client);

        let keypair = Arc::new(args.keypair);

        loop {
            let remaining_mints = mint_list.clone();

            info!("Sending network requests...");
            let spinner = create_alt_spinner("Sending network requests....");
            // Create a vector of futures to execute.
            let update_tasks: Vec<_> = remaining_mints
                .into_iter()
                .map(|mint_address| {
                    tokio::spawn(Self::action(RunActionArgs {
                        client: client.clone(),
                        keypair: keypair.clone(),
                        mint_account: mint_address,
                        new_value: args.new_value.clone(),
                    }))
                })
                .collect();
            spinner.finish();

            let update_tasks_len = update_tasks.len();

            // Wait for all the tasks to resolve and push the results to our results vector
            let mut update_results = Vec::new();
            let spinner = create_alt_spinner("Awaiting results...");
            for task in update_tasks {
                update_results.push(task.await.unwrap());
            }
            spinner.finish();

            // Partition migration results.
            let (_update_successful, update_failed): (CacheResults, CacheResults) =
                update_results.into_iter().partition(Result::is_ok);

            // If some of the actions failed and retries remain, record the
            // failures and loop again. Otherwise break out of the loop,
            // writing the cache to disk if anything is still failing.
            if !update_failed.is_empty() && counter < args.retries {
                counter += 1;
                println!(
                    "{}/{} updates failed. Retrying. . .",
                    &update_failed.len(),
                    update_tasks_len
                );
                cache.update_errors(update_failed);
                mint_list = cache.keys().map(|m| m.to_string()).collect();
            } else if update_failed.is_empty() {
                // None failed so we exit the loop.
                println!("All actions successfully run!");
                break;
            } else {
                println!("Reached max retries. Writing remaining items to cache.");
                // Record this final round's failures before writing; without
                // this the file holds the PREVIOUS round's errors (or nothing
                // at all when retries == 0).
                cache.update_errors(update_failed);
                cache.write(f)?;
                break;
            }
        }

        Ok(())
    }
}
12 changes: 12 additions & 0 deletions src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,18 @@ pub enum MigrateError {
MigrationFailed(MintAddress, NetworkError),
}

#[derive(Error, Debug)]
pub enum UpdateError {
#[error("Action failed with error: {1}")]
UpdateFailed(MintAddress, NetworkError),
}

#[derive(Error, Debug)]
pub enum ActionError {
#[error("Action failed with error: {1}")]
ActionFailed(MintAddress, NetworkError),
}

#[derive(Error, Debug)]
pub enum SolConfigError {
#[error("no home env var found")]
Expand Down
3 changes: 2 additions & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod burn;
pub mod cache;
pub mod collections;
pub mod constants;
pub mod data;
Expand All @@ -15,7 +16,7 @@ pub mod sign;
pub mod snapshot;
pub mod spinner;
pub mod theindexio;
pub mod update_metadata;
pub mod update;
pub mod uses;
pub mod utils;
pub mod wtf_errors;
11 changes: 7 additions & 4 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ use metaboss::process_subcommands::*;
async fn main() -> Result<()> {
let options = Opt::from_args();

solana_logger::setup_with_default("solana=off");
let log_level = format!("solana={}", options.log_level);
solana_logger::setup_with_default(&log_level);

let sol_config = parse_solana_config();

Expand All @@ -28,10 +29,10 @@ async fn main() -> Result<()> {
(config.json_rpc_url, config.commitment)
} else {
info!(
"Could not find a valid Solana-CLI config file. Defaulting to https://psytrbhymqlkfrhudd.dev.genesysgo.net:8899/ devnet node."
"Could not find a valid Solana-CLI config file. Defaulting to https://devnet.genesysgo.net devnet node."
);
(
String::from("https://psytrbhymqlkfrhudd.dev.genesysgo.net:8899/"),
String::from("https://devnet.genesysgo.net"),
String::from("confirmed"),
)
};
Expand Down Expand Up @@ -65,7 +66,9 @@ async fn main() -> Result<()> {
Command::Derive { derive_subcommands } => process_derive(derive_subcommands),
Command::Find { find_subcommands } => process_find(&client, find_subcommands)?,
Command::Mint { mint_subcommands } => process_mint(&client, mint_subcommands)?,
Command::Update { update_subcommands } => process_update(&client, update_subcommands)?,
Command::Update { update_subcommands } => {
process_update(client, update_subcommands).await?
}
Command::Set { set_subcommands } => process_set(&client, set_subcommands)?,
Command::Sign { sign_subcommands } => process_sign(&client, sign_subcommands)?,
Command::Snapshot {
Expand Down
2 changes: 1 addition & 1 deletion src/mint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ pub fn mint_from_uris(
sign,
) {
Ok(_) => (),
Err(e) => error!("Failed to mint {:?}: {}", &uri, e),
Err(e) => println!("Failed to mint {:?}: {}", &uri, e),
}
});
} else {
Expand Down
29 changes: 28 additions & 1 deletion src/opt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ pub struct Opt {
pub timeout: u64,

/// Log level
#[structopt(short, long, global = true, default_value = "warn")]
#[structopt(short, long, global = true, default_value = "off")]
pub log_level: String,

#[structopt(subcommand)]
Expand Down Expand Up @@ -816,6 +816,33 @@ pub enum UpdateSubcommands {
#[structopt(short = "A", long = "append")]
append: bool,
},
/// Update all the creators fields for a list of mints
#[structopt(name = "creators-all")]
CreatorsAll {
/// Path to the creator's keypair file
#[structopt(short, long)]
keypair: Option<String>,

/// Mint list
#[structopt(short = "L", long)]
mint_list: Option<String>,

/// Cache file
#[structopt(short, long)]
cache_file: Option<String>,

/// New creators in the format: address1:share:verified,address2:share:verified,...
#[structopt(short = "n", long)]
new_creators: String,

/// Should be appended instead of overwriting
#[structopt(short = "A", long = "append")]
append: bool,

/// Maximum retries: retry failed items up to this many times.
#[structopt(long, default_value = "1")]
retries: u8,
},
/// Update the data struct on a NFT
#[structopt(name = "data")]
Data {
Expand Down
Loading

0 comments on commit 59ee247

Please sign in to comment.