Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,569 changes: 845 additions & 724 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ ignore = "0.4.20"
# Do not update due to https://github.com/console-rs/indicatif/issues/317 and https://github.com/getsentry/sentry-cli/pull/1055
indicatif = "0.14.0"
itertools = "0.10.5"
java-properties = "1.4.1"
java-properties = "2.0.0"
lazy_static = "1.4.0"
libc = "0.2.139"
log = { version = "0.4.17", features = ["std"] }
Expand Down
15 changes: 7 additions & 8 deletions src/api/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -339,12 +339,12 @@ impl Api {
};

let ref_name = format!("sentry-cli-{}-{arch}{EXT}", capitalize_string(PLATFORM));
info!("Looking for file named: {}", ref_name);
info!("Looking for file named: {ref_name}");

if resp.status() == 200 {
let info: RegistryRelease = resp.convert()?;
for (filename, _download_url) in info.file_urls {
info!("Found asset {}", filename);
info!("Found asset {filename}");
if filename == ref_name {
return Ok(Some(SentryCliRelease {
version: info.version,
Expand Down Expand Up @@ -1728,7 +1728,7 @@ impl ApiRequest {

match pipeline_env {
Some(env) => {
debug!("pipeline: {}", env);
debug!("pipeline: {env}");
headers
.append(&format!("User-Agent: sentry-cli/{VERSION} {env}"))
.ok();
Expand Down Expand Up @@ -1817,7 +1817,7 @@ impl ApiRequest {

/// enables or disables redirects. The default is off.
pub fn follow_location(mut self, val: bool) -> ApiResult<Self> {
debug!("follow redirects: {}", val);
debug!("follow redirects: {val}");
self.handle.follow_location(val)?;
Ok(self)
}
Expand Down Expand Up @@ -1877,8 +1877,7 @@ impl ApiRequest {
.expect("should not return None, as there is no max_elapsed_time");

debug!(
"retry number {}, retrying again in {} ms",
retry_number,
"retry number {retry_number}, retrying again in {} ms",
backoff_timeout.as_milliseconds()
);
std::thread::sleep(backoff_timeout);
Expand Down Expand Up @@ -1909,7 +1908,7 @@ impl ApiResponse {
pub fn into_result(self) -> ApiResult<Self> {
if let Some(ref body) = self.body {
let body = String::from_utf8_lossy(body);
debug!("body: {}", body);
debug!("body: {body}");
}
if self.ok() {
return Ok(self);
Expand Down Expand Up @@ -2046,7 +2045,7 @@ fn log_headers(is_response: bool, data: &[u8]) {
};
format!("{}: {} {info}", &caps[1], &caps[2])
});
debug!("{} {}", if is_response { ">" } else { "<" }, replaced);
debug!("{} {replaced}", if is_response { ">" } else { "<" });
}
}
}
Expand Down
28 changes: 11 additions & 17 deletions src/commands/build/upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,14 +161,11 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
repo_ref
.and_then(|r| match git_repo_head_ref(r) {
Ok(ref_name) => {
debug!("Found current branch reference: {}", ref_name);
debug!("Found current branch reference: {ref_name}");
Some(ref_name)
}
Err(e) => {
debug!(
"No valid branch reference found (likely detached HEAD): {}",
e
);
debug!("No valid branch reference found (likely detached HEAD): {e}");
None
}
})
Expand All @@ -188,11 +185,11 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
repo_ref
.and_then(|r| match git_repo_base_ref(r, &cached_remote) {
Ok(base_ref_name) => {
debug!("Found base reference: {}", base_ref_name);
debug!("Found base reference: {base_ref_name}");
Some(base_ref_name)
}
Err(e) => {
info!("Could not detect base branch reference: {}", e);
info!("Could not detect base branch reference: {e}");
None
}
})
Expand All @@ -208,15 +205,15 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
repo_ref
.and_then(|r| match git_repo_base_repo_name_preserve_case(r) {
Ok(Some(base_repo_name)) => {
debug!("Found base repository name: {}", base_repo_name);
debug!("Found base repository name: {base_repo_name}");
Some(base_repo_name)
}
Ok(None) => {
debug!("No base repository found - not a fork");
None
}
Err(e) => {
warn!("Could not detect base repository name: {}", e);
warn!("Could not detect base repository name: {e}");
None
}
})
Expand All @@ -237,7 +234,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
.map(Cow::Borrowed)
.or_else(|| {
vcs::find_base_sha()
.inspect_err(|e| debug!("Error finding base SHA: {}", e))
.inspect_err(|e| debug!("Error finding base SHA: {e}"))
.ok()
.flatten()
.map(Cow::Owned)
Expand Down Expand Up @@ -326,7 +323,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
uploaded_paths_and_urls.push((path.to_path_buf(), artifact_url));
}
Err(e) => {
debug!("Failed to upload file at path {}: {}", path.display(), e);
debug!("Failed to upload file at path {}: {e}", path.display());
errored_paths_and_reasons.push((path.to_path_buf(), e));
}
}
Expand Down Expand Up @@ -446,7 +443,7 @@ fn normalize_file(path: &Path, bytes: &[u8]) -> Result<TempFile> {
.to_str()
.with_context(|| format!("Failed to get relative path for {}", path.display()))?;

debug!("Adding file to zip: {}", file_name);
debug!("Adding file to zip: {file_name}");

// Need to set the last modified time to a fixed value to ensure consistent checksums
// This is important as an optimization to avoid re-uploading the same chunks if they're already on the server
Expand Down Expand Up @@ -486,11 +483,8 @@ fn upload_file(
update to the latest version of Sentry to use the build upload command.";

debug!(
"Uploading file to organization: {}, project: {}, build_configuration: {}, vcs_info: {:?}",
org,
project,
"Uploading file to organization: {org}, project: {project}, build_configuration: {}, vcs_info: {vcs_info:?}",
build_configuration.unwrap_or("unknown"),
vcs_info,
);

let chunk_upload_options = api.get_chunk_upload_options(org)?.ok_or_else(|| {
Expand Down Expand Up @@ -569,7 +563,7 @@ fn upload_file(
// true for ChunkedFileState::NotFound.
if response.state == ChunkedFileState::Error {
let message = response.detail.as_deref().unwrap_or("unknown error");
bail!("Failed to process uploaded files: {}", message);
bail!("Failed to process uploaded files: {message}");
}

if let Some(artifact_url) = response.artifact_url {
Expand Down
11 changes: 3 additions & 8 deletions src/commands/dart_symbol_map/upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,7 @@ pub(super) fn execute(args: DartSymbolMapUploadArgs) -> Result<()> {
ids.dedup();
match ids.len() {
0 => bail!(
"No debug identifier found in the provided debug file ({}). Ensure the file contains an embedded Debug ID.",
debug_file_path
"No debug identifier found in the provided debug file ({debug_file_path}). Ensure the file contains an embedded Debug ID."
),
1 => {
let debug_id = ids.remove(0);
Expand Down Expand Up @@ -153,10 +152,7 @@ pub(super) fn execute(args: DartSymbolMapUploadArgs) -> Result<()> {

if (mapping_len as u64) > effective_max_file_size {
bail!(
"The dartsymbolmap '{}' exceeds the maximum allowed size ({} bytes > {} bytes).",
mapping_path,
mapping_len,
effective_max_file_size
"The dartsymbolmap '{mapping_path}' exceeds the maximum allowed size ({mapping_len} bytes > {effective_max_file_size} bytes)."
);
}

Expand All @@ -172,8 +168,7 @@ pub(super) fn execute(args: DartSymbolMapUploadArgs) -> Result<()> {
Ok(())
}
_ => bail!(
"Multiple debug identifiers found in the provided debug file ({}): {}. Please provide a file that contains a single Debug ID.",
debug_file_path,
"Multiple debug identifiers found in the provided debug file ({debug_file_path}): {}. Please provide a file that contains a single Debug ID.",
ids.into_iter().map(|id| id.to_string()).collect::<Vec<_>>().join(", ")
),
}
Expand Down
2 changes: 1 addition & 1 deletion src/commands/debug_files/upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
upload.filter_format(DifFormat::BcSymbolMap);
upload.filter_format(DifFormat::PList)
}
other => bail!("Unsupported type: {}", other),
other => bail!("Unsupported type: {other}"),
};
}

Expand Down
2 changes: 1 addition & 1 deletion src/commands/files/upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -239,7 +239,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {

if matches.get_flag("decompress") && is_gzip_compressed(&contents) {
contents = decompress_gzip_content(&contents).unwrap_or_else(|_| {
warn!("Could not decompress: {}", name);
warn!("Could not decompress: {name}");
contents
});
}
Expand Down
7 changes: 2 additions & 5 deletions src/commands/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -288,10 +288,7 @@ pub fn execute() -> Result<()> {
);
}

debug!(
"sentry-cli version: {}, platform: \"{}\", architecture: \"{}\"",
VERSION, PLATFORM, ARCH
);
debug!("sentry-cli version: {VERSION}, platform: \"{PLATFORM}\", architecture: \"{ARCH}\"");

info!(
"sentry-cli was invoked with the following command line: {}",
Expand Down Expand Up @@ -358,7 +355,7 @@ fn setup() {
set_logger(&Logger).unwrap();

if let Err(e) = load_dotenv_result {
log::warn!("Failed to load .env file: {}", e);
log::warn!("Failed to load .env file: {e}");
}
}

Expand Down
2 changes: 1 addition & 1 deletion src/commands/organizations/list.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ pub fn execute(_matches: &ArgMatches) -> Result<()> {
let regions = authenticated_api.list_available_regions()?;

let mut organizations: Vec<Organization> = vec![];
debug!("Available regions: {:?}", regions);
debug!("Available regions: {regions:?}");

// Self-hosted instances won't have a region instance or prefix, so we
// need to check before fanning out.
Expand Down
9 changes: 3 additions & 6 deletions src/commands/releases/set_commits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -117,18 +117,15 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {

if commit_spec.rev.len() > MAX_COMMIT_SHA_LENGTH {
bail!(
"Invalid commit SHA '{}'. Commit SHAs must be {} characters or less.",
commit_spec.rev,
MAX_COMMIT_SHA_LENGTH
"Invalid commit SHA '{}'. Commit SHAs must be {MAX_COMMIT_SHA_LENGTH} characters or less.",
commit_spec.rev
);
}

if let Some(ref prev_rev) = commit_spec.prev_rev {
if prev_rev.len() > MAX_COMMIT_SHA_LENGTH {
bail!(
"Invalid previous commit SHA '{}'. Commit SHAs must be {} characters or less.",
prev_rev,
MAX_COMMIT_SHA_LENGTH
"Invalid previous commit SHA '{prev_rev}'. Commit SHAs must be {MAX_COMMIT_SHA_LENGTH} characters or less."
);
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/commands/send_envelope.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
.collect();

if collected_paths.is_empty() {
warn!("Did not match any envelope files for pattern: {}", path);
warn!("Did not match any envelope files for pattern: {path}");
return Ok(());
}

Expand Down
2 changes: 1 addition & 1 deletion src/commands/send_event.rs
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ pub fn execute(matches: &ArgMatches) -> Result<()> {
.collect();

if collected_paths.is_empty() {
warn!("Did not match any .json files for pattern: {}", path);
warn!("Did not match any .json files for pattern: {path}");
return Ok(());
}

Expand Down
18 changes: 8 additions & 10 deletions src/commands/sourcemaps/explain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,19 +108,19 @@ fn extract_nth_frame(stacktrace: &Stacktrace, position: usize) -> Result<&Frame>

let frame = in_app_frames
.get(position)
.ok_or_else(|| format_err!("Selected frame ({}) is missing.", position))?;
.ok_or_else(|| format_err!("Selected frame ({position}) is missing."))?;

let abs_path = frame
.abs_path
.as_ref()
.ok_or_else(|| format_err!("Selected frame ({}) is missing an abs_path", position))?;
.ok_or_else(|| format_err!("Selected frame ({position}) is missing an abs_path"))?;

if let Ok(abs_path) = Url::parse(abs_path) {
if Path::new(abs_path.path()).extension().is_none() {
bail!("Selected frame ({}) of event exception originates from the <script> tag, its not possible to resolve source maps", position);
bail!("Selected frame ({position}) of event exception originates from the <script> tag, its not possible to resolve source maps");
}
} else {
bail!("Event exception stacktrace selected frame ({}) has incorrect abs_path (valid url is required). Found {}", position, abs_path);
bail!("Event exception stacktrace selected frame ({position}) has incorrect abs_path (valid url is required). Found {abs_path}");
}

Ok(frame)
Expand Down Expand Up @@ -210,10 +210,8 @@ fn fetch_release_artifact_file(
})
.map_err(|err| {
format_err!(
"Could not retrieve file {} from release {}: {:?}",
artifact.name,
release,
err
"Could not retrieve file {} from release {release}: {err:?}",
artifact.name
)
})?
}
Expand Down Expand Up @@ -290,7 +288,7 @@ fn print_sourcemap(file: &TempFile, line: u32, column: u32) -> Result<()> {
} else if token.get_source_view().is_none() {
bail!("cannot find source");
} else {
bail!("cannot find source for line {} column {}", line, column);
bail!("cannot find source for line {line} column {column}");
}
} else {
bail!("invalid sourcemap location");
Expand Down Expand Up @@ -363,7 +361,7 @@ fn unify_artifact_url(abs_path: &str) -> Result<String> {
Err(_) => {
let base = Url::parse("http://example.com").unwrap();
base.join(abs_path)
.map_err(|_| format_err!("Cannot parse source map url {}", abs_path))
.map_err(|_| format_err!("Cannot parse source map url {abs_path}"))
}
}?;
let mut filename = String::from("~");
Expand Down
2 changes: 1 addition & 1 deletion src/commands/sourcemaps/upload.rs
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ fn process_sources_from_bundle(
if !prefixes.contains(&"~") {
prefixes.push("~");
}
debug!("Prefixes: {:?}", prefixes);
debug!("Prefixes: {prefixes:?}");

processor.rewrite(&prefixes)?;
processor.add_sourcemap_references();
Expand Down
Loading