cleanup: leverage Itertools::try_collect() instead of turbofish

It still requires a return type annotation in some places, but it should be
easier to type than ::<Result<...

https://docs.rs/itertools/latest/itertools/trait.Itertools.html#method.try_collect
Author: Yuya Nishihara, 2022-12-16 12:51:25 +09:00
Commit: 4e8f51a983 (parent daef30906f)
5 changed files with 20 additions and 21 deletions
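
For readers skimming the diff, here is a minimal standalone sketch (not from this repository, assuming only the itertools crate) contrasting the two spellings: the turbofish has to spell out the full Result wrapper at the call site, while try_collect() lets the Ok type come from a binding annotation and takes the error type from the iterator's items.

use itertools::Itertools;

fn parse_all(inputs: &[&str]) -> Result<Vec<i32>, std::num::ParseIntError> {
    // Turbofish spelling: the full collected type is written at the call site.
    let _with_turbofish = inputs
        .iter()
        .map(|s| s.parse::<i32>())
        .collect::<Result<Vec<_>, _>>()?;

    // try_collect(): only the Ok type needs an annotation, on the binding;
    // the error type is taken from the iterator's Result items.
    let with_try_collect: Vec<_> = inputs
        .iter()
        .map(|s| s.parse::<i32>())
        .try_collect()?;

    Ok(with_try_collect)
}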

@@ -98,7 +98,7 @@ impl RepoPath {
                 Component::Normal(a) => Ok(RepoPathComponent::from(a.to_str().unwrap())),
                 _ => Err(FsPathParseError::InputNotInRepo(input.to_string())),
             })
-            .collect::<Result<Vec<_>, _>>()?;
+            .try_collect()?;
         Ok(RepoPath::from_components(components))
     }

@@ -800,10 +800,10 @@ fn parse_function_expression(
         // Resolve arguments in the current scope, and pass them in to the alias
         // expansion scope.
         let arguments_span = arguments_pair.as_span();
-        let args = arguments_pair
+        let args: Vec<_> = arguments_pair
             .into_inner()
             .map(|arg| parse_expression_rule(arg.into_inner(), state))
-            .collect::<Result<Vec<_>, RevsetParseError>>()?;
+            .try_collect()?;
         if params.len() == args.len() {
             let locals = params.iter().map(|s| s.as_str()).zip(args).collect();
             state.with_alias_expanding(id, &locals, primary_span, |state| {
@@ -934,9 +934,9 @@ fn parse_builtin_function(
         "file" => {
             if let Some(ctx) = state.workspace_ctx {
                 let arguments_span = arguments_pair.as_span();
-                let paths = arguments_pair
+                let paths: Vec<_> = arguments_pair
                     .into_inner()
-                    .map(|arg| {
+                    .map(|arg| -> Result<_, RevsetParseError> {
                         let span = arg.as_span();
                         let needle = parse_function_argument_to_string(name, arg, state)?;
                         let path = RepoPath::parse_fs_path(ctx.cwd, ctx.workspace_root, &needle)
@@ -948,7 +948,7 @@ fn parse_builtin_function(
                         })?;
                         Ok(path)
                     })
-                    .collect::<Result<Vec<_>, RevsetParseError>>()?;
+                    .try_collect()?;
                 if paths.is_empty() {
                     Err(RevsetParseError::with_span(
                         RevsetParseErrorKind::InvalidFunctionArguments {
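
The `-> Result<_, RevsetParseError>` annotation added to the closure above is the "return type annotation in some places" the commit message mentions: once the turbofish no longer names the error type, the `?` inside the closure needs that type pinned somewhere else. A minimal sketch of the same situation, with a hypothetical MyError type standing in for RevsetParseError:

use itertools::Itertools;

#[derive(Debug)]
struct MyError;

impl From<std::num::ParseIntError> for MyError {
    fn from(_: std::num::ParseIntError) -> Self {
        MyError
    }
}

fn doubled(inputs: &[&str]) -> Result<Vec<i32>, MyError> {
    let doubled: Vec<_> = inputs
        .iter()
        // Without `-> Result<_, MyError>`, the closure's error type is
        // ambiguous: the binding and try_collect() only pin down the Ok side
        // (Vec<i32>), and the `?` inside would accept several error types.
        .map(|s| -> Result<_, MyError> {
            let n: i32 = s.parse()?;
            Ok(n * 2)
        })
        .try_collect()?;
    Ok(doubled)
}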

@@ -563,10 +563,10 @@ impl WorkspaceCommandHelper {
             Ok(Box::new(EverythingMatcher))
         } else {
             // TODO: Add support for globs and other formats
-            let paths = values
+            let paths: Vec<_> = values
                 .iter()
                 .map(|v| self.parse_file_path(v))
-                .collect::<Result<Vec<_>, _>>()?;
+                .try_collect()?;
             Ok(Box::new(PrefixMatcher::new(&paths)))
         }
     }
@@ -617,7 +617,7 @@ impl WorkspaceCommandHelper {
             ))),
             (Some(commit0), Some(commit1)) => {
                 let mut iter = [commit0, commit1].into_iter().chain(iter);
-                let commits = iter.by_ref().take(5).collect::<Result<Vec<_>, _>>()?;
+                let commits: Vec<_> = iter.by_ref().take(5).try_collect()?;
                 let elided = iter.next().is_some();
                 let hint = format!(
                     "The revset resolved to these revisions:\n{commits}{ellipsis}",

@@ -27,7 +27,7 @@ use chrono::{FixedOffset, LocalResult, TimeZone, Utc};
 use clap::builder::NonEmptyStringValueParser;
 use clap::{ArgAction, ArgGroup, ArgMatches, CommandFactory, FromArgMatches, Subcommand};
 use itertools::Itertools;
-use jujutsu_lib::backend::{BackendError, CommitId, Timestamp, TreeValue};
+use jujutsu_lib::backend::{CommitId, Timestamp, TreeValue};
 use jujutsu_lib::commit::Commit;
 use jujutsu_lib::commit_builder::CommitBuilder;
 use jujutsu_lib::dag_walk::topo_order_reverse;
@@ -2702,18 +2702,18 @@ fn rebase_revision(
                 .parents()
                 .ancestors(),
         );
-        let new_child_parents: Result<Vec<Commit>, BackendError> = workspace_command
+        let new_child_parents: Vec<Commit> = workspace_command
             .evaluate_revset(&new_child_parents_expression)
             .unwrap()
             .iter()
             .commits(store)
-            .collect();
+            .try_collect()?;
         rebase_commit(
             ui.settings(),
             tx.mut_repo(),
             &child_commit,
-            &new_child_parents?,
+            &new_child_parents,
         );
         num_rebased_descendants += 1;
     }
@@ -3472,16 +3472,16 @@ fn cmd_sparse(ui: &mut Ui, command: &CommandHelper, args: &SparseArgs) -> Result
         }
     } else {
         let mut workspace_command = command.workspace_helper(ui)?;
-        let paths_to_add = args
+        let paths_to_add: Vec<_> = args
             .add
             .iter()
             .map(|v| workspace_command.parse_file_path(v))
-            .collect::<Result<Vec<_>, _>>()?;
-        let paths_to_remove = args
+            .try_collect()?;
+        let paths_to_remove: Vec<_> = args
             .remove
             .iter()
             .map(|v| workspace_command.parse_file_path(v))
-            .collect::<Result<Vec<_>, _>>()?;
+            .try_collect()?;
         let (mut locked_wc, _wc_commit) = workspace_command.start_working_copy_mutation()?;
         let mut new_patterns = HashSet::new();
         if args.reset {

@@ -212,9 +212,9 @@ pub fn run_mergetool(
         // The default case below should never actually trigger, but we support it just in case
         // resolving the root path ever makes sense.
         .unwrap_or_default();
-    let paths: Result<HashMap<&str, _>, ConflictResolveError> = files
+    let paths: HashMap<&str, _> = files
         .iter()
-        .map(|(role, contents)| {
+        .map(|(role, contents)| -> Result<_, ConflictResolveError> {
             let path = temp_dir.path().join(format!("{role}{suffix}"));
             std::fs::write(&path, contents).map_err(ExternalToolError::SetUpDirError)?;
             if *role != "output" {
@@ -223,8 +223,7 @@ pub fn run_mergetool(
             }
             Ok((*role, path))
         })
-        .collect();
-    let paths = paths?;
+        .try_collect()?;
     let args = interpolate_mergetool_filename_patterns(&editor.merge_args, &paths);
     let args_str = args
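
As the run_mergetool hunk shows, try_collect() is not limited to Vec: it works with any FromIterator target, including HashMap, so the old two-step collect-then-? can become a single call. A minimal sketch with hypothetical names, not from this repository:

use std::collections::HashMap;

use itertools::Itertools;

fn parsed_settings(
    pairs: &[(&str, &str)],
) -> Result<HashMap<String, i32>, std::num::ParseIntError> {
    // The same try_collect() call builds a HashMap here because the binding
    // annotation selects HashMap as the FromIterator target.
    let settings: HashMap<_, _> = pairs
        .iter()
        .map(|(k, v)| -> Result<_, std::num::ParseIntError> {
            Ok((k.to_string(), v.parse::<i32>()?))
        })
        .try_collect()?;
    Ok(settings)
}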