2022-11-26 23:57:50 +00:00
|
|
|
// Copyright 2022 The Jujutsu Authors
|
2022-09-22 04:44:46 +00:00
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// https://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
use std::collections::{HashSet, VecDeque};
|
2023-01-04 08:18:45 +00:00
|
|
|
use std::env::{self, ArgsOs, VarError};
|
2022-11-28 14:32:44 +00:00
|
|
|
use std::ffi::{OsStr, OsString};
|
2022-09-22 04:44:46 +00:00
|
|
|
use std::fmt::Debug;
|
2022-11-25 09:46:30 +00:00
|
|
|
use std::iter;
|
2022-11-28 14:32:44 +00:00
|
|
|
use std::ops::Deref;
|
2022-12-22 03:58:06 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
2022-11-01 07:14:21 +00:00
|
|
|
use std::rc::Rc;
|
2022-09-22 04:44:46 +00:00
|
|
|
use std::sync::Arc;
|
|
|
|
|
2022-10-31 17:49:53 +00:00
|
|
|
use clap;
|
2022-11-28 14:32:44 +00:00
|
|
|
use clap::builder::{NonEmptyStringValueParser, TypedValueParser, ValueParserFactory};
|
2022-11-22 04:46:50 +00:00
|
|
|
use clap::{Arg, ArgAction, ArgMatches, Command, Error, FromArgMatches};
|
2022-09-22 04:44:46 +00:00
|
|
|
use git2::{Oid, Repository};
|
|
|
|
use itertools::Itertools;
|
2023-01-11 18:30:31 +00:00
|
|
|
use jujutsu_lib::backend::{BackendError, ChangeId, CommitId, ObjectId, TreeId};
|
2022-09-22 04:44:46 +00:00
|
|
|
use jujutsu_lib::commit::Commit;
|
|
|
|
use jujutsu_lib::git::{GitExportError, GitImportError};
|
|
|
|
use jujutsu_lib::gitignore::GitIgnoreFile;
|
|
|
|
use jujutsu_lib::matchers::{EverythingMatcher, Matcher, PrefixMatcher, Visit};
|
|
|
|
use jujutsu_lib::op_heads_store::{OpHeadResolutionError, OpHeads, OpHeadsStore};
|
|
|
|
use jujutsu_lib::op_store::{OpStore, OpStoreError, OperationId, WorkspaceId};
|
|
|
|
use jujutsu_lib::operation::Operation;
|
2022-12-14 18:08:31 +00:00
|
|
|
use jujutsu_lib::repo::{MutableRepo, ReadonlyRepo, RepoRef, RewriteRootCommit, StoreFactories};
|
2022-10-21 04:50:03 +00:00
|
|
|
use jujutsu_lib::repo_path::{FsPathParseError, RepoPath};
|
2022-10-23 03:41:50 +00:00
|
|
|
use jujutsu_lib::revset::{
|
2022-11-25 03:53:01 +00:00
|
|
|
Revset, RevsetAliasesMap, RevsetError, RevsetExpression, RevsetParseError,
|
|
|
|
RevsetWorkspaceContext,
|
2022-10-23 03:41:50 +00:00
|
|
|
};
|
2022-09-22 04:44:46 +00:00
|
|
|
use jujutsu_lib::settings::UserSettings;
|
|
|
|
use jujutsu_lib::transaction::Transaction;
|
|
|
|
use jujutsu_lib::tree::{Tree, TreeMergeError};
|
|
|
|
use jujutsu_lib::working_copy::{
|
|
|
|
CheckoutStats, LockedWorkingCopy, ResetError, SnapshotError, WorkingCopy,
|
|
|
|
};
|
2023-01-02 05:18:38 +00:00
|
|
|
use jujutsu_lib::workspace::{Workspace, WorkspaceInitError, WorkspaceLoadError, WorkspaceLoader};
|
2022-10-21 04:08:23 +00:00
|
|
|
use jujutsu_lib::{dag_walk, file_util, git, revset};
|
2022-10-02 18:59:26 +00:00
|
|
|
use thiserror::Error;
|
2023-01-03 07:24:44 +00:00
|
|
|
use tracing_subscriber::prelude::*;
|
2022-09-22 04:44:46 +00:00
|
|
|
|
2023-01-05 04:55:20 +00:00
|
|
|
use crate::config::{FullCommandArgs, LayeredConfigs};
|
2022-10-06 10:20:51 +00:00
|
|
|
use crate::formatter::Formatter;
|
2022-12-18 04:00:07 +00:00
|
|
|
use crate::merge_tools::{ConflictResolveError, DiffEditError};
|
2022-10-06 10:20:51 +00:00
|
|
|
use crate::templater::TemplateFormatter;
|
2022-10-21 04:50:03 +00:00
|
|
|
use crate::ui::{ColorChoice, Ui};
|
2022-09-22 04:44:46 +00:00
|
|
|
|
2023-01-10 00:42:48 +00:00
|
|
|
/// An error from a command, categorized by how it should be reported to the
/// user and how the process should exit.
#[derive(Clone, Debug)]
pub enum CommandError {
    /// An error caused by the user (bad input, bad repo state), optionally
    /// with a hint suggesting how to fix it.
    UserError {
        message: String,
        hint: Option<String>,
    },
    /// A problem with the user's configuration.
    ConfigError(String),
    /// Invalid command line
    CliError(String),
    /// Invalid command line detected by clap
    // Wrapped in Arc because clap::Error is not Clone but CommandError is.
    ClapCliError(Arc<clap::Error>),
    /// The output pipe was closed; the command should terminate quietly.
    BrokenPipe,
    /// An unexpected failure, indicating a bug or corrupt repository.
    InternalError(String),
}
|
|
|
|
|
2022-11-12 22:38:43 +00:00
|
|
|
pub fn user_error(message: impl Into<String>) -> CommandError {
|
2022-11-12 23:28:32 +00:00
|
|
|
CommandError::UserError {
|
|
|
|
message: message.into(),
|
|
|
|
hint: None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
pub fn user_error_with_hint(message: impl Into<String>, hint: impl Into<String>) -> CommandError {
|
|
|
|
CommandError::UserError {
|
|
|
|
message: message.into(),
|
|
|
|
hint: Some(hint.into()),
|
|
|
|
}
|
2022-11-12 22:38:43 +00:00
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
impl From<std::io::Error> for CommandError {
|
|
|
|
fn from(err: std::io::Error) -> Self {
|
|
|
|
if err.kind() == std::io::ErrorKind::BrokenPipe {
|
|
|
|
CommandError::BrokenPipe
|
|
|
|
} else {
|
|
|
|
// TODO: Record the error as a chained cause
|
|
|
|
CommandError::InternalError(format!("I/O error: {err}"))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Errors from the `config` crate are reported as configuration problems.
impl From<config::ConfigError> for CommandError {
    fn from(err: config::ConfigError) -> Self {
        CommandError::ConfigError(err.to_string())
    }
}
|
|
|
|
|
2023-01-01 07:29:46 +00:00
|
|
|
// Errors from this crate's own config layer map to the same variant.
impl From<crate::config::ConfigError> for CommandError {
    fn from(err: crate::config::ConfigError) -> Self {
        CommandError::ConfigError(err.to_string())
    }
}
|
|
|
|
|
2022-10-20 23:33:14 +00:00
|
|
|
// Attempting to rewrite the root commit is a user mistake, not a bug.
impl From<RewriteRootCommit> for CommandError {
    fn from(err: RewriteRootCommit) -> Self {
        user_error(err.to_string())
    }
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
impl From<BackendError> for CommandError {
    fn from(err: BackendError) -> Self {
        // NOTE(review): reported as a user error even though the message says
        // "unexpected" — confirm whether InternalError would fit better.
        user_error(format!("Unexpected error from backend: {err}"))
    }
}
|
|
|
|
|
|
|
|
impl From<WorkspaceInitError> for CommandError {
    // The error payload is dropped; init failure is reported with a fixed
    // message assuming the target repo already exists.
    fn from(_: WorkspaceInitError) -> Self {
        user_error("The target repo already exists")
    }
}
|
|
|
|
|
|
|
|
impl From<OpHeadResolutionError> for CommandError {
    fn from(err: OpHeadResolutionError) -> Self {
        match err {
            // A repo with zero operations cannot occur through normal use.
            OpHeadResolutionError::NoHeads => CommandError::InternalError(
                "Corrupt repository: there are no operations".to_string(),
            ),
        }
    }
}
|
|
|
|
|
|
|
|
// Failing to snapshot the working copy is treated as an internal error.
impl From<SnapshotError> for CommandError {
    fn from(err: SnapshotError) -> Self {
        CommandError::InternalError(format!("Failed to snapshot the working copy: {err}"))
    }
}
|
|
|
|
|
|
|
|
// Tree-merge failures indicate a bug or corrupt store, not user error.
impl From<TreeMergeError> for CommandError {
    fn from(err: TreeMergeError) -> Self {
        CommandError::InternalError(format!("Merge failed: {err}"))
    }
}
|
|
|
|
|
|
|
|
impl From<ResetError> for CommandError {
    // The error payload is discarded; only a fixed message is shown.
    fn from(_: ResetError) -> Self {
        CommandError::InternalError("Failed to reset the working copy".to_string())
    }
}
|
|
|
|
|
|
|
|
// Diff-editor failures usually stem from the user's configured tool.
impl From<DiffEditError> for CommandError {
    fn from(err: DiffEditError) -> Self {
        user_error(format!("Failed to edit diff: {err}"))
    }
}
|
|
|
|
|
2022-10-28 03:30:44 +00:00
|
|
|
// Merge-tool failures usually stem from the user's configured tool.
impl From<ConflictResolveError> for CommandError {
    fn from(err: ConflictResolveError) -> Self {
        user_error(format!("Failed to use external tool to resolve: {err}"))
    }
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
impl From<git2::Error> for CommandError {
    fn from(err: git2::Error) -> Self {
        user_error(format!("Git operation failed: {err}"))
    }
}
|
|
|
|
|
|
|
|
impl From<GitImportError> for CommandError {
    fn from(err: GitImportError) -> Self {
        CommandError::InternalError(format!(
            "Failed to import refs from underlying Git repo: {err}"
        ))
    }
}
|
|
|
|
|
|
|
|
impl From<GitExportError> for CommandError {
    fn from(err: GitExportError) -> Self {
        CommandError::InternalError(format!(
            "Failed to export refs to underlying Git repo: {err}"
        ))
    }
}
|
|
|
|
|
|
|
|
impl From<RevsetParseError> for CommandError {
    fn from(err: RevsetParseError) -> Self {
        // Walk the chain of source errors via origin() and print each on its
        // own line, so an error inside an expanded alias is also explained.
        let message = iter::successors(Some(&err), |e| e.origin()).join("\n");
        user_error(format!("Failed to parse revset: {message}"))
    }
}
|
|
|
|
|
|
|
|
// Revset evaluation errors (e.g. unknown symbol) are the user's to fix.
impl From<RevsetError> for CommandError {
    fn from(err: RevsetError) -> Self {
        user_error(format!("{err}"))
    }
}
|
|
|
|
|
2022-10-21 04:50:03 +00:00
|
|
|
// A path outside the workspace (or otherwise unparsable) is a user error.
impl From<FsPathParseError> for CommandError {
    fn from(err: FsPathParseError) -> Self {
        user_error(format!("{err}"))
    }
}
|
|
|
|
|
2022-11-22 23:58:04 +00:00
|
|
|
impl From<glob::PatternError> for CommandError {
    fn from(err: glob::PatternError) -> Self {
        user_error(format!("Failed to compile glob: {err}"))
    }
}
|
|
|
|
|
2022-10-31 17:49:53 +00:00
|
|
|
impl From<clap::Error> for CommandError {
    fn from(err: clap::Error) -> Self {
        // Arc-wrapped because clap::Error is not Clone (CommandError is).
        CommandError::ClapCliError(Arc::new(err))
    }
}
|
|
|
|
|
2023-01-03 07:24:44 +00:00
|
|
|
/// Handle to initialize or change tracing subscription.
#[derive(Clone, Debug)]
pub struct TracingSubscription {
    // Reload handle for swapping the active EnvFilter at runtime, used by
    // enable_verbose_logging() after the subscriber is installed.
    reload_log_filter: tracing_subscriber::reload::Handle<
        tracing_subscriber::EnvFilter,
        tracing_subscriber::Registry,
    >,
}
|
|
|
|
|
|
|
|
impl TracingSubscription {
    /// Initializes tracing with the default configuration. This should be
    /// called as early as possible.
    pub fn init() -> Self {
        // Default to INFO; the environment (e.g. RUST_LOG) may override it.
        let filter = tracing_subscriber::EnvFilter::builder()
            .with_default_directive(tracing::metadata::LevelFilter::INFO.into())
            .from_env_lossy();
        // Wrap in a reload layer so verbosity can be changed later without
        // reinstalling the global subscriber.
        let (filter, reload_log_filter) = tracing_subscriber::reload::Layer::new(filter);
        tracing_subscriber::registry()
            .with(filter)
            // Log output goes to stderr, keeping stdout clean for command output.
            .with(tracing_subscriber::fmt::Layer::default().with_writer(std::io::stderr))
            .init();
        TracingSubscription { reload_log_filter }
    }

    /// Raises the log filter's default level to DEBUG (environment
    /// directives still apply).
    pub fn enable_verbose_logging(&self) -> Result<(), CommandError> {
        self.reload_log_filter
            .modify(|filter| {
                *filter = tracing_subscriber::EnvFilter::builder()
                    .with_default_directive(tracing::metadata::LevelFilter::DEBUG.into())
                    .from_env_lossy()
            })
            .map_err(|err| {
                CommandError::InternalError(format!("failed to enable verbose logging: {err:?}"))
            })?;
        tracing::debug!("verbose logging enabled");
        Ok(())
    }
}
|
|
|
|
|
2022-09-29 16:12:16 +00:00
|
|
|
/// Per-invocation state resolved before the workspace and repo are loaded.
/// Commands use it to obtain a [`WorkspaceCommandHelper`].
pub struct CommandHelper {
    app: clap::Command,
    // Directory the process was invoked from.
    cwd: PathBuf,
    // The command line, one string per argument (recorded in operations).
    string_args: Vec<String>,
    global_args: GlobalArgs,
    settings: UserSettings,
    // Result of locating the workspace; an Err here is deferred until a
    // command actually needs the workspace.
    maybe_workspace_loader: Result<WorkspaceLoader, CommandError>,
    store_factories: StoreFactories,
}
|
|
|
|
|
2022-09-29 16:12:16 +00:00
|
|
|
impl CommandHelper {
    pub fn new(
        app: clap::Command,
        cwd: PathBuf,
        string_args: Vec<String>,
        global_args: GlobalArgs,
        settings: UserSettings,
        maybe_workspace_loader: Result<WorkspaceLoader, CommandError>,
        store_factories: StoreFactories,
    ) -> Self {
        Self {
            app,
            cwd,
            string_args,
            global_args,
            settings,
            maybe_workspace_loader,
            store_factories,
        }
    }

    /// The top-level clap command definition.
    pub fn app(&self) -> &clap::Command {
        &self.app
    }

    /// Directory the command was invoked from.
    pub fn cwd(&self) -> &Path {
        &self.cwd
    }

    /// The raw command-line arguments, one string each.
    pub fn string_args(&self) -> &Vec<String> {
        &self.string_args
    }

    pub fn global_args(&self) -> &GlobalArgs {
        &self.global_args
    }

    pub fn settings(&self) -> &UserSettings {
        &self.settings
    }

    /// Loads the workspace, resolves the operation to load the repo at, and
    /// snapshots the working copy. The usual entry point for commands that
    /// operate on a workspace.
    pub fn workspace_helper(&self, ui: &mut Ui) -> Result<WorkspaceCommandHelper, CommandError> {
        let workspace = self.load_workspace()?;
        let mut workspace_command = self.resolve_operation(ui, workspace)?;
        workspace_command.snapshot(ui)?;
        Ok(workspace_command)
    }

    /// Loads the workspace found at construction time; any deferred loader
    /// error (e.g. not inside a repo) surfaces here.
    pub fn load_workspace(&self) -> Result<Workspace, CommandError> {
        let loader = self.maybe_workspace_loader.as_ref().map_err(Clone::clone)?;
        loader
            .load(&self.settings, &self.store_factories)
            .map_err(|e| user_error(format!("{}: {}", e, e.error)))
    }

    /// Loads the repo at the operation selected by --at-op. If there are
    /// multiple operation heads (concurrent writes), they are merged into a
    /// single new operation first.
    pub fn resolve_operation(
        &self,
        ui: &mut Ui,
        workspace: Workspace,
    ) -> Result<WorkspaceCommandHelper, CommandError> {
        let repo_loader = workspace.repo_loader();
        let op_heads = resolve_op_for_load(
            repo_loader.op_store(),
            repo_loader.op_heads_store(),
            &self.global_args.at_operation,
        )?;
        let workspace_command = match op_heads {
            OpHeads::Single(op) => {
                let repo = repo_loader.load_at(&op);
                self.for_loaded_repo(ui, workspace, repo)?
            }
            OpHeads::Unresolved {
                locked_op_heads,
                op_heads,
            } => {
                writeln!(
                    ui,
                    "Concurrent modification detected, resolving automatically.",
                )?;
                // Merge the remaining heads into the first one, rebasing
                // descendants of rewritten commits as we go.
                let base_repo = repo_loader.load_at(&op_heads[0]);
                // TODO: It may be helpful to print each operation we're merging here
                let mut workspace_command = self.for_loaded_repo(ui, workspace, base_repo)?;
                let mut tx = workspace_command.start_transaction("resolve concurrent operations");
                for other_op_head in op_heads.into_iter().skip(1) {
                    tx.merge_operation(other_op_head);
                    let num_rebased = tx.mut_repo().rebase_descendants(&self.settings)?;
                    if num_rebased > 0 {
                        writeln!(
                            ui,
                            "Rebased {num_rebased} descendant commits onto commits rewritten by \
                             other operation"
                        )?;
                    }
                }
                // Record the merged state as the single new operation head.
                let merged_repo = tx.write().leave_unpublished();
                locked_op_heads.finish(merged_repo.operation());
                workspace_command.repo = merged_repo;
                workspace_command
            }
        };
        Ok(workspace_command)
    }

    /// Builds a [`WorkspaceCommandHelper`] around an already-loaded repo.
    pub fn for_loaded_repo(
        &self,
        ui: &mut Ui,
        workspace: Workspace,
        repo: Arc<ReadonlyRepo>,
    ) -> Result<WorkspaceCommandHelper, CommandError> {
        WorkspaceCommandHelper::new(
            ui,
            workspace,
            self.cwd.clone(),
            self.string_args.clone(),
            &self.global_args,
            self.settings.clone(),
            repo,
        )
    }
}
|
|
|
|
|
|
|
|
// Provides utilities for writing a command that works on a workspace (like most
// commands do).
pub struct WorkspaceCommandHelper {
    cwd: PathBuf,
    string_args: Vec<String>,
    global_args: GlobalArgs,
    settings: UserSettings,
    workspace: Workspace,
    repo: Arc<ReadonlyRepo>,
    // User-configured revset aliases, loaded once at construction.
    revset_aliases_map: RevsetAliasesMap,
    // False when the repo was loaded at an old operation (--at-op) or
    // snapshotting was disabled; guards all working-copy mutation.
    may_update_working_copy: bool,
    // True when the workspace root is also the Git working directory
    // (a "colocated" repo).
    working_copy_shared_with_git: bool,
}
|
|
|
|
|
|
|
|
impl WorkspaceCommandHelper {
|
2022-10-08 05:24:50 +00:00
|
|
|
pub fn new(
    ui: &mut Ui,
    workspace: Workspace,
    cwd: PathBuf,
    string_args: Vec<String>,
    global_args: &GlobalArgs,
    settings: UserSettings,
    repo: Arc<ReadonlyRepo>,
) -> Result<Self, CommandError> {
    let revset_aliases_map = load_revset_aliases(ui, &settings)?;
    // Only allow mutating the working copy when operating at the current
    // operation head and snapshotting wasn't explicitly disabled.
    let loaded_at_head = &global_args.at_operation == "@";
    let may_update_working_copy = loaded_at_head && !global_args.no_commit_working_copy;
    // Detect a colocated repo: the Git working directory equals the
    // workspace root. Canonicalize so symlinked paths compare equal.
    let mut working_copy_shared_with_git = false;
    let maybe_git_repo = repo.store().git_repo();
    if let Some(git_workdir) = maybe_git_repo
        .as_ref()
        .and_then(|git_repo| git_repo.workdir())
        .and_then(|workdir| workdir.canonicalize().ok())
    {
        working_copy_shared_with_git = git_workdir == workspace.workspace_root().as_path();
    }
    Ok(Self {
        cwd,
        string_args,
        global_args: global_args.clone(),
        settings,
        workspace,
        repo,
        revset_aliases_map,
        may_update_working_copy,
        working_copy_shared_with_git,
    })
}
|
|
|
|
|
2022-10-02 17:09:46 +00:00
|
|
|
pub fn check_working_copy_writable(&self) -> Result<(), CommandError> {
|
2022-09-22 04:44:46 +00:00
|
|
|
if self.may_update_working_copy {
|
|
|
|
Ok(())
|
|
|
|
} else {
|
2022-11-12 23:28:32 +00:00
|
|
|
let hint = if self.global_args.no_commit_working_copy {
|
|
|
|
"Don't use --no-commit-working-copy."
|
|
|
|
} else {
|
|
|
|
"Don't use --at-op."
|
|
|
|
};
|
|
|
|
Err(user_error_with_hint(
|
|
|
|
"This command must be able to update the working copy.",
|
|
|
|
hint,
|
2022-09-22 04:44:46 +00:00
|
|
|
))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-08 05:56:26 +00:00
|
|
|
/// Snapshot the working copy if allowed, and import Git refs if the working
/// copy is collocated with Git.
pub fn snapshot(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
    if self.may_update_working_copy {
        if self.working_copy_shared_with_git {
            // working_copy_shared_with_git is only set when a Git repo
            // exists, so the unwrap cannot fail.
            let maybe_git_repo = self.repo.store().git_repo();
            self.import_git_refs_and_head(ui, maybe_git_repo.as_ref().unwrap())?;
        }
        self.commit_working_copy(ui)?;
    }
    Ok(())
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
/// Imports refs and HEAD from the colocated Git repo. If Git's HEAD moved,
/// abandons the old working-copy commit and checks out the new HEAD commit,
/// resetting the working-copy state without touching files on disk.
fn import_git_refs_and_head(
    &mut self,
    ui: &mut Ui,
    git_repo: &Repository,
) -> Result<(), CommandError> {
    let mut tx = self.start_transaction("import git refs");
    git::import_refs(tx.mut_repo(), git_repo)?;
    if tx.mut_repo().has_changes() {
        let old_git_head = self.repo.view().git_head();
        let new_git_head = tx.mut_repo().view().git_head();
        // If the Git HEAD has changed, abandon our old checkout and check out the new
        // Git HEAD.
        if new_git_head != old_git_head && new_git_head.is_some() {
            let workspace_id = self.workspace_id();
            let mut locked_working_copy = self.workspace.working_copy_mut().start_mutation();
            if let Some(old_wc_commit_id) = self.repo.view().get_wc_commit_id(&workspace_id) {
                tx.mut_repo()
                    .record_abandoned_commit(old_wc_commit_id.clone());
            }
            let new_checkout = self
                .repo
                .store()
                .get_commit(new_git_head.as_ref().unwrap())?;
            tx.mut_repo()
                .check_out(workspace_id, &self.settings, &new_checkout)?;
            // The working copy was presumably updated by the git command that updated HEAD,
            // so we just need to reset our working copy state to it without updating
            // working copy files.
            locked_working_copy.reset(&new_checkout.tree())?;
            tx.mut_repo().rebase_descendants(&self.settings)?;
            // Commit the transaction before releasing the working-copy lock
            // so the new operation id is what the lock is finished with.
            self.repo = tx.commit();
            locked_working_copy.finish(self.repo.op_id().clone());
        } else {
            let num_rebased = tx.mut_repo().rebase_descendants(&self.settings)?;
            if num_rebased > 0 {
                writeln!(
                    ui,
                    "Rebased {num_rebased} descendant commits off of commits rewritten from \
                     git"
                )?;
            }
            self.finish_transaction(ui, tx)?;
        }
    }
    Ok(())
}
|
|
|
|
|
|
|
|
/// Points Git's HEAD at the first parent of this workspace's working-copy
/// commit so the colocated Git repo matches jj's view. No-op when the
/// parent is the root commit or the workspace has no checkout.
fn export_head_to_git(&self, mut_repo: &mut MutableRepo) -> Result<(), CommandError> {
    // Only called for colocated repos, so a Git repo must exist.
    let git_repo = mut_repo.store().git_repo().unwrap();
    let current_git_head_ref = git_repo.find_reference("HEAD").unwrap();
    let current_git_commit_id = current_git_head_ref
        .peel_to_commit()
        .ok()
        .map(|commit| commit.id());
    if let Some(wc_commit_id) = mut_repo.view().get_wc_commit_id(&self.workspace_id()) {
        let first_parent_id = mut_repo
            .index()
            .entry_by_id(wc_commit_id)
            .unwrap()
            .parents()[0]
            .commit_id();
        if first_parent_id != *mut_repo.store().root_commit_id() {
            // NOTE(review): presumably HEAD is detached first so that a
            // branch HEAD points to isn't moved by the reset — confirm.
            if let Some(current_git_commit_id) = current_git_commit_id {
                git_repo.set_head_detached(current_git_commit_id)?;
            }
            let new_git_commit_id = Oid::from_bytes(first_parent_id.as_bytes()).unwrap();
            let new_git_commit = git_repo.find_commit(new_git_commit_id)?;
            // Mixed reset moves HEAD/index without touching working files.
            git_repo.reset(new_git_commit.as_object(), git2::ResetType::Mixed, None)?;
            mut_repo.set_git_head(first_parent_id);
        }
    } else {
        // The workspace was removed (maybe the user undid the
        // initialization of the workspace?), which is weird,
        // but we should probably just not do anything else here.
        // Except maybe print a note about it?
    }
    Ok(())
}
|
|
|
|
|
|
|
|
/// The repo loaded for this command, reflecting any finished transactions.
pub fn repo(&self) -> &Arc<ReadonlyRepo> {
    &self.repo
}
|
|
|
|
|
|
|
|
/// Read-only access to this workspace's working copy.
pub fn working_copy(&self) -> &WorkingCopy {
    self.workspace.working_copy()
}
|
|
|
|
|
2022-10-02 17:09:46 +00:00
|
|
|
/// Locks the working copy for mutation WITHOUT verifying that the on-disk
/// state matches the working-copy commit. Prefer
/// [`Self::start_working_copy_mutation`], which adds that check.
pub fn unsafe_start_working_copy_mutation(
    &mut self,
) -> Result<(LockedWorkingCopy, Commit), CommandError> {
    self.check_working_copy_writable()?;
    let wc_commit_id = self.repo.view().get_wc_commit_id(&self.workspace_id());
    let wc_commit = if let Some(wc_commit_id) = wc_commit_id {
        self.repo.store().get_commit(wc_commit_id)?
    } else {
        return Err(user_error("Nothing checked out in this workspace"));
    };

    let locked_working_copy = self.workspace.working_copy_mut().start_mutation();

    Ok((locked_working_copy, wc_commit))
}
|
|
|
|
|
|
|
|
/// Locks the working copy for mutation, failing if the on-disk working
/// copy is stale relative to the working-copy commit's tree (e.g. another
/// jj process mutated it concurrently).
pub fn start_working_copy_mutation(
    &mut self,
) -> Result<(LockedWorkingCopy, Commit), CommandError> {
    let (locked_working_copy, wc_commit) = self.unsafe_start_working_copy_mutation()?;
    if wc_commit.tree_id() != locked_working_copy.old_tree_id() {
        return Err(user_error("Concurrent working copy operation. Try again."));
    }
    Ok((locked_working_copy, wc_commit))
}
|
|
|
|
|
|
|
|
/// Absolute path of the workspace root directory.
pub fn workspace_root(&self) -> &PathBuf {
    self.workspace.workspace_root()
}
|
|
|
|
|
|
|
|
/// Identifier of this workspace within the repo (cloned for convenience).
pub fn workspace_id(&self) -> WorkspaceId {
    self.workspace.workspace_id().clone()
}
|
|
|
|
|
|
|
|
/// Whether the workspace root is also the Git working directory.
pub fn working_copy_shared_with_git(&self) -> bool {
    self.working_copy_shared_with_git
}
|
|
|
|
|
|
|
|
/// Formats a repo-relative path for display, relative to the user's cwd.
pub fn format_file_path(&self, file: &RepoPath) -> String {
    file_util::relative_path(&self.cwd, &file.to_fs_path(self.workspace_root()))
        .to_str()
        // NOTE(review): panics on non-UTF-8 paths — confirm acceptable.
        .unwrap()
        .to_owned()
}
|
|
|
|
|
2022-10-21 04:44:37 +00:00
|
|
|
/// Parses a path relative to cwd into a RepoPath, which is relative to the
/// workspace root.
pub fn parse_file_path(&self, input: &str) -> Result<RepoPath, FsPathParseError> {
    RepoPath::parse_fs_path(&self.cwd, self.workspace_root(), input)
}
|
|
|
|
|
2022-10-21 05:32:39 +00:00
|
|
|
pub fn matcher_from_values(&self, values: &[String]) -> Result<Box<dyn Matcher>, CommandError> {
|
2022-10-21 05:46:27 +00:00
|
|
|
if values.is_empty() {
|
2022-10-21 05:32:39 +00:00
|
|
|
Ok(Box::new(EverythingMatcher))
|
|
|
|
} else {
|
2022-10-21 05:46:27 +00:00
|
|
|
// TODO: Add support for globs and other formats
|
2022-12-16 03:51:25 +00:00
|
|
|
let paths: Vec<_> = values
|
2022-10-21 05:46:27 +00:00
|
|
|
.iter()
|
|
|
|
.map(|v| self.parse_file_path(v))
|
2022-12-16 03:51:25 +00:00
|
|
|
.try_collect()?;
|
2022-10-21 05:32:39 +00:00
|
|
|
Ok(Box::new(PrefixMatcher::new(&paths)))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
/// Returns the backing Git repo's config if there is one, otherwise Git's
/// default (user/system-level) configuration.
pub fn git_config(&self) -> Result<git2::Config, git2::Error> {
    match self.repo.store().git_repo() {
        Some(git_repo) => git_repo.config(),
        None => git2::Config::open_default(),
    }
}
|
|
|
|
|
|
|
|
/// Builds the chain of global ignore files: the user's core.excludesFile
/// (falling back to the XDG default of <config>/git/ignore) plus the Git
/// repo's info/exclude, when present.
pub fn base_ignores(&self) -> Arc<GitIgnoreFile> {
    // Mirrors Git's lookup: $XDG_CONFIG_HOME if non-empty, else ~/.config.
    fn xdg_config_home() -> Result<PathBuf, VarError> {
        if let Ok(x) = std::env::var("XDG_CONFIG_HOME") {
            if !x.is_empty() {
                return Ok(PathBuf::from(x));
            }
        }
        std::env::var("HOME").map(|x| Path::new(&x).join(".config"))
    }

    let mut git_ignores = GitIgnoreFile::empty();
    if let Ok(excludes_file_path) = self
        .git_config()
        .and_then(|git_config| {
            git_config
                .get_string("core.excludesFile")
                .map(expand_git_path)
        })
        .or_else(|_| xdg_config_home().map(|x| x.join("git").join("ignore")))
    {
        git_ignores = git_ignores.chain_with_file("", excludes_file_path);
    }
    if let Some(git_repo) = self.repo.store().git_repo() {
        git_ignores =
            git_ignores.chain_with_file("", git_repo.path().join("info").join("exclude"));
    }
    git_ignores
}
|
|
|
|
|
|
|
|
/// Resolves an operation string (id prefix, "@", etc.) to one operation.
pub fn resolve_single_op(&self, op_str: &str) -> Result<Operation, CommandError> {
    // When resolving the "@" operation in a `ReadonlyRepo`, we resolve it to the
    // operation the repo was loaded at.
    resolve_single_op(
        self.repo.op_store(),
        self.repo.op_heads_store(),
        || Ok(self.repo.operation().clone()),
        op_str,
    )
}
|
|
|
|
|
|
|
|
/// Resolves a revset to exactly one commit, erring if it matches zero or
/// multiple revisions (listing up to five matches as a hint).
pub fn resolve_single_rev(&self, revision_str: &str) -> Result<Commit, CommandError> {
    let revset_expression = self.parse_revset(revision_str)?;
    let revset = self.evaluate_revset(&revset_expression)?;
    // fuse() makes it safe to keep calling next() after exhaustion below.
    let mut iter = revset.iter().commits(self.repo.store()).fuse();
    match (iter.next(), iter.next()) {
        (Some(commit), None) => Ok(commit?),
        (None, _) => Err(user_error(format!(
            "Revset \"{revision_str}\" didn't resolve to any revisions"
        ))),
        (Some(commit0), Some(commit1)) => {
            // Re-chain the two consumed items so the hint can list them.
            let mut iter = [commit0, commit1].into_iter().chain(iter);
            let commits: Vec<_> = iter.by_ref().take(5).try_collect()?;
            // If a sixth match exists, mark the list as elided with "...".
            let elided = iter.next().is_some();
            let hint = format!(
                "The revset resolved to these revisions:\n{commits}{ellipsis}",
                commits = commits.iter().map(short_commit_description).join("\n"),
                ellipsis = elided.then(|| "\n...").unwrap_or_default()
            );
            Err(user_error_with_hint(
                format!("Revset \"{revision_str}\" resolved to more than one revision"),
                hint,
            ))
        }
    }
}
|
|
|
|
|
|
|
|
/// Resolves a revset to all matching commits, in revset iteration order.
pub fn resolve_revset(&self, revision_str: &str) -> Result<Vec<Commit>, CommandError> {
    let revset_expression = self.parse_revset(revision_str)?;
    let revset = self.evaluate_revset(&revset_expression)?;
    Ok(revset.iter().commits(self.repo.store()).try_collect()?)
}
|
|
|
|
|
2022-11-01 07:14:21 +00:00
|
|
|
/// Parses a revset string (expanding configured aliases, with workspace
/// context for symbols like "@") and optimizes the resulting expression.
pub fn parse_revset(
    &self,
    revision_str: &str,
) -> Result<Rc<RevsetExpression>, RevsetParseError> {
    let expression = revset::parse(
        revision_str,
        &self.revset_aliases_map,
        Some(&self.revset_context()),
    )?;
    Ok(revset::optimize(expression))
}
|
|
|
|
|
2022-10-23 03:55:09 +00:00
|
|
|
/// Evaluates a parsed revset expression against the loaded repo.
pub fn evaluate_revset<'repo>(
    &'repo self,
    revset_expression: &RevsetExpression,
) -> Result<Box<dyn Revset<'repo> + 'repo>, RevsetError> {
    revset_expression.evaluate(self.repo.as_repo_ref(), Some(&self.revset_context()))
}
|
|
|
|
|
|
|
|
fn revset_context(&self) -> RevsetWorkspaceContext {
|
|
|
|
RevsetWorkspaceContext {
|
2022-10-23 04:14:00 +00:00
|
|
|
cwd: &self.cwd,
|
2022-10-23 03:41:50 +00:00
|
|
|
workspace_id: self.workspace.workspace_id(),
|
2022-10-23 04:14:00 +00:00
|
|
|
workspace_root: self.workspace.workspace_root(),
|
2022-11-01 07:14:21 +00:00
|
|
|
}
|
2022-10-23 03:55:09 +00:00
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn check_rewriteable(&self, commit: &Commit) -> Result<(), CommandError> {
|
|
|
|
if commit.id() == self.repo.store().root_commit_id() {
|
2022-11-12 22:38:43 +00:00
|
|
|
return Err(user_error("Cannot rewrite the root commit"));
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn check_non_empty(&self, commits: &[Commit]) -> Result<(), CommandError> {
|
|
|
|
if commits.is_empty() {
|
2022-11-12 22:38:43 +00:00
|
|
|
return Err(user_error("Empty revision set"));
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Snapshots the on-disk working copy and, if its tree changed, commits
    /// the new state as a rewritten working-copy commit (rebasing descendants
    /// and re-exporting git refs when the working copy is colocated).
    ///
    /// Also handles the stale-working-copy cases: a working copy updated by a
    /// newer operation causes a repo reload, while sibling/unrelated
    /// operations are reported as errors. On any error path the working-copy
    /// lock is explicitly discarded first.
    pub fn commit_working_copy(&mut self, ui: &mut Ui) -> Result<(), CommandError> {
        let repo = self.repo.clone();
        let workspace_id = self.workspace_id();
        let wc_commit_id = match repo.view().get_wc_commit_id(&self.workspace_id()) {
            Some(wc_commit_id) => wc_commit_id.clone(),
            None => {
                // If the workspace has been deleted, it's unclear what to do, so we just skip
                // committing the working copy.
                return Ok(());
            }
        };
        let base_ignores = self.base_ignores();
        // Lock the working copy; every exit path below must either discard()
        // or finish() this mutation.
        let mut locked_wc = self.workspace.working_copy_mut().start_mutation();
        let old_op_id = locked_wc.old_operation_id().clone();
        let wc_commit = repo.store().get_commit(&wc_commit_id)?;
        // Reconcile the working copy's recorded operation with the loaded
        // repo; this may replace self.repo with one reloaded at a newer op.
        self.repo = match check_stale_working_copy(&locked_wc, &wc_commit, repo.clone()) {
            Ok(repo) => repo,
            Err(StaleWorkingCopyError::WorkingCopyStale) => {
                locked_wc.discard();
                return Err(user_error_with_hint(
                    format!(
                        "The working copy is stale (not updated since operation {}).",
                        short_operation_hash(&old_op_id)
                    ),
                    "Run `jj workspace update-stale` to update it.",
                ));
            }
            Err(StaleWorkingCopyError::SiblingOperation) => {
                locked_wc.discard();
                return Err(CommandError::InternalError(format!(
                    "The repo was loaded at operation {}, which seems to be a sibling of the \
                     working copy's operation {}",
                    short_operation_hash(repo.op_id()),
                    short_operation_hash(&old_op_id)
                )));
            }
            Err(StaleWorkingCopyError::UnrelatedOperation) => {
                locked_wc.discard();
                return Err(CommandError::InternalError(format!(
                    "The repo was loaded at operation {}, which seems unrelated to the working \
                     copy's operation {}",
                    short_operation_hash(repo.op_id()),
                    short_operation_hash(&old_op_id)
                )));
            }
        };
        // Scan the filesystem to get the current tree of the working copy.
        let new_tree_id = locked_wc.snapshot(base_ignores)?;
        if new_tree_id != *wc_commit.tree_id() {
            // The working copy changed on disk: rewrite the wc commit to point
            // at the new tree.
            let mut tx = self
                .repo
                .start_transaction(&self.settings, "commit working copy");
            let mut_repo = tx.mut_repo();
            let commit = mut_repo
                .rewrite_commit(&self.settings, &wc_commit)
                .set_tree(new_tree_id)
                .write()?;
            mut_repo
                .set_wc_commit(workspace_id, commit.id().clone())
                .unwrap();

            // Rebase descendants
            let num_rebased = mut_repo.rebase_descendants(&self.settings)?;
            if num_rebased > 0 {
                writeln!(
                    ui,
                    "Rebased {num_rebased} descendant commits onto updated working copy"
                )?;
            }

            if self.working_copy_shared_with_git {
                // Colocated git repo: keep git refs in sync with the new heads.
                let git_repo = self.repo.store().git_repo().unwrap();
                let failed_branches = git::export_refs(mut_repo, &git_repo)?;
                print_failed_git_export(ui, &failed_branches)?;
            }

            self.repo = tx.commit();
        }
        // Record the operation we're now synced with and release the lock.
        locked_wc.finish(self.repo.op_id().clone());
        Ok(())
    }
|
|
|
|
|
2022-10-28 03:30:44 +00:00
|
|
|
pub fn run_mergetool(
|
|
|
|
&self,
|
|
|
|
ui: &mut Ui,
|
|
|
|
tree: &Tree,
|
2022-12-04 03:47:03 +00:00
|
|
|
repo_path: &RepoPath,
|
2022-10-28 03:30:44 +00:00
|
|
|
) -> Result<TreeId, CommandError> {
|
2023-01-04 08:50:08 +00:00
|
|
|
Ok(crate::merge_tools::run_mergetool(
|
|
|
|
ui,
|
|
|
|
tree,
|
|
|
|
repo_path,
|
|
|
|
&self.settings,
|
|
|
|
)?)
|
2022-10-28 03:30:44 +00:00
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn edit_diff(
|
|
|
|
&self,
|
|
|
|
ui: &mut Ui,
|
|
|
|
left_tree: &Tree,
|
|
|
|
right_tree: &Tree,
|
|
|
|
instructions: &str,
|
2022-10-28 03:30:44 +00:00
|
|
|
) -> Result<TreeId, CommandError> {
|
2022-12-18 04:00:07 +00:00
|
|
|
Ok(crate::merge_tools::edit_diff(
|
2022-10-28 03:30:44 +00:00
|
|
|
ui,
|
|
|
|
left_tree,
|
|
|
|
right_tree,
|
|
|
|
instructions,
|
|
|
|
self.base_ignores(),
|
2023-01-04 08:50:08 +00:00
|
|
|
&self.settings,
|
2022-10-28 03:30:44 +00:00
|
|
|
)?)
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn select_diff(
|
|
|
|
&self,
|
|
|
|
ui: &mut Ui,
|
|
|
|
left_tree: &Tree,
|
|
|
|
right_tree: &Tree,
|
|
|
|
instructions: &str,
|
|
|
|
interactive: bool,
|
|
|
|
matcher: &dyn Matcher,
|
|
|
|
) -> Result<TreeId, CommandError> {
|
|
|
|
if interactive {
|
2022-12-18 04:00:07 +00:00
|
|
|
Ok(crate::merge_tools::edit_diff(
|
2022-09-22 04:44:46 +00:00
|
|
|
ui,
|
|
|
|
left_tree,
|
|
|
|
right_tree,
|
|
|
|
instructions,
|
|
|
|
self.base_ignores(),
|
2023-01-04 08:50:08 +00:00
|
|
|
&self.settings,
|
2022-09-22 04:44:46 +00:00
|
|
|
)?)
|
|
|
|
} else if matcher.visit(&RepoPath::root()) == Visit::AllRecursively {
|
|
|
|
// Optimization for a common case
|
|
|
|
Ok(right_tree.id().clone())
|
|
|
|
} else {
|
|
|
|
let mut tree_builder = self.repo().store().tree_builder(left_tree.id().clone());
|
|
|
|
for (repo_path, diff) in left_tree.diff(right_tree, matcher) {
|
|
|
|
match diff.into_options().1 {
|
|
|
|
Some(value) => {
|
|
|
|
tree_builder.set(repo_path, value);
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
tree_builder.remove(repo_path);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(tree_builder.write_tree())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn start_transaction(&self, description: &str) -> Transaction {
|
2022-11-13 06:29:06 +00:00
|
|
|
let mut tx = self.repo.start_transaction(&self.settings, description);
|
2022-09-22 04:44:46 +00:00
|
|
|
// TODO: Either do better shell-escaping here or store the values in some list
|
|
|
|
// type (which we currently don't have).
|
|
|
|
let shell_escape = |arg: &String| {
|
|
|
|
if arg.as_bytes().iter().all(|b| {
|
|
|
|
matches!(b,
|
|
|
|
b'A'..=b'Z'
|
|
|
|
| b'a'..=b'z'
|
|
|
|
| b'0'..=b'9'
|
|
|
|
| b','
|
|
|
|
| b'-'
|
|
|
|
| b'.'
|
|
|
|
| b'/'
|
|
|
|
| b':'
|
|
|
|
| b'@'
|
|
|
|
| b'_'
|
|
|
|
)
|
|
|
|
}) {
|
|
|
|
arg.clone()
|
|
|
|
} else {
|
|
|
|
format!("'{}'", arg.replace('\'', "\\'"))
|
|
|
|
}
|
|
|
|
};
|
2022-11-24 07:46:04 +00:00
|
|
|
let mut quoted_strings = vec!["jj".to_string()];
|
|
|
|
quoted_strings.extend(self.string_args.iter().skip(1).map(shell_escape));
|
2022-09-22 04:44:46 +00:00
|
|
|
tx.set_tag("args".to_string(), quoted_strings.join(" "));
|
|
|
|
tx
|
|
|
|
}
|
|
|
|
|
|
|
|
    /// Finishes a transaction: rebases descendants, syncs colocated git refs,
    /// commits the transaction, updates the on-disk working copy, and warns
    /// if the user's name/email are still the placeholders.
    ///
    /// The ordering matters: git export runs against the mutable repo before
    /// `tx.commit()`, and the working copy is checked out only after the new
    /// repo view is committed.
    pub fn finish_transaction(
        &mut self,
        ui: &mut Ui,
        mut tx: Transaction,
    ) -> Result<(), CommandError> {
        let mut_repo = tx.mut_repo();
        let store = mut_repo.store().clone();
        if !mut_repo.has_changes() {
            // Nothing to commit; don't record an empty operation.
            writeln!(ui, "Nothing changed.")?;
            return Ok(());
        }
        let num_rebased = mut_repo.rebase_descendants(&self.settings)?;
        if num_rebased > 0 {
            writeln!(ui, "Rebased {num_rebased} descendant commits")?;
        }
        if self.working_copy_shared_with_git {
            // Colocated git repo: move git's HEAD and export branches.
            self.export_head_to_git(mut_repo)?;
            let git_repo = self.repo.store().git_repo().unwrap();
            let failed_branches = git::export_refs(mut_repo, &git_repo)?;
            print_failed_git_export(ui, &failed_branches)?;
        }
        // Remember the pre-transaction working-copy commit so we can diff
        // against it when checking out the new one below.
        let maybe_old_commit = tx
            .base_repo()
            .view()
            .get_wc_commit_id(&self.workspace_id())
            .map(|commit_id| store.get_commit(commit_id))
            .transpose()?;
        self.repo = tx.commit();
        if self.may_update_working_copy {
            let stats = update_working_copy(
                ui,
                &self.repo,
                &self.workspace_id(),
                self.workspace.working_copy_mut(),
                maybe_old_commit.as_ref(),
                &self.settings,
            )?;
            if let Some(stats) = stats {
                print_checkout_stats(ui, stats)?;
            }
        }
        let settings = &self.settings;
        // Nudge the user to configure identity if placeholders are in use.
        if settings.user_name() == UserSettings::user_name_placeholder()
            || settings.user_email() == UserSettings::user_email_placeholder()
        {
            writeln!(
                ui.warning(),
                r#"Name and email not configured. Add something like the following to $HOME/.jjconfig.toml:
  user.name = "Some One"
  user.email = "someone@example.com""#
            )?;
        }
        Ok(())
    }
|
|
|
|
}
|
|
|
|
|
2023-01-10 00:53:22 +00:00
|
|
|
/// Locates the workspace to operate on (either `--repository` or the closest
/// ancestor of `cwd` containing a `.jj/` directory) and initializes a
/// `WorkspaceLoader` for it, mapping load failures to user-facing errors.
fn init_workspace_loader(
    cwd: &Path,
    global_args: &GlobalArgs,
) -> Result<WorkspaceLoader, CommandError> {
    let workspace_root = if let Some(path) = global_args.repository.as_ref() {
        cwd.join(path)
    } else {
        // Walk up from cwd; fall back to cwd itself so the error message
        // below refers to a concrete path.
        cwd.ancestors()
            .find(|path| path.join(".jj").is_dir())
            .unwrap_or(cwd)
            .to_owned()
    };
    WorkspaceLoader::init(&workspace_root).map_err(|err| match err {
        WorkspaceLoadError::NoWorkspaceHere(wc_path) => {
            // Prefer user-specified workspace_path_str instead of absolute wc_path.
            let workspace_path_str = global_args.repository.as_deref().unwrap_or(".");
            let message = format!(r#"There is no jj repo in "{workspace_path_str}""#);
            let git_dir = wc_path.join(".git");
            if git_dir.is_dir() {
                // Looks like a plain git checkout; suggest colocating jj in it.
                user_error_with_hint(
                    message,
                    "It looks like this is a git repo. You can create a jj repo backed by it by \
                     running this:
jj init --git-repo=.",
                )
            } else {
                user_error(message)
            }
        }
        WorkspaceLoadError::RepoDoesNotExist(repo_dir) => user_error(format!(
            "The repository directory at {} is missing. Was it moved?",
            repo_dir.display(),
        )),
        WorkspaceLoadError::Path(e) => user_error(format!("{}: {}", e, e.error)),
        WorkspaceLoadError::NonUnicodePath => user_error(err.to_string()),
    })
}
|
|
|
|
|
2022-10-02 18:59:26 +00:00
|
|
|
/// How the working copy's recorded operation relates to the loaded repo's
/// operation when they disagree. Produced by `check_stale_working_copy`.
#[derive(Debug, Error)]
pub enum StaleWorkingCopyError {
    // The repo op is ahead of the working copy's op (wc needs updating).
    #[error("The working copy is behind the latest operation")]
    WorkingCopyStale,
    // Neither op is an ancestor of the other, but they share an ancestor.
    #[error("The working copy is a sibling of the latest operation")]
    SiblingOperation,
    // The two operations share no common ancestor at all.
    #[error("The working copy is unrelated to the latest operation")]
    UnrelatedOperation,
}
|
|
|
|
|
2022-10-02 17:09:46 +00:00
|
|
|
/// Compares the locked working copy's state with the loaded repo.
///
/// If the trees already match, the repo is returned unchanged. Otherwise the
/// working copy's operation and the repo's operation are related via their
/// closest common ancestor in the operation graph: if the working copy is
/// newer, the repo is reloaded at the working copy's operation; if it is
/// older, sibling, or unrelated, the corresponding `StaleWorkingCopyError`
/// is returned.
pub fn check_stale_working_copy(
    locked_wc: &LockedWorkingCopy,
    wc_commit: &Commit,
    repo: Arc<ReadonlyRepo>,
) -> Result<Arc<ReadonlyRepo>, StaleWorkingCopyError> {
    // Check if the working copy's tree matches the repo's view
    let wc_tree_id = locked_wc.old_tree_id().clone();
    if *wc_commit.tree_id() == wc_tree_id {
        Ok(repo)
    } else {
        // Trees differ; figure out how the two operations relate.
        let wc_operation_data = repo
            .op_store()
            .read_operation(locked_wc.old_operation_id())
            .unwrap();
        let wc_operation = Operation::new(
            repo.op_store().clone(),
            locked_wc.old_operation_id().clone(),
            wc_operation_data,
        );
        let repo_operation = repo.operation();
        let maybe_ancestor_op = dag_walk::closest_common_node(
            [wc_operation.clone()],
            [repo_operation.clone()],
            &|op: &Operation| op.parents(),
            &|op: &Operation| op.id().clone(),
        );
        if let Some(ancestor_op) = maybe_ancestor_op {
            if ancestor_op.id() == repo_operation.id() {
                // The working copy was updated since we loaded the repo. We reload the repo
                // at the working copy's operation.
                Ok(repo.reload_at(&wc_operation))
            } else if ancestor_op.id() == wc_operation.id() {
                // The working copy was not updated when some repo operation committed,
                // meaning that it's stale compared to the repo view.
                Err(StaleWorkingCopyError::WorkingCopyStale)
            } else {
                // Divergent: common ancestor is neither op.
                Err(StaleWorkingCopyError::SiblingOperation)
            }
        } else {
            // No common ancestor at all.
            Err(StaleWorkingCopyError::UnrelatedOperation)
        }
    }
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn print_checkout_stats(ui: &mut Ui, stats: CheckoutStats) -> Result<(), std::io::Error> {
|
|
|
|
if stats.added_files > 0 || stats.updated_files > 0 || stats.removed_files > 0 {
|
|
|
|
writeln!(
|
|
|
|
ui,
|
|
|
|
"Added {} files, modified {} files, removed {} files",
|
|
|
|
stats.added_files, stats.updated_files, stats.removed_files
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-11-25 04:17:39 +00:00
|
|
|
/// Warns about branches that could not be exported to the backing git repo,
/// listing each one and printing a hint about git's ref-name restrictions.
pub fn print_failed_git_export(
    ui: &mut Ui,
    failed_branches: &[String],
) -> Result<(), std::io::Error> {
    if !failed_branches.is_empty() {
        writeln!(ui.warning(), "Failed to export some branches:")?;
        let mut formatter = ui.stderr_formatter();
        for branch_name in failed_branches {
            formatter.write_str("  ")?;
            formatter.with_label("branch", |formatter| formatter.write_str(branch_name))?;
            formatter.write_str("\n")?;
        }
        // Release the stderr formatter before borrowing ui again for the hint.
        drop(formatter);
        writeln!(
            ui.hint(),
            r#"Hint: Git doesn't allow a branch name that looks like a parent directory of
another (e.g. `foo` and `foo/bar`). Try to rename the branches that failed to
export or their "parent" branches."#,
        )?;
    }
    Ok(())
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
/// Expands "~/" to "$HOME/" as Git seems to do for e.g. core.excludesFile.
///
/// Paths without the "~/" prefix, and "~/" paths when `$HOME` is unset, are
/// returned unchanged.
fn expand_git_path(path_str: String) -> PathBuf {
    let expanded = path_str.strip_prefix("~/").and_then(|remainder| {
        std::env::var("HOME")
            .ok()
            .map(|home_dir| PathBuf::from(home_dir).join(remainder))
    });
    expanded.unwrap_or_else(|| PathBuf::from(path_str))
}
|
|
|
|
|
|
|
|
fn resolve_op_for_load(
|
|
|
|
op_store: &Arc<dyn OpStore>,
|
2022-12-15 22:13:00 +00:00
|
|
|
op_heads_store: &Arc<dyn OpHeadsStore>,
|
2022-09-22 04:44:46 +00:00
|
|
|
op_str: &str,
|
|
|
|
) -> Result<OpHeads, CommandError> {
|
|
|
|
if op_str == "@" {
|
|
|
|
Ok(op_heads_store.get_heads(op_store)?)
|
2022-12-12 10:58:26 +00:00
|
|
|
} else {
|
|
|
|
let get_current_op = || match op_heads_store.get_heads(op_store)? {
|
|
|
|
OpHeads::Single(current_op) => Ok(current_op),
|
2022-11-12 22:38:43 +00:00
|
|
|
OpHeads::Unresolved { .. } => Err(user_error(format!(
|
2022-09-22 04:44:46 +00:00
|
|
|
r#"The "{op_str}" expression resolved to more than one operation"#
|
|
|
|
))),
|
2022-12-12 10:58:26 +00:00
|
|
|
};
|
|
|
|
let operation = resolve_single_op(op_store, op_heads_store, get_current_op, op_str)?;
|
2022-09-22 04:44:46 +00:00
|
|
|
Ok(OpHeads::Single(operation))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn resolve_single_op(
|
|
|
|
op_store: &Arc<dyn OpStore>,
|
2022-12-15 22:13:00 +00:00
|
|
|
op_heads_store: &Arc<dyn OpHeadsStore>,
|
2022-12-12 10:58:26 +00:00
|
|
|
get_current_op: impl FnOnce() -> Result<Operation, CommandError>,
|
2022-09-22 04:44:46 +00:00
|
|
|
op_str: &str,
|
|
|
|
) -> Result<Operation, CommandError> {
|
2022-12-12 11:18:06 +00:00
|
|
|
let op_symbol = op_str.trim_end_matches('-');
|
|
|
|
let op_postfix = &op_str[op_symbol.len()..];
|
|
|
|
let mut operation = match op_symbol {
|
|
|
|
"@" => get_current_op(),
|
|
|
|
s => resolve_single_op_from_store(op_store, op_heads_store, s),
|
|
|
|
}?;
|
|
|
|
for _ in op_postfix.chars() {
|
|
|
|
operation = match operation.parents().as_slice() {
|
|
|
|
[op] => Ok(op.clone()),
|
|
|
|
[] => Err(user_error(format!(
|
|
|
|
r#"The "{op_str}" expression resolved to no operations"#
|
|
|
|
))),
|
|
|
|
[_, _, ..] => Err(user_error(format!(
|
2022-09-22 04:44:46 +00:00
|
|
|
r#"The "{op_str}" expression resolved to more than one operation"#
|
2022-12-12 11:18:06 +00:00
|
|
|
))),
|
|
|
|
}?;
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
2022-12-12 11:18:06 +00:00
|
|
|
Ok(operation)
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn find_all_operations(
|
|
|
|
op_store: &Arc<dyn OpStore>,
|
2022-12-15 22:13:00 +00:00
|
|
|
op_heads_store: &Arc<dyn OpHeadsStore>,
|
2022-09-22 04:44:46 +00:00
|
|
|
) -> Vec<Operation> {
|
|
|
|
let mut visited = HashSet::new();
|
|
|
|
let mut work: VecDeque<_> = op_heads_store.get_op_heads().into_iter().collect();
|
|
|
|
let mut operations = vec![];
|
|
|
|
while let Some(op_id) = work.pop_front() {
|
|
|
|
if visited.insert(op_id.clone()) {
|
|
|
|
let store_operation = op_store.read_operation(&op_id).unwrap();
|
|
|
|
work.extend(store_operation.parents.iter().cloned());
|
|
|
|
let operation = Operation::new(op_store.clone(), op_id, store_operation);
|
|
|
|
operations.push(operation);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
operations
|
|
|
|
}
|
|
|
|
|
|
|
|
fn resolve_single_op_from_store(
|
|
|
|
op_store: &Arc<dyn OpStore>,
|
2022-12-15 22:13:00 +00:00
|
|
|
op_heads_store: &Arc<dyn OpHeadsStore>,
|
2022-09-22 04:44:46 +00:00
|
|
|
op_str: &str,
|
|
|
|
) -> Result<Operation, CommandError> {
|
|
|
|
if op_str.is_empty() || !op_str.as_bytes().iter().all(|b| b.is_ascii_hexdigit()) {
|
2022-11-12 22:38:43 +00:00
|
|
|
return Err(user_error(format!(
|
2022-12-15 02:30:06 +00:00
|
|
|
"Operation ID \"{op_str}\" is not a valid hexadecimal prefix"
|
2022-09-22 04:44:46 +00:00
|
|
|
)));
|
|
|
|
}
|
|
|
|
if let Ok(binary_op_id) = hex::decode(op_str) {
|
|
|
|
let op_id = OperationId::new(binary_op_id);
|
|
|
|
match op_store.read_operation(&op_id) {
|
|
|
|
Ok(operation) => {
|
|
|
|
return Ok(Operation::new(op_store.clone(), op_id, operation));
|
|
|
|
}
|
|
|
|
Err(OpStoreError::NotFound) => {
|
|
|
|
// Fall through
|
|
|
|
}
|
|
|
|
Err(err) => {
|
|
|
|
return Err(CommandError::InternalError(format!(
|
|
|
|
"Failed to read operation: {err}"
|
|
|
|
)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
let mut matches = vec![];
|
|
|
|
for op in find_all_operations(op_store, op_heads_store) {
|
|
|
|
if op.id().hex().starts_with(op_str) {
|
|
|
|
matches.push(op);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if matches.is_empty() {
|
2022-12-15 02:30:06 +00:00
|
|
|
Err(user_error(format!("No operation ID matching \"{op_str}\"")))
|
2022-09-22 04:44:46 +00:00
|
|
|
} else if matches.len() == 1 {
|
|
|
|
Ok(matches.pop().unwrap())
|
|
|
|
} else {
|
2022-11-12 22:38:43 +00:00
|
|
|
Err(user_error(format!(
|
2022-12-15 02:30:06 +00:00
|
|
|
"Operation ID prefix \"{op_str}\" is ambiguous"
|
2022-09-22 04:44:46 +00:00
|
|
|
)))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-01-04 09:20:11 +00:00
|
|
|
fn load_revset_aliases(
|
|
|
|
ui: &mut Ui,
|
|
|
|
settings: &UserSettings,
|
|
|
|
) -> Result<RevsetAliasesMap, CommandError> {
|
2022-11-25 10:27:13 +00:00
|
|
|
const TABLE_KEY: &str = "revset-aliases";
|
|
|
|
let mut aliases_map = RevsetAliasesMap::new();
|
2023-01-04 09:20:11 +00:00
|
|
|
if let Ok(table) = settings.config().get_table(TABLE_KEY) {
|
2022-11-25 10:27:13 +00:00
|
|
|
for (decl, value) in table.into_iter().sorted_by(|a, b| a.0.cmp(&b.0)) {
|
|
|
|
let r = value
|
|
|
|
.into_string()
|
|
|
|
.map_err(|e| e.to_string())
|
|
|
|
.and_then(|v| aliases_map.insert(&decl, v).map_err(|e| e.to_string()));
|
|
|
|
if let Err(s) = r {
|
2023-01-12 06:36:59 +00:00
|
|
|
writeln!(ui.warning(), r#"Failed to load "{TABLE_KEY}.{decl}": {s}"#)?;
|
2022-11-25 10:27:13 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Ok(aliases_map)
|
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn resolve_base_revs(
|
|
|
|
workspace_command: &WorkspaceCommandHelper,
|
2022-11-28 14:32:44 +00:00
|
|
|
revisions: &[RevisionArg],
|
2022-09-22 04:44:46 +00:00
|
|
|
) -> Result<Vec<Commit>, CommandError> {
|
|
|
|
let mut commits = vec![];
|
|
|
|
for revision_str in revisions {
|
|
|
|
let commit = workspace_command.resolve_single_rev(revision_str)?;
|
|
|
|
if let Some(i) = commits.iter().position(|c| c == &commit) {
|
2022-11-12 22:38:43 +00:00
|
|
|
return Err(user_error(format!(
|
2022-09-22 04:44:46 +00:00
|
|
|
r#"Revset "{}" and "{}" resolved to the same revision {}"#,
|
2022-11-28 14:32:44 +00:00
|
|
|
&revisions[i].0,
|
|
|
|
&revision_str.0,
|
2022-09-22 04:44:46 +00:00
|
|
|
short_commit_hash(commit.id()),
|
|
|
|
)));
|
|
|
|
}
|
|
|
|
commits.push(commit);
|
|
|
|
}
|
|
|
|
|
|
|
|
let root_commit_id = workspace_command.repo().store().root_commit_id();
|
|
|
|
if commits.len() >= 2 && commits.iter().any(|c| c.id() == root_commit_id) {
|
2022-11-12 22:38:43 +00:00
|
|
|
Err(user_error("Cannot merge with root revision"))
|
2022-09-22 04:44:46 +00:00
|
|
|
} else {
|
|
|
|
Ok(commits)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-10-02 17:09:46 +00:00
|
|
|
/// Checks out the workspace's (possibly new) working-copy commit on disk and
/// prints the new position when it changed.
///
/// Returns `Ok(None)` when there was nothing to check out (workspace deleted,
/// or the tree was already current); otherwise returns the checkout stats.
pub fn update_working_copy(
    ui: &mut Ui,
    repo: &Arc<ReadonlyRepo>,
    workspace_id: &WorkspaceId,
    wc: &mut WorkingCopy,
    old_commit: Option<&Commit>,
    settings: &UserSettings,
) -> Result<Option<CheckoutStats>, CommandError> {
    let new_commit_id = match repo.view().get_wc_commit_id(workspace_id) {
        Some(new_commit_id) => new_commit_id,
        None => {
            // It seems the workspace was deleted, so we shouldn't try to update it.
            return Ok(None);
        }
    };
    let new_commit = repo.store().get_commit(new_commit_id)?;
    let old_tree_id = old_commit.map(|commit| commit.tree_id().clone());
    let stats = if Some(new_commit.tree_id()) != old_tree_id.as_ref() {
        // TODO: CheckoutError::ConcurrentCheckout should probably just result in a
        // warning for most commands (but be an error for the checkout command)
        let stats = wc
            .check_out(
                repo.op_id().clone(),
                old_tree_id.as_ref(),
                &new_commit.tree(),
            )
            .map_err(|err| {
                CommandError::InternalError(format!(
                    "Failed to check out commit {}: {}",
                    new_commit.id().hex(),
                    err
                ))
            })?;
        Some(stats)
    } else {
        // Record new operation id which represents the latest working-copy state
        let locked_wc = wc.start_mutation();
        locked_wc.finish(repo.op_id().clone());
        None
    };
    // Tell the user where the working copy is now, but only if it moved.
    if Some(&new_commit) != old_commit {
        ui.write("Working copy now at: ")?;
        write_commit_summary(
            ui.stdout_formatter().as_mut(),
            repo.as_repo_ref(),
            workspace_id,
            &new_commit,
            settings,
        )?;
        ui.write("\n")?;
    }
    Ok(stats)
}
|
|
|
|
|
2022-10-06 10:20:51 +00:00
|
|
|
pub fn write_commit_summary(
|
|
|
|
formatter: &mut dyn Formatter,
|
|
|
|
repo: RepoRef,
|
|
|
|
workspace_id: &WorkspaceId,
|
|
|
|
commit: &Commit,
|
|
|
|
settings: &UserSettings,
|
|
|
|
) -> std::io::Result<()> {
|
|
|
|
let template_string = settings
|
|
|
|
.config()
|
|
|
|
.get_string("template.commit_summary")
|
2022-11-05 01:24:20 +00:00
|
|
|
.unwrap_or_else(|_| String::from(r#"commit_id.short() " " description.first_line()"#));
|
2022-10-06 10:20:51 +00:00
|
|
|
let template =
|
|
|
|
crate::template_parser::parse_commit_template(repo, workspace_id, &template_string);
|
|
|
|
let mut template_writer = TemplateFormatter::new(template, formatter);
|
|
|
|
template_writer.format(commit)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-12-13 06:22:08 +00:00
|
|
|
pub fn write_config_entry(
|
|
|
|
ui: &mut Ui,
|
|
|
|
path: &str,
|
|
|
|
value: config::Value,
|
|
|
|
) -> Result<(), CommandError> {
|
|
|
|
match value.kind {
|
|
|
|
// Handle table values specially to render each child nicely on its own line.
|
|
|
|
config::ValueKind::Table(table) => {
|
|
|
|
// TODO: Remove sorting when config crate maintains deterministic ordering.
|
|
|
|
for (key, table_val) in table.into_iter().sorted_by_key(|(k, _)| k.to_owned()) {
|
|
|
|
let key_path = match path {
|
|
|
|
"" => key,
|
|
|
|
_ => format!("{path}.{key}"),
|
|
|
|
};
|
|
|
|
write_config_entry(ui, key_path.as_str(), table_val)?;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => writeln!(ui, "{path}={}", serialize_config_value(value))?,
|
|
|
|
};
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: Use a proper TOML library to serialize instead.
|
|
|
|
fn serialize_config_value(value: config::Value) -> String {
|
|
|
|
match value.kind {
|
|
|
|
config::ValueKind::Table(table) => format!(
|
|
|
|
"{{{}}}",
|
|
|
|
// TODO: Remove sorting when config crate maintains deterministic ordering.
|
|
|
|
table
|
|
|
|
.into_iter()
|
|
|
|
.sorted_by_key(|(k, _)| k.to_owned())
|
|
|
|
.map(|(k, v)| format!("{k}={}", serialize_config_value(v)))
|
|
|
|
.join(", ")
|
|
|
|
),
|
|
|
|
config::ValueKind::Array(vals) => format!(
|
|
|
|
"[{}]",
|
|
|
|
vals.into_iter().map(serialize_config_value).join(", ")
|
|
|
|
),
|
|
|
|
config::ValueKind::String(val) => format!("{val:?}"),
|
|
|
|
_ => value.to_string(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-01-05 04:55:20 +00:00
|
|
|
pub fn run_ui_editor(settings: &UserSettings, edit_path: &PathBuf) -> Result<(), CommandError> {
|
|
|
|
let editor: FullCommandArgs = settings
|
|
|
|
.config()
|
|
|
|
.get("ui.editor")
|
|
|
|
.unwrap_or_else(|_| "pico".into());
|
|
|
|
let exit_status = editor
|
|
|
|
.to_command()
|
|
|
|
.arg(edit_path)
|
|
|
|
.status()
|
|
|
|
.map_err(|_| user_error(format!("Failed to run editor '{editor}'")))?;
|
|
|
|
if !exit_status.success() {
|
|
|
|
return Err(user_error(format!(
|
|
|
|
"Editor '{editor}' exited with an error"
|
|
|
|
)));
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-01-08 00:23:25 +00:00
|
|
|
// TODO: Consider basing this function on `write_commit_summary`. It will need
|
|
|
|
// more arguments passed to it, but the output will be more consistent.
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn short_commit_description(commit: &Commit) -> String {
|
|
|
|
let first_line = commit.description().split('\n').next().unwrap();
|
2023-01-08 00:23:25 +00:00
|
|
|
format!(
|
|
|
|
"{} ({})",
|
|
|
|
short_commit_hash(commit.id()),
|
|
|
|
if first_line.is_empty() {
|
|
|
|
"no description set"
|
|
|
|
} else {
|
|
|
|
first_line
|
|
|
|
}
|
|
|
|
)
|
2022-09-22 04:44:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn short_commit_hash(commit_id: &CommitId) -> String {
|
|
|
|
commit_id.hex()[0..12].to_string()
|
|
|
|
}
|
|
|
|
|
2023-01-11 18:30:31 +00:00
|
|
|
pub fn short_change_hash(change_id: &ChangeId) -> String {
|
|
|
|
change_id.hex()[0..12].to_string()
|
|
|
|
}
|
|
|
|
|
2022-09-22 04:44:46 +00:00
|
|
|
pub fn short_operation_hash(operation_id: &OperationId) -> String {
|
|
|
|
operation_id.hex()[0..12].to_string()
|
|
|
|
}
|
|
|
|
|
|
|
|
// NOTE: the `///` doc comments on this struct and its fields are rendered by
// clap as the CLI `--help` text — editing them changes user-visible output.
/// Jujutsu (An experimental VCS)
///
/// To get started, see the tutorial at https://github.com/martinvonz/jj/blob/main/docs/tutorial.md.
#[derive(clap::Parser, Clone, Debug)]
#[command(
    name = "jj",
    author = "Martin von Zweigbergk <martinvonz@google.com>",
    version
)]
pub struct Args {
    // All global flags live on GlobalArgs so subcommands inherit them.
    #[command(flatten)]
    pub global_args: GlobalArgs,
}
|
|
|
|
|
|
|
|
// Global (every-subcommand) flags. The `///` doc comments below are clap help
// text — user-visible output — so treat them as behavior, not documentation.
#[derive(clap::Args, Clone, Debug)]
pub struct GlobalArgs {
    /// Path to repository to operate on
    ///
    /// By default, Jujutsu searches for the closest .jj/ directory in an
    /// ancestor of the current working directory.
    #[arg(
        long,
        short = 'R',
        global = true,
        help_heading = "Global Options",
        value_hint = clap::ValueHint::DirPath,
    )]
    pub repository: Option<String>,
    /// Don't commit the working copy
    ///
    /// By default, Jujutsu commits the working copy on every command, unless
    /// you load the repo at a specific operation with `--at-operation`. If
    /// you want to avoid committing the working and instead see a possibly
    /// stale working copy commit, you can use `--no-commit-working-copy`.
    /// This may be useful e.g. in a command prompt, especially if you have
    /// another process that commits the working copy.
    #[arg(long, global = true, help_heading = "Global Options")]
    pub no_commit_working_copy: bool,
    /// Operation to load the repo at
    ///
    /// Operation to load the repo at. By default, Jujutsu loads the repo at the
    /// most recent operation. You can use `--at-op=<operation ID>` to see what
    /// the repo looked like at an earlier operation. For example `jj
    /// --at-op=<operation ID> st` will show you what `jj st` would have
    /// shown you when the given operation had just finished.
    ///
    /// Use `jj op log` to find the operation ID you want. Any unambiguous
    /// prefix of the operation ID is enough.
    ///
    /// When loading the repo at an earlier operation, the working copy will not
    /// be automatically committed.
    ///
    /// It is possible to run mutating commands when loading the repo at an
    /// earlier operation. Doing that is equivalent to having run concurrent
    /// commands starting at the earlier operation. There's rarely a reason to
    /// do that, but it is possible.
    #[arg(
        long,
        visible_alias = "at-op",
        global = true,
        help_heading = "Global Options",
        default_value = "@"
    )]
    pub at_operation: String,
    /// Enable verbose logging
    #[arg(long, short = 'v', global = true, help_heading = "Global Options")]
    pub verbose: bool,

    // Args that must be parsed before full config is loaded (see EarlyArgs).
    #[command(flatten)]
    pub early_args: EarlyArgs,
}
|
|
|
|
|
|
|
|
// Arguments parsed in a lenient pre-pass (see `handle_early_args`) because
// they must take effect before the real clap parse, e.g. before help output
// is rendered. `///` comments below double as clap help text.
#[derive(clap::Args, Clone, Debug)]
pub struct EarlyArgs {
    /// When to colorize output (always, never, auto)
    #[arg(
        long,
        value_name = "WHEN",
        global = true,
        help_heading = "Global Options"
    )]
    pub color: Option<ColorChoice>,
    /// Disable the pager
    // NOTE(review): `value_name` looks like a leftover from copying the
    // `--color` arg above; a `SetTrue` flag takes no value — confirm.
    #[arg(
        long,
        value_name = "WHEN",
        global = true,
        help_heading = "Global Options",
        action = ArgAction::SetTrue
    )]
    // Parsing with ignore_errors will crash if this is bool, so use
    // Option<bool>.
    pub no_pager: Option<bool>,
    /// Additional configuration options
    // TODO: Introduce a `--config` option with simpler syntax for simple
    // cases, designed so that `--config ui.color=auto` works
    #[arg(
        long,
        value_name = "TOML",
        global = true,
        help_heading = "Global Options"
    )]
    pub config_toml: Vec<String>,
}
|
|
|
|
|
2022-12-21 04:47:10 +00:00
|
|
|
/// `-m/--message` argument which should be terminated with `\n`.
///
/// Based on the Git CLI behavior. See `opt_parse_m()` and `cleanup_mode` in
/// `git/builtin/commit.c`.
// Construct via `From<String>`, which appends a trailing newline when it is
// missing (see `complete_newline`).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DescriptionArg(String);
|
|
|
|
|
|
|
|
impl DescriptionArg {
|
|
|
|
pub fn as_str(&self) -> &str {
|
|
|
|
self.0.as_str()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl From<String> for DescriptionArg {
|
|
|
|
fn from(mut s: String) -> Self {
|
|
|
|
complete_newline(&mut s);
|
|
|
|
DescriptionArg(s)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl From<&DescriptionArg> for String {
    /// Copies the wrapped description text into an owned `String`.
    fn from(arg: &DescriptionArg) -> Self {
        arg.0.clone()
    }
}
|
|
|
|
|
|
|
|
impl AsRef<str> for DescriptionArg {
|
|
|
|
fn as_ref(&self) -> &str {
|
|
|
|
self.as_str()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-12-21 01:51:44 +00:00
|
|
|
/// Appends a trailing `\n` to `s` unless it is empty or already
/// newline-terminated.
pub fn complete_newline(s: &mut String) {
    match s.chars().last() {
        // Empty or already terminated: leave untouched.
        None | Some('\n') => {}
        Some(_) => s.push('\n'),
    }
}
|
|
|
|
|
2022-11-28 14:32:44 +00:00
|
|
|
// A single revision/revset expression as given on the command line.
// Guaranteed non-empty: `RevisionArgValueParser` delegates validation to
// clap's `NonEmptyStringValueParser`.
#[derive(Clone, Debug)]
pub struct RevisionArg(String);
|
|
|
|
|
|
|
|
impl Deref for RevisionArg {
|
|
|
|
type Target = str;
|
|
|
|
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
self.0.as_str()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// clap value parser producing `RevisionArg`; wired up to the type through
// its `ValueParserFactory` impl.
#[derive(Clone)]
pub struct RevisionArgValueParser;
|
|
|
|
|
|
|
|
impl TypedValueParser for RevisionArgValueParser {
|
|
|
|
type Value = RevisionArg;
|
|
|
|
|
|
|
|
fn parse_ref(
|
|
|
|
&self,
|
|
|
|
cmd: &Command,
|
|
|
|
arg: Option<&Arg>,
|
|
|
|
value: &OsStr,
|
|
|
|
) -> Result<Self::Value, Error> {
|
|
|
|
let string = NonEmptyStringValueParser::new().parse(cmd, arg, value.to_os_string())?;
|
|
|
|
Ok(RevisionArg(string))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Makes `RevisionArg` usable directly as a clap argument type by telling
// clap which parser to instantiate for it.
impl ValueParserFactory for RevisionArg {
    type Parser = RevisionArgValueParser;

    fn value_parser() -> RevisionArgValueParser {
        RevisionArgValueParser
    }
}
|
|
|
|
|
2022-09-22 06:29:42 +00:00
|
|
|
/// Expands user-defined aliases (from the `[alias]` config table) in
/// `string_args` until the subcommand resolves to a real command.
///
/// Returns the (possibly rewritten) argument vector. Errors on recursive
/// alias definitions and on alias values that are not string lists.
fn resolve_aliases(
    config: &config::Config,
    app: &clap::Command,
    string_args: &[String],
) -> Result<Vec<String>, CommandError> {
    let mut aliases_map = config.get_table("alias").unwrap_or_default();
    let mut resolved_aliases = HashSet::new();
    let mut string_args = string_args.to_vec();
    // Collect the built-in command names (and their clap-level aliases) so
    // we can tell a real subcommand from a user-defined alias.
    let mut real_commands = HashSet::new();
    for command in app.get_subcommands() {
        real_commands.insert(command.get_name().to_string());
        for alias in command.get_all_aliases() {
            real_commands.insert(alias.to_string());
        }
    }
    loop {
        // Allow unknown subcommands so an alias name parses as an
        // "external" subcommand instead of failing outright.
        let app_clone = app.clone().allow_external_subcommands(true);
        // Parse errors are deliberately swallowed here; the caller performs
        // the real parse later and reports them.
        let matches = app_clone.try_get_matches_from(&string_args).ok();
        if let Some((command_name, submatches)) = matches.as_ref().and_then(|m| m.subcommand()) {
            if !real_commands.contains(command_name) {
                let alias_name = command_name.to_string();
                // Arguments following the alias name. The unwrap() is
                // expected to be safe because the caller (expand_args)
                // already rejected non-UTF-8 arguments.
                let alias_args = submatches
                    .get_many::<OsString>("")
                    .unwrap_or_default()
                    .map(|arg| arg.to_str().unwrap().to_string())
                    .collect_vec();
                if resolved_aliases.contains(&alias_name) {
                    return Err(user_error(format!(
                        r#"Recursive alias definition involving "{alias_name}""#
                    )));
                }
                if let Some(value) = aliases_map.remove(&alias_name) {
                    if let Ok(alias_definition) = value.try_deserialize::<Vec<String>>() {
                        assert!(string_args.ends_with(&alias_args));
                        // Drop the trailing `<alias> <args...>` tokens (the
                        // extra 1 is the alias name itself), then splice in
                        // the definition followed by the original args.
                        string_args.truncate(string_args.len() - 1 - alias_args.len());
                        string_args.extend(alias_definition);
                        string_args.extend_from_slice(&alias_args);
                        resolved_aliases.insert(alias_name.clone());
                        // The expansion may itself start with another alias.
                        continue;
                    } else {
                        return Err(user_error(format!(
                            r#"Alias definition for "{alias_name}" must be a string list"#
                        )));
                    }
                } else {
                    // Not a real command and not an alias, so return what we've resolved so far
                    return Ok(string_args);
                }
            }
        }
        // No more alias commands, or hit unknown option
        return Ok(string_args);
    }
}
|
2022-09-22 06:29:42 +00:00
|
|
|
|
2022-11-22 04:46:50 +00:00
|
|
|
/// Parse args that must be interpreted early, e.g. before printing help.
|
2022-12-14 02:20:32 +00:00
|
|
|
fn handle_early_args(
|
|
|
|
ui: &mut Ui,
|
|
|
|
app: &clap::Command,
|
|
|
|
args: &[String],
|
2023-01-05 06:15:56 +00:00
|
|
|
layered_configs: &mut LayeredConfigs,
|
2022-12-14 02:20:32 +00:00
|
|
|
) -> Result<(), CommandError> {
|
2022-11-22 04:46:50 +00:00
|
|
|
// ignore_errors() bypasses errors like "--help" or missing subcommand
|
2022-12-14 02:20:32 +00:00
|
|
|
let early_matches = app.clone().ignore_errors(true).get_matches_from(args);
|
2022-11-22 04:46:50 +00:00
|
|
|
let mut args: EarlyArgs = EarlyArgs::from_arg_matches(&early_matches).unwrap();
|
|
|
|
|
|
|
|
if let Some(choice) = args.color {
|
2023-01-02 04:38:54 +00:00
|
|
|
args.config_toml.push(format!(r#"ui.color="{choice}""#));
|
2022-11-22 04:46:50 +00:00
|
|
|
}
|
|
|
|
if args.no_pager.unwrap_or_default() {
|
|
|
|
ui.set_pagination(crate::ui::PaginationChoice::No);
|
|
|
|
}
|
|
|
|
if !args.config_toml.is_empty() {
|
2023-01-05 06:15:56 +00:00
|
|
|
layered_configs.parse_config_args(&args.config_toml)?;
|
|
|
|
ui.reset(&layered_configs.merge());
|
2022-11-22 04:46:50 +00:00
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-01-04 07:57:10 +00:00
|
|
|
pub fn expand_args(
|
|
|
|
app: &clap::Command,
|
2022-09-22 06:29:42 +00:00
|
|
|
args_os: ArgsOs,
|
2023-01-05 05:52:12 +00:00
|
|
|
config: &config::Config,
|
2023-01-04 07:57:10 +00:00
|
|
|
) -> Result<Vec<String>, CommandError> {
|
2022-09-22 06:29:42 +00:00
|
|
|
let mut string_args: Vec<String> = vec![];
|
|
|
|
for arg_os in args_os {
|
|
|
|
if let Some(string_arg) = arg_os.to_str() {
|
|
|
|
string_args.push(string_arg.to_owned());
|
|
|
|
} else {
|
|
|
|
return Err(CommandError::CliError("Non-utf8 argument".to_string()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-01-05 05:52:12 +00:00
|
|
|
resolve_aliases(config, app, &string_args)
|
2023-01-04 07:57:10 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_args(
|
|
|
|
ui: &mut Ui,
|
2023-01-04 08:12:56 +00:00
|
|
|
app: &clap::Command,
|
2023-01-04 07:57:10 +00:00
|
|
|
tracing_subscription: &TracingSubscription,
|
|
|
|
string_args: &[String],
|
2023-01-05 06:15:56 +00:00
|
|
|
layered_configs: &mut LayeredConfigs,
|
2023-01-04 08:12:56 +00:00
|
|
|
) -> Result<(ArgMatches, Args), CommandError> {
|
2023-01-05 06:15:56 +00:00
|
|
|
handle_early_args(ui, app, string_args, layered_configs)?;
|
2023-01-04 07:57:10 +00:00
|
|
|
let matches = app.clone().try_get_matches_from(string_args)?;
|
2022-10-31 17:49:53 +00:00
|
|
|
|
2022-11-22 04:46:50 +00:00
|
|
|
let args: Args = Args::from_arg_matches(&matches).unwrap();
|
2023-01-03 07:54:17 +00:00
|
|
|
if args.global_args.verbose {
|
|
|
|
// TODO: set up verbose logging as early as possible
|
|
|
|
tracing_subscription.enable_verbose_logging()?;
|
|
|
|
}
|
2023-01-04 08:12:56 +00:00
|
|
|
|
|
|
|
Ok((matches, args))
|
2022-09-22 06:29:42 +00:00
|
|
|
}
|
2022-09-22 07:12:52 +00:00
|
|
|
|
|
|
|
// TODO: Return std::process::ExitCode instead, once our MSRV is >= 1.61
/// Reports `result` to the user and maps it to a process exit code:
/// 0 success, 1 user/config error, 2 CLI usage error, 3 broken pipe,
/// 255 internal error.
#[must_use]
pub fn handle_command_result(ui: &mut Ui, result: Result<(), CommandError>) -> i32 {
    match result {
        Ok(()) => 0,
        Err(CommandError::UserError { message, hint }) => {
            ui.write_error(&format!("Error: {message}\n")).unwrap();
            if let Some(hint) = hint {
                writeln!(ui.hint(), "Hint: {hint}").unwrap();
            }
            1
        }
        Err(CommandError::ConfigError(message)) => {
            ui.write_error(&format!("Config error: {message}\n"))
                .unwrap();
            1
        }
        Err(CommandError::CliError(message)) => {
            ui.write_error(&format!("Error: {message}\n")).unwrap();
            2
        }
        Err(CommandError::ClapCliError(inner)) => {
            // Render with ANSI codes only when the UI has color enabled.
            let clap_str = if ui.color() {
                inner.render().ansi().to_string()
            } else {
                inner.render().to_string()
            };

            // Help output can be long, so route it through the pager.
            match inner.kind() {
                clap::error::ErrorKind::DisplayHelp
                | clap::error::ErrorKind::DisplayHelpOnMissingArgumentOrSubcommand => {
                    ui.request_pager()
                }
                _ => {}
            };
            // Definitions for exit codes and streams come from
            // https://github.com/clap-rs/clap/blob/master/src/error/mod.rs
            match inner.kind() {
                clap::error::ErrorKind::DisplayHelp | clap::error::ErrorKind::DisplayVersion => {
                    ui.write(&clap_str).unwrap();
                    0
                }
                _ => {
                    ui.write_stderr(&clap_str).unwrap();
                    2
                }
            }
        }
        Err(CommandError::BrokenPipe) => 3,
        Err(CommandError::InternalError(message)) => {
            ui.write_error(&format!("Internal error: {message}\n"))
                .unwrap();
            255
        }
    }
}
|
2023-01-03 08:03:33 +00:00
|
|
|
|
|
|
|
/// CLI command builder and runner.
#[must_use]
pub struct CliRunner {
    // Created in `init()`; used by `parse_args` to enable verbose logging.
    tracing_subscription: TracingSubscription,
    // The clap command tree; custom subcommands may be grafted on via
    // `add_subcommand`.
    app: clap::Command,
    // `None` means "use the defaults" (see `run`).
    store_factories: Option<StoreFactories>,
    // Invoked with the parsed matches once setup is complete.
    dispatch_fn: CliDispatchFn,
}
|
|
|
|
|
2023-01-03 12:53:30 +00:00
|
|
|
/// Entry-point function that executes the selected subcommand.
type CliDispatchFn =
    Box<dyn FnOnce(&mut Ui, &CommandHelper, &ArgMatches) -> Result<(), CommandError>>;
|
|
|
|
|
|
|
|
impl CliRunner {
    /// Initializes CLI environment and returns a builder. This should be called
    /// as early as possible.
    pub fn init() -> Self {
        let tracing_subscription = TracingSubscription::init();
        crate::cleanup_guard::init();
        CliRunner {
            tracing_subscription,
            app: crate::commands::default_app(),
            store_factories: None,
            dispatch_fn: Box::new(crate::commands::run_command),
        }
    }

    /// Replaces `StoreFactories` to be used.
    pub fn set_store_factories(self, store_factories: StoreFactories) -> Self {
        CliRunner {
            tracing_subscription: self.tracing_subscription,
            app: self.app,
            store_factories: Some(store_factories),
            dispatch_fn: self.dispatch_fn,
        }
    }

    /// Registers new subcommands in addition to the default ones.
    pub fn add_subcommand<C, F>(self, custom_dispatch_fn: F) -> Self
    where
        C: clap::Subcommand,
        F: FnOnce(&mut Ui, &CommandHelper, C) -> Result<(), CommandError> + 'static,
    {
        let old_dispatch_fn = self.dispatch_fn;
        // Try the custom commands first; if the matches don't deserialize
        // into `C`, fall back to the previously installed dispatcher.
        let new_dispatch_fn =
            move |ui: &mut Ui, command_helper: &CommandHelper, matches: &ArgMatches| {
                match C::from_arg_matches(matches) {
                    Ok(command) => custom_dispatch_fn(ui, command_helper, command),
                    Err(_) => old_dispatch_fn(ui, command_helper, matches),
                }
            };
        CliRunner {
            tracing_subscription: self.tracing_subscription,
            app: C::augment_subcommands(self.app),
            store_factories: self.store_factories,
            dispatch_fn: Box::new(new_dispatch_fn),
        }
    }

    /// Parses the process arguments, loads user and repo configs, and runs
    /// the dispatch function.
    pub fn run(self, ui: &mut Ui, mut layered_configs: LayeredConfigs) -> Result<(), CommandError> {
        let cwd = env::current_dir().unwrap(); // TODO: maybe map_err to CommandError?
        // First config pass: user config only; the repo isn't located yet.
        layered_configs.read_user_config()?;
        let config = layered_configs.merge();
        ui.reset(&config);
        let string_args = expand_args(&self.app, std::env::args_os(), &config)?;
        let (matches, args) = parse_args(
            ui,
            &self.app,
            &self.tracing_subscription,
            &string_args,
            &mut layered_configs,
        )?;

        let maybe_workspace_loader = init_workspace_loader(&cwd, &args.global_args);
        if let Ok(loader) = &maybe_workspace_loader {
            // TODO: maybe show error/warning if repo config contained command alias
            layered_configs.read_repo_config(loader.repo_path())?;
        }
        // Second config pass: now includes the repo config (if any), so the
        // UI is reset again to pick up repo-level settings.
        let config = layered_configs.merge();
        ui.reset(&config);
        let settings = UserSettings::from_config(config);
        let command_helper = CommandHelper::new(
            self.app,
            cwd,
            string_args,
            args.global_args,
            settings,
            maybe_workspace_loader,
            self.store_factories.unwrap_or_default(),
        );
        (self.dispatch_fn)(ui, &command_helper, &matches)
    }

    /// Runs the CLI to completion and terminates the process with an exit
    /// code derived from the result. Never returns.
    pub fn run_and_exit(self) -> ! {
        let layered_configs = LayeredConfigs::from_environment();
        let mut ui = Ui::with_config(&layered_configs.merge());
        let result = self.run(&mut ui, layered_configs);
        let exit_code = handle_command_result(&mut ui, result);
        ui.finalize_writes();
        std::process::exit(exit_code);
    }
}
|