mirror of https://github.com/martinvonz/jj.git
synced 2025-05-05 15:32:49 +00:00

style: add semicolon at the end of expressions used as statements

This commit is contained in:
parent 46e2723464
commit 3f2ef2ee04
@@ -134,6 +134,7 @@ testutils = { path = "lib/testutils" }

 [workspace.lints.clippy]
 explicit_iter_loop = "warn"
+semicolon_if_nothing_returned = "warn"
 uninlined_format_args = "warn"

 # Insta suggests compiling these packages in opt mode for faster testing.
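The hunks that follow apply the `semicolon_if_nothing_returned` clippy lint enabled above: it warns when the final expression of a block evaluates to `()` but is not terminated with a semicolon, including `expr?` used purely for error propagation. A minimal, self-contained sketch of the pattern (the function names below are illustrative, not taken from the jj sources):

use std::io::Write;

// `println!` evaluates to `()`. Leaving it as the block's final expression
// without a `;` compiles, but `clippy::semicolon_if_nothing_returned` warns
// that an expression is being used as a statement.
fn log_line(msg: &str) {
    println!("{msg}"); // lint-clean: terminated with `;`
}

// The same applies to `expr?` in tail position, as in many hunks below: the
// `?` propagates the error and the expression itself evaluates to `()`, so
// the lint asks for a trailing semicolon.
fn maybe_log(out: &mut impl Write, msg: &str, verbose: bool) -> std::io::Result<()> {
    if verbose {
        writeln!(out, "{msg}")?; // without the `;` this still compiles but triggers the lint
    }
    Ok(())
}

fn main() -> std::io::Result<()> {
    log_line("hello");
    maybe_log(&mut std::io::stdout(), "hello", true)?;
    Ok(())
}

With the lint set to "warn" in [workspace.lints.clippy], `cargo clippy` flags any future regressions of this style.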
@@ -254,7 +254,7 @@ impl TracingSubscription {
             .modify(|filter| {
                 *filter = tracing_subscriber::EnvFilter::builder()
                     .with_default_directive(tracing::metadata::LevelFilter::DEBUG.into())
-                    .from_env_lossy()
+                    .from_env_lossy();
             })
             .map_err(|err| internal_error_with_message("failed to enable debug logging", err))?;
         tracing::info!("debug logging enabled");
@@ -100,7 +100,7 @@ fn check_wc_author(
             AuthorChange::Email => &author.email,
         };
         if new_value.as_str() != Some(orig_value) {
-            warn_wc_author(ui, &author.name, &author.email)?
+            warn_wc_author(ui, &author.name, &author.email)?;
         }
     }
     Ok(())
@@ -74,7 +74,7 @@ pub(crate) fn cmd_init(
             ui.warning_default(),
             "`--git` and `--git-repo` are deprecated.
 Use `jj git init` instead"
-        )?
+        )?;
     } else {
         if !command.settings().allow_native_backend() {
             return Err(user_error_with_hint(
@@ -134,7 +134,7 @@ pub fn cmd_op_abandon(
         let mut locked_ws = workspace.start_working_copy_mutation()?;
         let old_op_id = locked_ws.locked_wc().old_operation_id();
         if let Some((_, new_id)) = reparented_head_ops().find(|(old, _)| old.id() == old_op_id) {
-            locked_ws.finish(new_id.clone())?
+            locked_ws.finish(new_id.clone())?;
         } else {
             writeln!(
                 ui.warning_default(),
@@ -93,7 +93,7 @@ pub(crate) fn cmd_status(
                 formatter.labeled("conflict"),
                 "There are unresolved conflicts at these paths:"
             )?;
-            print_conflicted_paths(&conflicts, formatter, &workspace_command)?
+            print_conflicted_paths(&conflicts, formatter, &workspace_command)?;
         }

         let template = workspace_command.commit_summary_template();
@@ -499,10 +499,10 @@ pub fn default_config() -> config::Config {
         .add_source(from_toml!("config/revsets.toml"))
         .add_source(from_toml!("config/templates.toml"));
     if cfg!(unix) {
-        builder = builder.add_source(from_toml!("config/unix.toml"))
+        builder = builder.add_source(from_toml!("config/unix.toml"));
     }
     if cfg!(windows) {
-        builder = builder.add_source(from_toml!("config/windows.toml"))
+        builder = builder.add_source(from_toml!("config/windows.toml"));
     }
     builder.build().unwrap()
 }
@@ -1282,7 +1282,7 @@ fn show_diff_line_tokens(
         match token_type {
             DiffTokenType::Matching => formatter.write_all(content)?,
             DiffTokenType::Different => {
-                formatter.with_label("token", |formatter| formatter.write_all(content))?
+                formatter.with_label("token", |formatter| formatter.write_all(content))?;
             }
         }
     }
@@ -1402,7 +1402,7 @@ pub fn show_diff_summary(
                 CopyOperation::Rename => ("renamed", "R"),
             };
             let path = path_converter.format_copied_path(before_path, after_path);
-            writeln!(formatter.labeled(label), "{sigil} {path}")?
+            writeln!(formatter.labeled(label), "{sigil} {path}")?;
         } else {
             let path = path_converter.format_file_path(after_path);
             match (before.is_present(), after.is_present()) {
@@ -553,7 +553,7 @@ impl<W: Write> Formatter for ColorFormatter<W> {
     fn pop_label(&mut self) -> io::Result<()> {
         self.labels.pop();
         if self.labels.is_empty() {
-            self.write_new_style()?
+            self.write_new_style()?;
         }
         Ok(())
     }
@@ -265,7 +265,7 @@ fn make_diff_sections(
                         .split_inclusive('\n')
                         .map(|line| Cow::Owned(line.to_owned()))
                         .collect(),
-                })
+                });
             }
             DiffHunk::Different(sides) => {
                 assert_eq!(sides.len(), 2, "only two inputs were provided to the diff");
@@ -285,7 +285,7 @@ fn make_diff_sections(
                         make_section_changed_lines(right_side, scm_record::ChangeType::Added),
                     ]
                     .concat(),
-                })
+                });
             }
         }
     }
@@ -361,7 +361,7 @@ pub fn make_diff_files(
                     is_checked: false,
                     old_description: None,
                     new_description: Some(Cow::Owned(describe_binary(hash.as_deref(), num_bytes))),
-                })
+                });
             }

             (
@@ -426,7 +426,7 @@ pub fn make_diff_files(
                     is_checked: false,
                     old_description: Some(Cow::Owned(describe_binary(hash.as_deref(), num_bytes))),
                     new_description: None,
-                })
+                });
             }
         }

@@ -507,7 +507,7 @@ pub fn apply_diff_builtin(
                         executable: file.get_file_mode()
                             == Some(scm_record::FileMode(mode::EXECUTABLE)),
                     }),
-                )
+                );
             }
         }
     }
@@ -109,7 +109,7 @@ impl Direction {
                 write!(formatter, "Working copy parent: ")?;
             }
             template.format(commit, formatter)
-        })
+        });
         });

     cmd_err
@@ -206,7 +206,7 @@ pub struct ConcatTemplate<T>(pub Vec<T>);
 impl<T: Template> Template for ConcatTemplate<T> {
     fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> {
         for template in &self.0 {
-            template.format(formatter)?
+            template.format(formatter)?;
         }
         Ok(())
     }
@@ -618,7 +618,7 @@ fn test_git_colocated_external_checkout() {
     let git_check_out_ref = |name| {
         git_repo
             .set_head_detached(git_repo.find_reference(name).unwrap().target().unwrap())
-            .unwrap()
+            .unwrap();
     };

     test_env.jj_cmd_ok(&repo_path, &["git", "init", "--git-repo=."]);
@@ -328,7 +328,7 @@ fn test_log_null_terminate_multiline_descriptions() {
     insta::assert_debug_snapshot!(
         stdout,
         @r###""commit 3 line 1\n\ncommit 3 line 2\n\0commit 2 line 1\n\ncommit 2 line 2\n\0commit 1 line 1\n\ncommit 1 line 2\n\0""###
-    )
+    );
 }

 #[test]
@@ -610,7 +610,7 @@ fn test_prev_prompts_on_multiple_parents() {
     Hint: Working copy parent: mzvwutvl bc4f4fe3 (empty) third
     Hint: Working copy parent: kkmpptxz b0d21db3 (empty) second
     Hint: Working copy parent: qpvuntsm fa15625b (empty) first
-    "###)
+    "###);
 }

 #[test]
@@ -628,7 +628,7 @@ fn test_parallelize_complex_nonlinear_target() {
     │ ○ 14ca4df576b3 4 parents:
     ├─╯
     ◆ 000000000000 parents:
-    "###)
+    "###);
 }

 fn get_log_output(test_env: &TestEnvironment, cwd: &Path) -> String {
@@ -34,7 +34,7 @@ fn test_util_config_schema() {
         [...]
       }
     }
-    "###)
+    "###);
     });
 }

@@ -197,7 +197,7 @@ int main(int argc, char **argv)
         }
         "##,
         ])
-    })
+    });
     });
 }

@@ -72,7 +72,7 @@ impl PartialOrd for Commit {

 impl Hash for Commit {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        self.id.hash(state)
+        self.id.hash(state);
     }
 }

@@ -81,7 +81,7 @@ impl<T: ContentHash> ContentHash for [T] {

 impl<T: ContentHash> ContentHash for Vec<T> {
     fn hash(&self, state: &mut impl DigestUpdate) {
-        self.as_slice().hash(state)
+        self.as_slice().hash(state);
     }
 }

@@ -97,7 +97,7 @@ impl<T: ContentHash> ContentHash for Option<T> {
             None => state.update(&0u32.to_le_bytes()),
             Some(x) => {
                 state.update(&1u32.to_le_bytes());
-                x.hash(state)
+                x.hash(state);
             }
         }
     }
@@ -73,7 +73,7 @@ impl Eq for IndexEntry<'_> {}

 impl Hash for IndexEntry<'_> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        self.pos.hash(state)
+        self.pos.hash(state);
     }
 }

@@ -183,7 +183,7 @@ impl<'a> RevsetGraphWalk<'a> {
                 parent_edges
                     .iter()
                     .filter(|edge| known_ancestors.insert(edge.target)),
-            )
+            );
         }
     }
 }
@@ -233,7 +233,7 @@ impl DefaultIndexStore {
                     change_id_length,
                 )?;
                 maybe_parent_file = Some(parent_file.clone());
-                mutable_index = DefaultMutableIndex::incremental(parent_file)
+                mutable_index = DefaultMutableIndex::incremental(parent_file);
             }
         }

@@ -745,7 +745,7 @@ mod tests {

     #[test]
     fn test_find_word_ranges_multibyte() {
-        assert_eq!(find_word_ranges("⊢".as_bytes()), vec![0..3])
+        assert_eq!(find_word_ranges("⊢".as_bytes()), vec![0..3]);
     }

     #[test]
@@ -328,7 +328,7 @@ fn build_union_matcher(expressions: &[FilesetExpression]) -> Box<dyn Matcher> {
                 FilePattern::FilePath(path) => file_paths.push(path),
                 FilePattern::PrefixPath(path) => prefix_paths.push(path),
                 FilePattern::FileGlob { dir, pattern } => {
-                    file_globs.push((dir, pattern.clone()))
+                    file_globs.push((dir, pattern.clone()));
                 }
             }
             continue;
@@ -1782,10 +1782,10 @@ pub fn parse_gitmodules(
                 // TODO Git warns when a duplicate config entry is found, we should
                 // consider doing the same.
                 ("path", PartialSubmoduleConfig { path: None, .. }) => {
-                    map_entry.path = Some(config_value.to_string())
+                    map_entry.path = Some(config_value.to_string());
                 }
                 ("url", PartialSubmoduleConfig { url: None, .. }) => {
-                    map_entry.url = Some(config_value.to_string())
+                    map_entry.url = Some(config_value.to_string());
                 }
                 _ => (),
             };
@@ -85,7 +85,7 @@ where
             reverse_edges.entry(target).or_default().push(GraphEdge {
                 target: node.clone(),
                 edge_type,
-            })
+            });
         }
         entries.push(node);
     }
@@ -513,10 +513,10 @@ fn conflict_to_proto(conflict: &Conflict) -> crate::protos::local_store::Conflict
 fn conflict_from_proto(proto: crate::protos::local_store::Conflict) -> Conflict {
     let mut conflict = Conflict::default();
     for term in proto.removes {
-        conflict.removes.push(conflict_term_from_proto(term))
+        conflict.removes.push(conflict_term_from_proto(term));
     }
     for term in proto.adds {
-        conflict.adds.push(conflict_term_from_proto(term))
+        conflict.adds.push(conflict_term_from_proto(term));
     }
     conflict
 }
@@ -424,7 +424,7 @@ impl<T> FromIterator<T> for MergeBuilder<T> {

 impl<T> Extend<T> for MergeBuilder<T> {
     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
-        self.values.extend(iter)
+        self.values.extend(iter);
     }
 }

@@ -512,7 +512,7 @@ impl<T> Merge<Merge<T>> {

 impl<T: ContentHash> ContentHash for Merge<T> {
     fn hash(&self, state: &mut impl DigestUpdate) {
-        self.values.hash(state)
+        self.values.hash(state);
     }
 }

@@ -67,7 +67,7 @@ impl PartialOrd for Operation {

 impl Hash for Operation {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        self.id.hash(state)
+        self.id.hash(state);
     }
 }

@@ -87,7 +87,7 @@ impl OpHeadsStore for SimpleOpHeadsStore {
         assert!(!old_ids.contains(new_id));
         self.add_op_head(new_id);
         for old_id in old_ids {
-            self.remove_op_head(old_id)
+            self.remove_op_head(old_id);
         }
     }

@@ -32,7 +32,7 @@ fn copy_directory(src: &Path, dst: &Path) {
         let base_name = child_src.file_name().unwrap();
         let child_dst = dst.join(base_name);
         if child_src.is_dir() {
-            copy_directory(&child_src, &child_dst)
+            copy_directory(&child_src, &child_dst);
         } else {
             std::fs::copy(&child_src, &child_dst).unwrap();
         }
@@ -535,7 +535,7 @@ line 5
             2
         ),
         None
-    )
+    );
 }

 #[test]
@@ -631,7 +631,7 @@ fn test_parse_conflict_simple() {
             2
         ),
         @"None"
-    )
+    );
 }

 #[test]
@@ -746,7 +746,7 @@ fn test_parse_conflict_wrong_arity() {
             3
         ),
         None
-    )
+    );
 }

 #[test]
@@ -767,7 +767,7 @@ fn test_parse_conflict_malformed_missing_removes() {
             2
         ),
         None
-    )
+    );
 }

 #[test]
@@ -790,7 +790,7 @@ fn test_parse_conflict_malformed_marker() {
             2
         ),
         None
-    )
+    );
 }

 #[test]
@@ -814,7 +814,7 @@ fn test_parse_conflict_malformed_diff() {
             2
         ),
         None
-    )
+    );
 }

 #[test]
@@ -650,7 +650,7 @@ fn test_reindex_corrupt_segment_files() {
         // u32: number of local change ids
         // u32: number of overflow parent entries
         // u32: number of overflow change id positions
-        fs::write(entry.path(), b"\0".repeat(24)).unwrap()
+        fs::write(entry.path(), b"\0".repeat(24)).unwrap();
     }

     let repo = load_repo_at_head(&settings, test_repo.repo_path());
@@ -541,7 +541,7 @@ fn test_resolve_with_conflict() {
             vec![expected_base1],
             vec![expected_side1, expected_side2]
         ))
-    )
+    );
 }

 #[test]