# # # delete "tests/resolve_duplicate_name_conflict/checkout.sh-merged" # # add_file "tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_3-beth_3" # content [e3dbedc55d31338b163a1189d45c0dc11b2d4076] # # add_file "tests/resolve_duplicate_name_conflict/expected-merge-messages-jim_1-beth_2" # content [9695e88d0094a99f57675a57585140dd326ea7b6] # # add_file "tests/resolve_duplicate_name_conflict/expected-update-messages-beth_3" # content [ecae97897d0cc4849f9dc322c8a55d8a6338f4b6] # # add_file "tests/resolve_duplicate_name_conflict/expected-update-messages-jim_2" # content [215059719d4b198766fe5b87accfb85ea8c5d562] # # add_file "tests/resolve_duplicate_name_conflict/merge-beth_2-jim_1-conflicts" # content [049415781a34be6355a37bffaaac77f4fc7ed51c] # # patch "basic_io.cc" # from [f9f5453f817910a7ac6cfc1ec0591b7d938642b5] # to [8537b617f85273a7c80736695a983d8a6104c205] # # patch "basic_io.hh" # from [6c148df1eafd7316edd472e231541f98c62fcb0b] # to [212d1bb1bb8fed7cb5d123eb9b09f60e993d1018] # # patch "cmd_merging.cc" # from [89b55863602631a158f56836c4cdd5e4a3dd331b] # to [aad61ffd84255e1446ad469c6fa607a05c0757ae] # # patch "diff_patch.cc" # from [211424b28fdc3a5dcdae5a781feea97e1e915326] # to [007f6d5be298e952730a10f970a8905df32f5d17] # # patch "diff_patch.hh" # from [8386bfd9dc62a30e7173fca60d27adab6d02167b] # to [f04e39dfc101c83a107775115405eca963f04466] # # patch "merge.cc" # from [c70546fe818693647be0483beda94da09f8656a8] # to [0fe162b99691ba1ed73b3222bb787fcf3189fd3e] # # patch "monotone.texi" # from [abd7160576898d8bc863dbf84d62d2790d760a20] # to [53a2aa982f8fbc875b31ce1db576838753880da9] # # patch "options_list.hh" # from [aa442efe2b176aeaa7775093d8957ddc8f7549a9] # to [154daf8d9f4ab4673441dc8e30bf7531fb7fe3eb] # # patch "roster.cc" # from [9989ba61bf9d98cdc3551daaa3666cd37a752695] # to [0e36c692b2a4b063ba1d83b719d9f14aa74da61e] # # patch "roster.hh" # from [e752044dc5fb4183c3118daa8c8da40ef86ee3c7] # to [8130ebfcc8a1324e6a3db3b10fe8d8e61e714a46] # # patch "roster_merge.cc" # from [de299cc9be1090e4b906e68cb7179e9f5127bae7] # to [cb125c57bf3a35e88f1e43e143aa289260526e11] # # patch "roster_merge.hh" # from [ac60af8e6a41966a00430a85604e9d1c08acc67d] # to [2ea251ea72b44a75a48fd80db8ea0e0742110a7b] # # patch "tests/resolve_duplicate_name_conflict/__driver__.lua" # from [3da589c23ef5c0e996b267d74be712fc6c2176a2] # to [aed0dbae5f8f352b3dabe3b91cffbe6a87acae62] # # patch "tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_1-beth_1" # from [8cc5c13905078a96dd1e308964537c69ed78f4e8] # to [b1db34423258a86c124deda6ad04755de0afa1dc] # # patch "tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1" # from [318b3562e18678195b6209aedc9a157a8f2937e2] # to [13ef6b36c69721f15474749865d55d111eb728b9] # # patch "tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1-conflicts" # from [c505d776680d4278cfbb27e0cb850ba8486e86bd] # to [2222ecb924620c741d8d84477d172287fecfa8d0] # # patch "tests/resolve_duplicate_name_conflict/expected-merged-revision-jim_1" # from [257729bebdb32819cd1fc059806e0fb4144f7ec7] # to [16d13cce9d163a224e0e491da41415c322727b46] # # patch "tests/resolve_duplicate_name_conflict/expected-update-messages-jim_1" # from [d42cacc7d4725d62070fa6f34bc0189c2da4d1cd] # to [d6cdd06022bdadb6f41e395bb8e3f89dbcb64e11] # # patch "tests/resolve_duplicate_name_conflict/merge-abe_2-jim_1-resolve_conflicts" # from [bb50072864e1404e1da5597bca04c486c33bf2ed] # to [a62d7972e68ee89201aeb87d575b1c97634b40dd] # 
============================================================ --- tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_3-beth_3 e3dbedc55d31338b163a1189d45c0dc11b2d4076 +++ tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_3-beth_3 e3dbedc55d31338b163a1189d45c0dc11b2d4076 @@ -0,0 +1,5 @@ +mtn: 2 heads on branch 'testbranch' +mtn: [left] 3be078910d1f5452a2c3cbf09787a36620b2a824 +mtn: [right] 7cb32b106906f18ea25ee2578d82de78b2c95337 +mtn: [merged] cae3c066a1dfc2e5e0c57aa6ed758f4aa2564a03 +mtn: note: your workspaces have not been updated ============================================================ --- tests/resolve_duplicate_name_conflict/expected-merge-messages-jim_1-beth_2 9695e88d0094a99f57675a57585140dd326ea7b6 +++ tests/resolve_duplicate_name_conflict/expected-merge-messages-jim_1-beth_2 9695e88d0094a99f57675a57585140dd326ea7b6 @@ -0,0 +1,4 @@ +mtn: [left] 99458e9fba1b7a43ce1a50df44754efaee63fab1 +mtn: [right] 16d13cce9d163a224e0e491da41415c322727b46 +mtn: merged checkout.sh, checkout.sh +mtn: [merged] 3be078910d1f5452a2c3cbf09787a36620b2a824 ============================================================ --- tests/resolve_duplicate_name_conflict/expected-update-messages-beth_3 ecae97897d0cc4849f9dc322c8a55d8a6338f4b6 +++ tests/resolve_duplicate_name_conflict/expected-update-messages-beth_3 ecae97897d0cc4849f9dc322c8a55d8a6338f4b6 @@ -0,0 +1,9 @@ +mtn: updating along branch 'testbranch' +mtn: note: branch 'testbranch' has multiple heads +mtn: note: perhaps consider 'mtn merge' +mtn: selected update target 3be078910d1f5452a2c3cbf09787a36620b2a824 +mtn: adding checkout.sh +mtn: renaming thermostat.c to thermostat-honeywell.c +mtn: adding thermostat-westinghouse.c +mtn: dropping checkout.sh +mtn: updated to base revision 3be078910d1f5452a2c3cbf09787a36620b2a824 ============================================================ --- tests/resolve_duplicate_name_conflict/expected-update-messages-jim_2 215059719d4b198766fe5b87accfb85ea8c5d562 +++ tests/resolve_duplicate_name_conflict/expected-update-messages-jim_2 215059719d4b198766fe5b87accfb85ea8c5d562 @@ -0,0 +1,5 @@ +mtn: updating along branch 'testbranch' +mtn: selected update target cae3c066a1dfc2e5e0c57aa6ed758f4aa2564a03 +mtn: modifying checkout.sh +mtn: modifying thermostat-westinghouse.c +mtn: updated to base revision cae3c066a1dfc2e5e0c57aa6ed758f4aa2564a03 ============================================================ --- tests/resolve_duplicate_name_conflict/merge-beth_2-jim_1-conflicts 049415781a34be6355a37bffaaac77f4fc7ed51c +++ tests/resolve_duplicate_name_conflict/merge-beth_2-jim_1-conflicts 049415781a34be6355a37bffaaac77f4fc7ed51c @@ -0,0 +1,13 @@ + left [99458e9fba1b7a43ce1a50df44754efaee63fab1] + right [16d13cce9d163a224e0e491da41415c322727b46] +ancestor [e9ad84a3fc40ef1109251c308428439c21ad1de9] + + conflict content + node_type "file" + ancestor_name "checkout.sh" + ancestor_file_id [dd6805ae36432d6edcbdff6ea578ea981ffa2144] + left_name "checkout.sh" + left_file_id [d41d63b495331210b2cd5ea69295c8b4be0cdf74] + right_name "checkout.sh" + right_file_id [f7a9033fcfa98450d0c0a25e41aa68904b7a33ce] +resolved_internal ============================================================ --- basic_io.cc f9f5453f817910a7ac6cfc1ec0591b7d938642b5 +++ basic_io.cc 8537b617f85273a7c80736695a983d8a6104c205 @@ -69,6 +69,14 @@ void basic_io::stanza::push_binary_pair( push_hex_pair(k, hexenc(encode_hexenc(v()))); } +void +basic_io::stanza::push_symbol(symbol const & k) +{ + entries.push_back(make_pair(k, "")); + if (k().size() > 
indent) + indent = k().size(); +} + void basic_io::stanza::push_hex_pair(symbol const & k, hexenc const & v) { entries.push_back(make_pair(k, "")); ============================================================ --- basic_io.hh 6c148df1eafd7316edd472e231541f98c62fcb0b +++ basic_io.hh 212d1bb1bb8fed7cb5d123eb9b09f60e993d1018 @@ -246,6 +246,7 @@ namespace basic_io stanza(); size_t indent; std::vector > entries; + void push_symbol(symbol const & k); void push_hex_pair(symbol const & k, hexenc const & v); void push_binary_pair(symbol const & k, id const & v); void push_binary_triple(symbol const & k, std::string const & n, ============================================================ --- cmd_merging.cc 89b55863602631a158f56836c4cdd5e4a3dd331b +++ cmd_merging.cc aad61ffd84255e1446ad469c6fa607a05c0757ae @@ -46,6 +46,7 @@ three_way_merge(revision_id const & ance three_way_merge(revision_id const & ancestor_rid, roster_t const & ancestor_roster, revision_id const & left_rid, roster_t const & left_roster, revision_id const & right_rid, roster_t const & right_roster, + content_merge_adaptor & adaptor, roster_merge_result & result, marking_map & left_markings, marking_map & right_markings) @@ -85,7 +86,7 @@ three_way_merge(revision_id const & ance // And do the merge roster_merge(left_roster, left_markings, left_uncommon_ancestors, right_roster, right_markings, right_uncommon_ancestors, - result); + adaptor, result); } static bool @@ -250,6 +251,12 @@ CMD(update, "update", "", CMD_REF(worksp roster_merge_result result; + map paths; + get_content_paths(*working_roster, paths); + + content_merge_workspace_adaptor wca(db, base_rid, base_roster, + chosen_markings, working_markings, paths); + // If we are not switching branches, and the user has not specified a // specific revision, we treat the workspace as a revision that has not // yet been committed, and do a normal merge. 
This supports sutures and @@ -267,7 +274,7 @@ CMD(update, "update", "", CMD_REF(worksp safe_insert(working_uncommon_ancestors, working_rid); roster_merge(*working_roster, working_markings, working_uncommon_ancestors, - chosen_roster, chosen_markings, chosen_uncommon_ancestors, + chosen_roster, chosen_markings, chosen_uncommon_ancestors, wca, result); } else @@ -298,18 +305,13 @@ CMD(update, "update", "", CMD_REF(worksp // And finally do the merge three_way_merge(base_rid, *base_roster, working_rid, *working_roster, - chosen_rid, chosen_roster, + chosen_rid, chosen_roster, wca, result, working_markings, chosen_markings); } roster_t & merged_roster = result.roster; - map paths; - get_content_paths(*working_roster, paths); - - content_merge_workspace_adaptor wca(db, base_rid, base_roster, - chosen_markings, working_markings, paths); wca.cache_roster(working_rid, working_roster); resolve_merge_conflicts(app.lua, *working_roster, chosen_roster, result, wca, false); @@ -659,17 +661,18 @@ CMD(merge_into_dir, "merge_into_dir", "" } roster_merge_result result; + content_merge_database_adaptor + dba(db, left_rid, right_rid, left_marking_map, right_marking_map); + roster_merge(left_roster, left_marking_map, left_uncommon_ancestors, right_roster, right_marking_map, right_uncommon_ancestors, + dba, result); - content_merge_database_adaptor - dba(db, left_rid, right_rid, left_marking_map, right_marking_map); - bool resolutions_given; parse_resolve_conflicts_opts (app.opts, left_roster, right_roster, result, resolutions_given); @@ -773,12 +776,6 @@ CMD(merge_into_workspace, "merge_into_wo left_uncommon_ancestors, right_uncommon_ancestors); - roster_merge_result merge_result; - MM(merge_result); - roster_merge(*left.first, *left.second, left_uncommon_ancestors, - *right.first, *right.second, right_uncommon_ancestors, - merge_result); - revision_id lca_id; cached_roster lca; find_common_ancestor_for_merge(db, left_id, right_id, lca_id); @@ -790,6 +787,13 @@ CMD(merge_into_workspace, "merge_into_wo content_merge_workspace_adaptor wca(db, lca_id, lca.first, *left.second, *right.second, paths); wca.cache_roster(working_rid, working_roster); + + roster_merge_result merge_result; + MM(merge_result); + roster_merge(*left.first, *left.second, left_uncommon_ancestors, + *right.first, *right.second, right_uncommon_ancestors, + wca, merge_result); + resolve_merge_conflicts(app.lua, *left.first, *right.first, merge_result, wca, false); // Make sure it worked... 
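The same reorder recurs in each command above (update, merge_into_dir, merge_into_workspace) and in pluck below: the adaptor is constructed before roster_merge or three_way_merge and passed in, rather than being created afterwards only for conflict resolution. A sketch of the update-command shape, with names taken from the hunks above; the map's key and value types are written out here as file_id and file_path to match the adaptor's content-path table, and those exact types should be treated as an assumption:

  map<file_id, file_path> paths;
  get_content_paths(*working_roster, paths);

  content_merge_workspace_adaptor wca(db, base_rid, base_roster,
                                      chosen_markings, working_markings, paths);

  roster_merge(*working_roster, working_markings, working_uncommon_ancestors,
               chosen_roster, chosen_markings, chosen_uncommon_ancestors,
               wca, result);
  wca.cache_roster(working_rid, working_roster);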
@@ -887,9 +891,11 @@ show_conflicts_core (database & db, revi set l_uncommon_ancestors, r_uncommon_ancestors; db.get_uncommon_ancestors(l_id, r_id, l_uncommon_ancestors, r_uncommon_ancestors); roster_merge_result result; + content_merge_database_adaptor adaptor(db, l_id, r_id, l_marking, r_marking); + roster_merge(*l_roster, l_marking, l_uncommon_ancestors, *r_roster, r_marking, r_uncommon_ancestors, - result); + adaptor, result); // note that left and right are in the order specified on the command line // they are not in lexical order as they are with other merge commands so @@ -922,9 +928,6 @@ show_conflicts_core (database & db, revi } else { - content_merge_database_adaptor adaptor(db, l_id, r_id, - l_marking, r_marking); - { basic_io::printer pr; st.push_binary_pair(syms::ancestor, adaptor.lca.inner()); @@ -1158,19 +1161,19 @@ CMD(pluck, "pluck", "", CMD_REF(workspac // Now do the merge roster_merge_result result; marking_map left_markings, right_markings; - three_way_merge(from_rid, *from_roster, - working_rid, *working_roster, - to_rid, *to_roster, - result, left_markings, right_markings); - - roster_t & merged_roster = result.roster; - map paths; get_content_paths(*working_roster, paths); content_merge_workspace_adaptor wca(db, from_rid, from_roster, left_markings, right_markings, paths); + three_way_merge(from_rid, *from_roster, + working_rid, *working_roster, + to_rid, *to_roster, + wca, result, left_markings, right_markings); + + roster_t & merged_roster = result.roster; + wca.cache_roster(working_rid, working_roster); // cache the synthetic to_roster under the to_rid so that the real // to_roster is not fetched from the db which does not have temporary nids ============================================================ --- diff_patch.cc 211424b28fdc3a5dcdae5a781feea97e1e915326 +++ diff_patch.cc 007f6d5be298e952730a10f970a8905df32f5d17 @@ -767,6 +767,61 @@ bool } bool +content_merger::attempt_auto_merge(file_path const & anc_path, // inputs + file_path const & left_path, + file_path const & right_path, + file_id const & ancestor_id, + file_id const & left_id, + file_id const & right_id, + file_data & left_data, // outputs + file_data & right_data, + file_data & merge_data) +{ + I(left_id != right_id); + + if (attribute_manual_merge(left_path, left_ros) || + attribute_manual_merge(right_path, right_ros)) + { + return false; + } + + // both files mergeable by monotone internal algorithm, try to merge + // note: the ancestor is not considered for manual merging. 
Forcing the + // user to merge manually just because of an ancestor mistakenly marked + // manual seems too harsh + + file_data ancestor_data; + + adaptor.get_version(left_id, left_data); + adaptor.get_version(ancestor_id, ancestor_data); + adaptor.get_version(right_id, right_data); + + data const left_unpacked = left_data.inner(); + data const ancestor_unpacked = ancestor_data.inner(); + data const right_unpacked = right_data.inner(); + + string const left_encoding(get_file_encoding(left_path, left_ros)); + string const anc_encoding(get_file_encoding(anc_path, anc_ros)); + string const right_encoding(get_file_encoding(right_path, right_ros)); + + vector left_lines, ancestor_lines, right_lines, merged_lines; + split_into_lines(left_unpacked(), left_encoding, left_lines); + split_into_lines(ancestor_unpacked(), anc_encoding, ancestor_lines); + split_into_lines(right_unpacked(), right_encoding, right_lines); + + if (merge3(ancestor_lines, left_lines, right_lines, merged_lines)) + { + string tmp; + + join_lines(merged_lines, tmp); + merge_data = file_data(tmp); + return true; + } + + return false; +} + +bool content_merger::try_auto_merge(file_path const & anc_path, file_path const & left_path, file_path const & right_path, @@ -795,57 +850,27 @@ content_merger::try_auto_merge(file_path return true; } - file_data left_data, right_data, ancestor_data; - data left_unpacked, ancestor_unpacked, right_unpacked, merged_unpacked; + file_data left_data, right_data, merge_data; - adaptor.get_version(left_id, left_data); - adaptor.get_version(ancestor_id, ancestor_data); - adaptor.get_version(right_id, right_data); - - left_unpacked = left_data.inner(); - ancestor_unpacked = ancestor_data.inner(); - right_unpacked = right_data.inner(); - - if (!attribute_manual_merge(left_path, left_ros) && - !attribute_manual_merge(right_path, right_ros)) + if (attempt_auto_merge(anc_path, left_path, right_path, + ancestor_id, left_id, right_id, + left_data, right_data, merge_data)) { - // both files mergeable by monotone internal algorithm, try to merge - // note: the ancestor is not considered for manual merging. 
Forcing the - // user to merge manually just because of an ancestor mistakenly marked - // manual seems too harsh - string left_encoding, anc_encoding, right_encoding; - left_encoding = this->get_file_encoding(left_path, left_ros); - anc_encoding = this->get_file_encoding(anc_path, anc_ros); - right_encoding = this->get_file_encoding(right_path, right_ros); + L(FL("internal 3-way merged ok")); + calculate_ident(merge_data, merged_id); - vector left_lines, ancestor_lines, right_lines, merged_lines; - split_into_lines(left_unpacked(), left_encoding, left_lines); - split_into_lines(ancestor_unpacked(), anc_encoding, ancestor_lines); - split_into_lines(right_unpacked(), right_encoding, right_lines); + adaptor.record_merge(left_id, right_id, merged_id, + left_data, right_data, merge_data); - if (merge3(ancestor_lines, left_lines, right_lines, merged_lines)) - { - file_id tmp_id; - file_data merge_data; - string tmp; - - L(FL("internal 3-way merged ok")); - join_lines(merged_lines, tmp); - merge_data = file_data(tmp); - calculate_ident(merge_data, merged_id); - - adaptor.record_merge(left_id, right_id, merged_id, - left_data, right_data, merge_data); - - return true; - } + return true; } return false; } bool -content_merger::try_user_merge(file_path const & anc_path, +content_merger::try_user_merge(lua_hooks & lua, + file_path const & anc_path, file_path const & left_path, file_path const & right_path, file_path const & merged_path, ============================================================ --- diff_patch.hh 8386bfd9dc62a30e7173fca60d27adab6d02167b +++ diff_patch.hh f04e39dfc101c83a107775115405eca963f04466 @@ -1,6 +1,7 @@ #ifndef __DIFF_PATCH_HH__ #define __DIFF_PATCH_HH__ +// Copyright (C) 2008 Stephen Leake // Copyright (C) 2002 Graydon Hoare // // This program is made available under the GNU GPL version 2.0 or @@ -160,26 +161,37 @@ struct content_merger struct content_merger { - lua_hooks & lua; roster_t const & anc_ros; roster_t const & left_ros; roster_t const & right_ros; content_merge_adaptor & adaptor; - content_merger(lua_hooks & lua, - roster_t const & anc_ros, + content_merger(roster_t const & anc_ros, roster_t const & left_ros, roster_t const & right_ros, content_merge_adaptor & adaptor) - : lua(lua), - anc_ros(anc_ros), + : anc_ros(anc_ros), left_ros(left_ros), right_ros(right_ros), adaptor(adaptor) {} - // merge3 on a file (line by line) + // Attempt merge3 on a file (line by line). Return true and valid data if + // it would succeed; false and invalid data otherwise. + bool attempt_auto_merge(file_path const & anc_path, // inputs + file_path const & left_path, + file_path const & right_path, + file_id const & ancestor_id, + file_id const & left_id, + file_id const & right_id, + file_data & left_data, // outputs + file_data & right_data, + file_data & merge_data); + + // Attempt merge3 on a file (line by line). If it succeeded, store results + // in database and return true and valid merged_id; return false + // otherwise. 
bool try_auto_merge(file_path const & anc_path, file_path const & left_path, file_path const & right_path, @@ -189,7 +201,8 @@ struct content_merger file_id const & right, file_id & merged_id); - bool try_user_merge(file_path const & anc_path, + bool try_user_merge(lua_hooks & lua, + file_path const & anc_path, file_path const & left_path, file_path const & right_path, file_path const & merged_path, ============================================================ --- merge.cc c70546fe818693647be0483beda94da09f8656a8 +++ merge.cc 0fe162b99691ba1ed73b3222bb787fcf3189fd3e @@ -35,17 +35,6 @@ namespace enum merge_method { auto_merge, user_merge }; void - get_file_details(roster_t const & ros, node_id nid, - file_id & fid, - file_path & pth) - { - I(ros.has_node(nid)); - file_t f = downcast_to_file_t(ros.get_node(nid)); - fid = f->content; - ros.get_name(nid, pth); - } - - void try_to_merge_files(lua_hooks & lua, roster_t const & left_roster, roster_t const & right_roster, roster_merge_result & result, content_merge_adaptor & adaptor, @@ -72,13 +61,13 @@ namespace file_id anc_id, left_id, right_id; file_path anc_path, left_path, right_path; - get_file_details(*ancestor_roster, ancestor_nid, anc_id, anc_path); - get_file_details(left_roster, conflict.left_nid, left_id, left_path); - get_file_details(right_roster, conflict.right_nid, right_id, right_path); + ancestor_roster->get_file_details(ancestor_nid, anc_id, anc_path); + left_roster.get_file_details(conflict.left_nid, left_id, left_path); + right_roster.get_file_details(conflict.right_nid, right_id, right_path); file_id merged_id; - content_merger cm(lua, *ancestor_roster, left_roster, right_roster, adaptor); + content_merger cm(*ancestor_roster, left_roster, right_roster, adaptor); bool merged = false; @@ -91,7 +80,7 @@ namespace break; case user_merge: - merged = cm.try_user_merge(anc_path, left_path, right_path, + merged = cm.try_user_merge(lua, anc_path, left_path, right_path, right_path, anc_id, left_id, right_id, merged_id); @@ -133,43 +122,41 @@ resolve_merge_conflicts(lua_hooks & lua, bool resolutions_given) { if (!result.is_clean()) - result.log_conflicts(); + { + result.log_conflicts(); - if (result.has_non_content_conflicts()) - { if (resolutions_given) { - // We just report the conflicts we don't know how to resolve yet. + // If there are any conflicts for which we don't currently support + // resolutions, give a nice error message. 
+ char const * const msg = "conflict resolution for %s not yet supported"; - result.report_missing_root_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_invalid_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_directory_loop_conflicts(left_roster, right_roster, adaptor, false, std::cout); + N(!result.missing_root_dir, F(msg) % "missing_root_dir"); + N(result.invalid_name_conflicts.size() == 0, F(msg) % "invalid_name_conflicts"); + N(result.directory_loop_conflicts.size() == 0, F(msg) % "directory_loop_conflicts"); + N(result.orphaned_node_conflicts.size() == 0, F(msg) % "orphaned_node_conflicts"); + N(result.multiple_name_conflicts.size() == 0, F(msg) % "multiple_name_conflicts"); + N(result.attribute_conflicts.size() == 0, F(msg) % "attribute_conflicts"); - result.report_orphaned_node_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_multiple_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); - - result.report_attribute_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_file_content_conflicts(left_roster, right_roster, adaptor, false, std::cout); - - // If there aren't any we can't resolve, resolve the ones we can. + // resolve the ones we can. result.resolve_duplicate_name_conflicts(lua, left_roster, right_roster, adaptor); - - // FIXME: need to resolve content conflicts here + result.resolve_file_content_conflicts(lua, left_roster, right_roster, adaptor); } - else - { - result.report_missing_root_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_invalid_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_directory_loop_conflicts(left_roster, right_roster, adaptor, false, std::cout); + } - result.report_orphaned_node_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_multiple_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_duplicate_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); + if (result.has_non_content_conflicts()) + { + result.report_missing_root_conflicts(left_roster, right_roster, adaptor, false, std::cout); + result.report_invalid_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); + result.report_directory_loop_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_attribute_conflicts(left_roster, right_roster, adaptor, false, std::cout); - result.report_file_content_conflicts(left_roster, right_roster, adaptor, false, std::cout); - } - } + result.report_orphaned_node_conflicts(left_roster, right_roster, adaptor, false, std::cout); + result.report_multiple_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); + result.report_duplicate_name_conflicts(left_roster, right_roster, adaptor, false, std::cout); + + result.report_attribute_conflicts(left_roster, right_roster, adaptor, false, std::cout); + result.report_file_content_conflicts(left_roster, right_roster, adaptor, false, std::cout); + } else if (result.has_content_conflicts()) { // Attempt to auto-resolve any content conflicts using the line-merger. 
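The diff_patch.cc split above is what makes both paths possible: attempt_auto_merge() performs the pure three-way line merge and hands back the file_data blobs with no side effects, while try_auto_merge() becomes a thin wrapper that hashes and records the result. roster_merge uses the former to decide whether a conflict can be pre-marked resolved_internal; the resolution code uses the latter. The new wrapper, abridged from that hunk:

  file_data left_data, right_data, merge_data;

  if (attempt_auto_merge(anc_path, left_path, right_path,
                         ancestor_id, left_id, right_id,
                         left_data, right_data, merge_data))
    {
      L(FL("internal 3-way merged ok"));
      calculate_ident(merge_data, merged_id);

      adaptor.record_merge(left_id, right_id, merged_id,
                           left_data, right_data, merge_data);
      return true;
    }

  return false;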
@@ -215,15 +202,14 @@ interactive_merge_and_store(lua_hooks & left_uncommon_ancestors, right_uncommon_ancestors); roster_merge_result result; + content_merge_database_adaptor dba(db, left_rid, right_rid, + left_marking_map, right_marking_map); roster_merge(left_roster, left_marking_map, left_uncommon_ancestors, right_roster, right_marking_map, right_uncommon_ancestors, - result); + dba, result); bool resolutions_given; - content_merge_database_adaptor dba(db, left_rid, right_rid, - left_marking_map, right_marking_map); - parse_resolve_conflicts_opts (opts, left_roster, right_roster, result, resolutions_given); resolve_merge_conflicts(lua, left_roster, right_roster, result, dba, resolutions_given); ============================================================ --- monotone.texi abd7160576898d8bc863dbf84d62d2790d760a20 +++ monotone.texi 53a2aa982f8fbc875b31ce1db576838753880da9 @@ -3018,6 +3018,25 @@ @section Merge Conflicts Unfortunately, these commands can't yet list conflicts between a database revision and the current workspace. +All merging commands accept options that specify conflict resolutions: address@hidden @command address@hidden address@hidden resolution} address@hidden address@hidden address@hidden table + +For @var{--resolve-conflicts-file}, the file must contain the output +of @command{automate show_conflicts}, with conflict resolutions +appended to each stanza. @file{_MTN/conflicts} is a good place to put +the file. + +For @var{--resolve-conflicts}, a single conflict resolution is given +in the string; it has the same format as the conflict resolutions in +the file, and must be applicable to all conflicts in the merge. This +is most usefull when there is a single conflict. + +The possible conflict resolutions are discussed with each conflict in +the following sections. + @subsection Conflict Types Monotone versions both files and directories explicitly and it tracks @@ -8599,9 +8618,14 @@ @section Automation workspace is present, the branch may be given by the @var{--branch} option. address@hidden Added in: address@hidden Changes: -7.1 address@hidden address@hidden +?? -- added default resolution for file content conflicts address@hidden +7.1 -- initial address@hidden itemize @item Purpose: @@ -8725,14 +8749,15 @@ @section Automation File content changed (this may be resolvable by the internal line merger), file also renamed: @verbatim - conflict content - node_type "file" - ancestor_name "bar" -ancestor_file_id [f0ef49fe92167fe2a086588019ffcff7ea561786] - left_name "bar" - left_file_id [08cd878106a93ce2ef036a32499c1432adb3ee0d] - right_name "bar" - right_file_id [0cf419dd93d38b2daaaf1f5e0f3ec647745b9690] + conflict content + node_type "file" + ancestor_name "bar" + ancestor_file_id [f0ef49fe92167fe2a086588019ffcff7ea561786] + left_name "bar" + left_file_id [08cd878106a93ce2ef036a32499c1432adb3ee0d] + right_name "bar" + right_file_id [0cf419dd93d38b2daaaf1f5e0f3ec647745b9690] +resolved_internal conflict content node_type "file" @@ -8743,6 +8768,10 @@ @section Automation right_name "baz" right_file_id [b966b2d35b99e456cb0c55e4573ef0b1b155b4a9] @end verbatim address@hidden is a conflict resolution; see @ref{Merge +Conflicts}. If the file contents in the two revs can be successfully +merged by the internal line merger, @code{resolved_internal} is +output. 
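The resolved_internal keyword documented above reaches the basic_io output through the new stanza::push_symbol(), which records a bare symbol with an empty value; put_content_conflict() in roster_merge.cc (further below) then appends whichever resolution is attached to the conflict. Abridged from that hunk:

  switch (conflict.resolution.first)
    {
    case resolve_conflicts::none:
      break;

    case resolve_conflicts::content_internal:
      st.push_symbol(syms::resolved_internal);
      break;

    case resolve_conflicts::content_user:
      st.push_file_pair(syms::resolved_user, conflict.resolution.second);
      break;

    default:
      I(false);
    }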
File added and/or renamed: @verbatim ============================================================ --- options_list.hh aa442efe2b176aeaa7775093d8957ddc8f7549a9 +++ options_list.hh 154daf8d9f4ab4673441dc8e30bf7531fb7fe3eb @@ -641,7 +641,7 @@ OPTION(resolve_conflicts_opts, resolve_c OPTVAR(resolve_conflicts_opts, std::string, resolve_conflicts, ) OPTION(resolve_conflicts_opts, resolve_conflicts_file, true, "resolve-conflicts-file", - gettext_noop("use _MTN/conflicts to resolve conflicts")) + gettext_noop("use file to resolve conflicts")) #ifdef option_bodies { N(!resolve_conflicts_given, ============================================================ --- roster.cc 9989ba61bf9d98cdc3551daaa3666cd37a752695 +++ roster.cc 0e36c692b2a4b063ba1d83b719d9f14aa74da61e @@ -1698,9 +1698,28 @@ namespace } else { - // suture. - new_marking.birth_revision = new_rid; - new_marking.birth_cause = make_pair (marking_t::suture, make_pair(ln->self, rn->self)); + // suture is involved somewhere. + + if (ln->self == n->self) + { + // ln was previously sutured, now rn is being added to the + // suture. Keep the birth revision for the original suture. + new_marking.birth_revision = left_marking.birth_revision; + new_marking.birth_cause = left_marking.birth_cause; + } + else if (rn->self == n->self) + { + // rn was previously sutured, now ln is being added to the + // suture. Keep the birth revision for the original suture. + new_marking.birth_revision = right_marking.birth_revision; + new_marking.birth_cause = right_marking.birth_cause; + } + else + { + // new suture + new_marking.birth_revision = new_rid; + new_marking.birth_cause = make_pair (marking_t::suture, make_pair(ln->self, rn->self)); + } } // name @@ -2716,6 +2735,17 @@ void } void +roster_t::get_file_details(node_id nid, + file_id & fid, + file_path & pth) const +{ + I(has_node(nid)); + file_t f = downcast_to_file_t(get_node(nid)); + fid = f->content; + get_name(nid, pth); +} + +void roster_t::extract_path_set(set & paths) const { paths.clear(); ============================================================ --- roster.hh e752044dc5fb4183c3118daa8c8da40ef86ee3c7 +++ roster.hh 8130ebfcc8a1324e6a3db3b10fe8d8e61e714a46 @@ -227,6 +227,10 @@ public: attr_key const & key, attr_value & val) const; + void get_file_details(node_id nid, + file_id & fid, + file_path & pth) const; + void extract_path_set(std::set & paths) const; node_map const & all_nodes() const ============================================================ --- roster_merge.cc de299cc9be1090e4b906e68cb7179e9f5127bae7 +++ roster_merge.cc cb125c57bf3a35e88f1e43e143aa289260526e11 @@ -203,9 +203,11 @@ namespace symbol const node_type("node_type"); symbol const orphaned_directory("orphaned_directory"); symbol const orphaned_file("orphaned_file"); + symbol const resolved_internal("resolved_internal"); symbol const resolved_rename_left("resolved_rename_left"); symbol const resolved_rename_right("resolved_rename_right"); symbol const resolved_suture ("resolved_suture"); + symbol const resolved_user("resolved_user"); symbol const right_attr_state("right_attr_state"); symbol const right_attr_value("right_attr_value"); symbol const right_file_id("right_file_id"); @@ -401,7 +403,6 @@ put_attr_conflict (basic_io::stanza & st db_adaptor.db.get_file_content (db_adaptor.lca, conflict.nid, ancestor_fid); st.push_str_pair(syms::ancestor_name, ancestor_name.as_external()); st.push_binary_pair(syms::ancestor_file_id, ancestor_fid.inner()); - // FIXME: don't have this. 
st.push_str_pair(syms::ancestor_attr_value, ???); file_id left_fid; db_adaptor.db.get_file_content (db_adaptor.left_rid, conflict.nid, left_fid); st.push_file_pair(syms::left_name, left_name); @@ -418,7 +419,6 @@ put_attr_conflict (basic_io::stanza & st st.push_str_pair(syms::node_type, "directory"); st.push_str_pair(syms::attr_name, conflict.key()); st.push_str_pair(syms::ancestor_name, ancestor_name.as_external()); - // FIXME: don't have this. st.push_str_pair(syms::ancestor_attr_value, ???); st.push_file_pair(syms::left_name, left_name); put_attr_state_left (st, conflict); st.push_file_pair(syms::right_name, right_name); @@ -462,6 +462,23 @@ put_content_conflict (basic_io::stanza & st.push_binary_pair(syms::left_file_id, conflict.left.inner()); st.push_file_pair(syms::right_name, right_name); st.push_binary_pair(syms::right_file_id, conflict.right.inner()); + + switch (conflict.resolution.first) + { + case resolve_conflicts::none: + break; + + case resolve_conflicts::content_internal: + st.push_symbol(syms::resolved_internal); + break; + + case resolve_conflicts::content_user: + st.push_file_pair(syms::resolved_user, conflict.resolution.second); + break; + + default: + I(false); + } } else { @@ -1375,15 +1392,51 @@ namespace resolve_conflicts case none: return "none"; + case content_internal: + return "content_internal"; + + case content_user: + return "content_user"; + case suture: return "suture"; case rename: return "rename"; + } return ""; // suppress bogus compiler warning } + + bool + do_auto_merge(file_content_conflict const & conflict, + content_merge_adaptor & adaptor, + roster_t const & left_roster, + roster_t const & right_roster, + roster_t const & result_roster, + file_id & merged_id) + { + node_id ancestor_nid; + revision_id ancestor_rid; + shared_ptr ancestor_roster; + conflict.get_ancestor_roster(adaptor, ancestor_nid, ancestor_rid, ancestor_roster); + + I(ancestor_roster); + I(ancestor_roster->has_node(ancestor_nid)); // this fails if there is no least common ancestor + + file_id anc_id, left_id, right_id; + file_path anc_path, left_path, right_path, merged_path; + ancestor_roster->get_file_details(ancestor_nid, anc_id, anc_path); + left_roster.get_file_details(conflict.left_nid, left_id, left_path); + right_roster.get_file_details(conflict.right_nid, right_id, right_path); + result_roster.get_file_details(conflict.result_nid, merged_id, merged_path); + + content_merger cm(*ancestor_roster, left_roster, right_roster, adaptor); + + return cm.try_auto_merge(anc_path, left_path, right_path, merged_path, + anc_id, left_id, right_id, merged_id); + } } static void @@ -1404,22 +1457,18 @@ parse_duplicate_name_conflicts(basic_io: string left_name, right_name; pars.esym(syms::left_type); pars.str(); - pars.esym (syms::left_name); - left_name = pars.token; - pars.str(); + pars.esym (syms::left_name); pars.str(left_name); pars.esym(syms::left_file_id); pars.hex(); pars.esym(syms::right_type); pars.str(); - pars.esym (syms::right_name); - right_name = pars.token; - pars.str(); + pars.esym (syms::right_name); pars.str(right_name); pars.esym(syms::right_file_id); pars.hex(); left_nid = left_roster.get_node (file_path_internal (left_name))->self; right_nid = right_roster.get_node (file_path_internal (right_name))->self; N(left_nid == conflict.left_nid & right_nid == conflict.right_nid, - F("conflict mismatch: (duplicate_name, left %s, right %s") + F("conflicts file does not match current conflicts: (duplicate_name, left %s, right %s") % left_name % right_name); // check for a 
resolution @@ -1448,7 +1497,7 @@ parse_duplicate_name_conflicts(basic_io: pars.str(); } else - N(false, F("%s is not a supported conflict resolution for duplicate_name") % pars.token); + N(false, F("%s is not a supported conflict resolution for %s") % pars.token % "duplicate_name"); } if (pars.tok.in.lookahead != EOF) @@ -1462,6 +1511,73 @@ static void } // parse_duplicate_name_conflicts static void +parse_file_content_conflicts(basic_io::parser & pars, + std::vector & conflicts, + roster_t const & left_roster, + roster_t const & right_roster) +{ + for (std::vector::iterator i = conflicts.begin(); + i != conflicts.end(); + ++i) + { + string tmp; + node_id left_nid, right_nid; + string left_name, right_name, result_name; + + file_content_conflict & conflict = *i; + + pars.esym(syms::content); + + pars.esym(syms::node_type); + pars.str(tmp); + I(tmp == "file"); + + pars.esym (syms::ancestor_name); pars.str(); + pars.esym (syms::ancestor_file_id); pars.hex(); + + pars.esym (syms::left_name); pars.str(left_name); + pars.esym(syms::left_file_id); pars.hex(); + + pars.esym (syms::right_name); pars.str(right_name); + pars.esym(syms::right_file_id); pars.hex(); + + left_nid = left_roster.get_node (file_path_internal (left_name))->self; + right_nid = right_roster.get_node (file_path_internal (right_name))->self; + + N(left_nid == conflict.left_nid & right_nid == conflict.right_nid, + F("conflicts file does not match current conflicts: (file_content, left %s, right %s") + % left_name % right_name); + + // check for a resolution + if ((!pars.symp (syms::conflict)) && pars.tok.in.lookahead != EOF) + { + if (pars.symp (syms::resolved_internal)) + { + conflict.resolution.first = resolve_conflicts::content_internal; + pars.sym(); + } + else if (pars.symp (syms::resolved_user)) + { + conflict.resolution.first = resolve_conflicts::content_user; + pars.sym(); + conflict.resolution.second = file_path_internal (pars.token); + pars.str(); + } + else + N(false, F("%s is not a supported conflict resolution for %s") % pars.token % "file_content"); + } + + if (pars.tok.in.lookahead != EOF) + pars.esym (syms::conflict); + else + { + std::vector::iterator tmp = i; + N(++tmp == conflicts.end(), F("conflicts file does not match current conflicts")); + } + } +} // parse_file_content_conflicts + +static void parse_resolve_conflicts_str(basic_io::parser & pars, roster_merge_result & result) { char const * error_message = "can't specify a %s conflict resolution for more than one conflict"; @@ -1509,9 +1625,8 @@ parse_resolve_conflicts_str(basic_io::pa N(false, F("%s is not a supported conflict resolution") % pars.token); } // while +} -} // parse_resolv_conflicts_str - void parse_resolve_conflicts_opts (options const & opts, roster_t const & left_roster, @@ -1563,21 +1678,21 @@ parse_resolve_conflicts_opts (options co // the conflicts in the same order they are generated; see merge.cc // resolve_merge_conflicts. - // If there are any conflicts for which we don't currently support - // resolutions, give a nice error message. 
- char const * const msg = "conflict resolution for %s not yet supported"; + // resolve_merge_conflicts should not call us if there are any + // conflicts for which we don't currently support resolutions; assert + // that - N(!result.missing_root_dir, F(msg) % "missing_root_dir"); - N(result.invalid_name_conflicts.size() == 0, F(msg) % "invalid_name_conflicts"); - N(result.directory_loop_conflicts.size() == 0, F(msg) % "directory_loop_conflicts"); - N(result.orphaned_node_conflicts.size() == 0, F(msg) % "orphaned_node_conflicts"); - N(result.multiple_name_conflicts.size() == 0, F(msg) % "multiple_name_conflicts"); - N(result.attribute_conflicts.size() == 0, F(msg) % "attribute_conflicts"); - N(result.file_content_conflicts.size() == 0, F(msg) % "file_content_conflicts"); + I(!result.missing_root_dir); + I(result.invalid_name_conflicts.size() == 0); + I(result.directory_loop_conflicts.size() == 0); + I(result.orphaned_node_conflicts.size() == 0); + I(result.multiple_name_conflicts.size() == 0); + I(result.attribute_conflicts.size() == 0); // These are the ones we know how to resolve. parse_duplicate_name_conflicts(pars, result.duplicate_name_conflicts, left_roster, right_roster); + parse_file_content_conflicts(pars, result.file_content_conflicts, left_roster, right_roster); if (src.lookahead != EOF) pars.err("extra conflicts in file"); @@ -1730,6 +1845,82 @@ void } void +roster_merge_result::resolve_file_content_conflicts(lua_hooks & lua, + roster_t const & left_roster, + roster_t const & right_roster, + content_merge_adaptor & adaptor) +{ + MM(left_roster); + MM(right_roster); + MM(this->roster); // New roster + + // Conflict node is present and attached in the new roster, with a null + // file content id. The resolution is to enter the user specified file + // content in the database and roster, or let the internal line merger + // handle it. 
+ + for (std::vector::const_iterator i = file_content_conflicts.begin(); + i != file_content_conflicts.end(); + ++i) + { + file_content_conflict const & conflict = *i; + MM(conflict); + + file_path left_name, right_name; + + left_roster.get_name(conflict.left_nid, left_name); + right_roster.get_name(conflict.right_nid, right_name); + + switch (conflict.resolution.first) + { + case resolve_conflicts::content_internal: + case resolve_conflicts::none: + { + file_id merged_id; + + N(resolve_conflicts::do_auto_merge(conflict, adaptor, left_roster, + right_roster, this->roster, merged_id), + F("merge of %s, %s failed") % left_name % right_name); + + P(F("merged %s, %s") % left_name % right_name); + + file_t result_node = downcast_to_file_t(roster.get_node(conflict.result_nid)); + result_node->content = merged_id; + } + break; + + case resolve_conflicts::content_user: + { + P(F("replacing content of %s, %s with %s") % left_name % right_name % conflict.resolution.second); + + file_id result_id; + file_data left_data, right_data, result_data; + data result_raw_data; + adaptor.get_version(conflict.left, left_data); + adaptor.get_version(conflict.right, right_data); + read_data(conflict.resolution.second, result_raw_data); + result_data = file_data(result_raw_data); + calculate_ident(result_data, result_id); + + file_t result_node = downcast_to_file_t(roster.get_node(conflict.result_nid)); + result_node->content = result_id; + + adaptor.record_merge(conflict.left, conflict.right, result_id, + left_data, right_data, result_data); + + } + break; + + default: + I(false); + } + + } // end for + + file_content_conflicts.clear(); +} + +void roster_merge_result::clear() { missing_root_dir = false; @@ -2069,15 +2260,47 @@ namespace assign_name(result, n->self, old_n->parent, old_n->name, side); } + bool + auto_merge_succeeds(file_content_conflict conflict, + content_merge_adaptor & adaptor, + roster_t const & left_roster, + roster_t const & right_roster) + { + node_id ancestor_nid; + revision_id ancestor_rid; + shared_ptr ancestor_roster; + conflict.get_ancestor_roster(adaptor, ancestor_nid, ancestor_rid, ancestor_roster); + + I(ancestor_roster); + I(ancestor_roster->has_node(ancestor_nid)); // this fails if there is no least common ancestor + + file_id anc_id, left_id, right_id; + file_path anc_path, left_path, right_path; + ancestor_roster->get_file_details(ancestor_nid, anc_id, anc_path); + left_roster.get_file_details(conflict.left_nid, left_id, left_path); + right_roster.get_file_details(conflict.right_nid, right_id, right_path); + + content_merger cm(*ancestor_roster, left_roster, right_roster, adaptor); + + file_data left_data, right_data, merge_data; + + return cm.attempt_auto_merge(anc_path, left_path, right_path, + anc_id, left_id, right_id, + left_data, right_data, merge_data); + } + void merge_nodes(node_t const left_n, marking_t const & left_marking, set const & left_uncommon_ancestors, + roster_t const & left_roster, node_t const right_n, marking_t const & right_marking, set const & right_uncommon_ancestors, + roster_t const & right_roster, node_t const new_n, - roster_merge_result & result) + roster_merge_result & result, + content_merge_adaptor & adaptor) { // merge name pair left_name, right_name, new_name; @@ -2128,6 +2351,9 @@ namespace } else { + if (auto_merge_succeeds(conflict, adaptor, left_roster, right_roster)) + conflict.resolution = make_pair(resolve_conflicts::content_internal, file_path()); + downcast_to_file_t(new_n)->content = file_id(); 
result.file_content_conflicts.push_back(conflict); } @@ -2192,6 +2418,7 @@ roster_merge(roster_t const & left_paren roster_t const & right_parent, marking_map const & right_markings, set const & right_uncommon_ancestors, + content_merge_adaptor & adaptor, roster_merge_result & result) { set already_handled; @@ -2280,11 +2507,14 @@ roster_merge(roster_t const & left_paren merge_nodes(left_n, left_mi->second, // left_marking left_uncommon_ancestors, + left_parent, right_n, right_mi->second, right_uncommon_ancestors, + right_parent, new_i->second, // new_n - result); + result, + adaptor); ++new_i; } else @@ -2326,11 +2556,14 @@ roster_merge(roster_t const & left_paren merge_nodes(left_n, left_mi->second, // left_marking left_uncommon_ancestors, + left_parent, i.right_data(), // right_n right_mi->second, // right_marking right_uncommon_ancestors, + right_parent, new_i->second, // new_n - result); + result, + adaptor); ++new_i; } else @@ -2357,11 +2590,14 @@ roster_merge(roster_t const & left_paren merge_nodes(i.left_data(), // left_n left_mi->second, // left_marking left_uncommon_ancestors, + left_parent, i.right_data(), // right_n right_mi->second, // right_marking right_uncommon_ancestors, + right_parent, new_i->second, // new_n - result); + result, + adaptor); } ++left_mi; ++right_mi; ============================================================ --- roster_merge.hh ac60af8e6a41966a00430a85604e9d1c08acc67d +++ roster_merge.hh 2ea251ea72b44a75a48fd80db8ea0e0742110a7b @@ -37,7 +37,7 @@ namespace resolve_conflicts namespace resolve_conflicts { - enum resolution_t {none, suture, rename}; + enum resolution_t {none, content_user, content_internal, rename, suture}; } // renaming the root dir allows these: @@ -124,9 +124,17 @@ struct file_content_conflict struct file_content_conflict { node_id left_nid, right_nid, result_nid; + file_id left, right; + + std::pair resolution; + + file_content_conflict () : + left_nid(the_null_node), right_nid(the_null_node), result_nid(the_null_node), + resolution(std::make_pair(resolve_conflicts::none, file_path())) {}; + file_content_conflict(node_id left_nid, node_id right_nid, node_id result_nid) : - left_nid(left_nid), right_nid(right_nid), result_nid(result_nid) {} - file_id left, right; + left_nid(left_nid), right_nid(right_nid), result_nid(result_nid), + resolution(std::make_pair(resolve_conflicts::none, file_path())) {}; void get_ancestor_roster(content_merge_adaptor & adaptor, node_id & ancestor_nid, @@ -224,6 +232,10 @@ struct roster_merge_result content_merge_adaptor & adaptor, bool const basic_io, std::ostream & output) const; + void resolve_file_content_conflicts(lua_hooks & lua, + roster_t const & left_roster, + roster_t const & right_roster, + content_merge_adaptor & adaptor); void clear(); }; @@ -237,6 +249,7 @@ roster_merge(roster_t const & left_paren roster_t const & right_parent, marking_map const & right_markings, std::set const & right_uncommon_ancestors, + content_merge_adaptor & adaptor, roster_merge_result & result); void ============================================================ --- tests/resolve_duplicate_name_conflict/__driver__.lua 3da589c23ef5c0e996b267d74be712fc6c2176a2 +++ tests/resolve_duplicate_name_conflict/__driver__.lua aed0dbae5f8f352b3dabe3b91cffbe6a87acae62 @@ -97,7 +97,7 @@ check ("checkout.sh abe 1" == readfile ( rename ("stdout", "checkout.sh-abe") check ("checkout.sh abe 1" == readfile ("checkout.sh-abe")) -get ("checkout.sh-merged", "checkout.sh") +writefile ("checkout.sh", "checkout.sh merged") -- This has the resolution 
lines get ("conflicts-resolved", "_MTN/conflicts") @@ -129,7 +129,7 @@ check("thermostat honeywell beth 1" == r -- Verify file contents check("thermostat westinghouse abe 1" == readfile("thermostat-westinghouse.c")) check("thermostat honeywell beth 1" == readfile("thermostat-honeywell.c")) -check("checkout.sh merged\n" == readfile("checkout.sh")) +check("checkout.sh merged" == readfile("checkout.sh")) -- In the second step, we extend the revision history graph: -- @@ -156,7 +156,7 @@ jim_1 = base_revision() jim_1 = base_revision() --- Abe edits his files and merges +-- Abe edits his files and merges to g revert_to(abe_1) writefile("thermostat.c", "thermostat westinghouse abe 2") @@ -170,8 +170,6 @@ check(samefile("expected-merge-messages- get ("expected-merge-messages-abe_2-jim_1-conflicts") check(samefile("expected-merge-messages-abe_2-jim_1-conflicts", "stderr")) -check (mtn("automate", "show_conflicts"), 0, true, nil) - -- This succeeds get ("merge-abe_2-jim_1-resolve_conflicts", "_MTN/conflicts") check(mtn("merge", "--resolve-conflicts-file=_MTN/conflicts"), 0, nil, true) @@ -179,20 +177,52 @@ check(samefile("expected-merge-messages- get ("expected-merge-messages-abe_2-jim_1") check(samefile("expected-merge-messages-abe_2-jim_1", "stderr")) --- Beth edits her files and merges +check(mtn("update"), 0, nil, true) +check("checkout.sh abe 2" == readfile("checkout.sh")) + +-- Beth edits her files and merges to h revert_to(beth_1) -writefile("thermostat.c", "thermostat honeywell beth 1") -writefile("checkout.sh", "checkout.sh beth 1") +writefile("thermostat.c", "thermostat honeywell beth 2") +writefile("checkout.sh", "checkout.sh beth 1\n\n\nbeth 2") +-- line merger succeeds for checkout.sh, since it is a simple diff +-- from the ancestor + commit("testbranch", "beth_2") beth_2 = base_revision() +check (mtn("automate", "show_conflicts", beth_2, jim_1), 0, true, nil) +canonicalize("stdout") +get ("merge-beth_2-jim_1-conflicts", "_MTN/conflicts") +check(samefile("_MTN/conflicts", "stdout")) + -- If we just do 'merge', mtn will merge 'e' and 'g', since those are -- the current heads. To emulate separate development databases, we --- specify the revisions to merge. -check(mtn("merge", jim_1, beth_2), 0, nil, true) +-- specify the revisions to merge. This also lets us excercise the +-- other branch of some 'if's in the code; in merging to abe_2, jim_1 +-- was left; now it is right. 
+check(mtn("explicit_merge", "--resolve-conflicts=resolve_internal", beth_2, jim_1, "testbranch"), 0, nil, true) canonicalize("stderr") get ("expected-merge-messages-jim_1-beth_2") check(samefile("expected-merge-messages-jim_1-beth_2", "stderr")) +check(mtn("update"), 0, nil, true) +canonicalize("stderr") +get ("expected-update-messages-beth_3") +check(samefile("expected-update-messages-beth_3", "stderr")) + +check("checkout.sh merged\n\n\nbeth 2\n" == readfile("checkout.sh")) + +-- merge g, h to f +check(mtn("merge"), 0, nil, true) +canonicalize("stderr") +get ("expected-merge-messages-abe_3-beth_3") +check(samefile("expected-merge-messages-abe_3-beth_3", "stderr")) + +check(mtn("update"), 0, nil, true) +canonicalize("stderr") +get ("expected-update-messages-jim_2") +check(samefile("expected-update-messages-jim_2", "stderr")) + +check("checkout.sh abe 2\n\n\nbeth 2\n" == readfile("checkout.sh")) -- end of file ============================================================ --- tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_1-beth_1 8cc5c13905078a96dd1e308964537c69ed78f4e8 +++ tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_1-beth_1 b1db34423258a86c124deda6ad04755de0afa1dc @@ -1,8 +1,8 @@ mtn: renaming thermostat.c to thermostat mtn: 2 heads on branch 'testbranch' mtn: [left] 5285636b9d9f988e79b3dcd9a40e64d15fb7fc9f mtn: [right] e9ad84a3fc40ef1109251c308428439c21ad1de9 mtn: suturing checkout.sh, checkout.sh into checkout.sh mtn: renaming thermostat.c to thermostat-westinghouse.c mtn: renaming thermostat.c to thermostat-honeywell.c -mtn: [merged] 257729bebdb32819cd1fc059806e0fb4144f7ec7 +mtn: [merged] 16d13cce9d163a224e0e491da41415c322727b46 mtn: note: your workspaces have not been updated ============================================================ --- tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1 318b3562e18678195b6209aedc9a157a8f2937e2 +++ tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1 13ef6b36c69721f15474749865d55d111eb728b9 @@ -1 +1,6 @@ -should get suture/edit conflict, or just merge +mtn: 2 heads on branch 'testbranch' +mtn: [left] 16d13cce9d163a224e0e491da41415c322727b46 +mtn: [right] 3bb925aabafadcdbdc502699024ee282fef0c512 +mtn: replacing content of checkout.sh, checkout.sh with checkout.sh +mtn: [merged] 7cb32b106906f18ea25ee2578d82de78b2c95337 +mtn: note: your workspaces have not been updated ============================================================ --- tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1-conflicts c505d776680d4278cfbb27e0cb850ba8486e86bd +++ tests/resolve_duplicate_name_conflict/expected-merge-messages-abe_2-jim_1-conflicts 2222ecb924620c741d8d84477d172287fecfa8d0 @@ -1,9 +1,9 @@ mtn: 2 heads on branch 'testbranch' mtn: 2 heads on branch 'testbranch' -mtn: [left] 257729bebdb32819cd1fc059806e0fb4144f7ec7 +mtn: [left] 16d13cce9d163a224e0e491da41415c322727b46 mtn: [right] 3bb925aabafadcdbdc502699024ee282fef0c512 mtn: 1 content conflict requires user intervention mtn: conflict: content conflict on file 'checkout.sh' -mtn: content hash is 3390893b9a31eaa9ef0e9364b27ee1e617e6891a on the left +mtn: content hash is f7a9033fcfa98450d0c0a25e41aa68904b7a33ce on the left mtn: content hash is a8acbc7472178c5e87c3f0a6953ac5db954e1205 on the right mtn: help required for 3-way merge mtn: [ancestor] checkout.sh ============================================================ --- tests/resolve_duplicate_name_conflict/expected-merged-revision-jim_1 
257729bebdb32819cd1fc059806e0fb4144f7ec7 +++ tests/resolve_duplicate_name_conflict/expected-merged-revision-jim_1 16d13cce9d163a224e0e491da41415c322727b46 @@ -1,6 +1,6 @@ format_version "2" format_version "2" -new_manifest [9a3352571f7379d5114aa57fb650fe2acb7588f6] +new_manifest [2a69f545ebc0c6ec2feae218a1f5b6fdff456661] old_revision [5285636b9d9f988e79b3dcd9a40e64d15fb7fc9f] @@ -13,7 +13,7 @@ second_ancestor "" sutured_file "checkout.sh" first_ancestor "checkout.sh" second_ancestor "" - content [3390893b9a31eaa9ef0e9364b27ee1e617e6891a] + content [f7a9033fcfa98450d0c0a25e41aa68904b7a33ce] old_revision [e9ad84a3fc40ef1109251c308428439c21ad1de9] @@ -26,4 +26,4 @@ second_ancestor "" sutured_file "checkout.sh" first_ancestor "checkout.sh" second_ancestor "" + content [f7a9033fcfa98450d0c0a25e41aa68904b7a33ce] - content [3390893b9a31eaa9ef0e9364b27ee1e617e6891a] ============================================================ --- tests/resolve_duplicate_name_conflict/expected-update-messages-jim_1 d42cacc7d4725d62070fa6f34bc0189c2da4d1cd +++ tests/resolve_duplicate_name_conflict/expected-update-messages-jim_1 d6cdd06022bdadb6f41e395bb8e3f89dbcb64e11 @@ -1,7 +1,7 @@ mtn: updating along branch 'testbranch' mtn: updating along branch 'testbranch' -mtn: selected update target 257729bebdb32819cd1fc059806e0fb4144f7ec7 +mtn: selected update target 16d13cce9d163a224e0e491da41415c322727b46 mtn: adding checkout.sh mtn: renaming thermostat.c to thermostat-honeywell.c mtn: adding thermostat-westinghouse.c mtn: dropping checkout.sh +mtn: updated to base revision 16d13cce9d163a224e0e491da41415c322727b46 -mtn: updated to base revision 257729bebdb32819cd1fc059806e0fb4144f7ec7 ============================================================ --- tests/resolve_duplicate_name_conflict/merge-abe_2-jim_1-resolve_conflicts bb50072864e1404e1da5597bca04c486c33bf2ed +++ tests/resolve_duplicate_name_conflict/merge-abe_2-jim_1-resolve_conflicts a62d7972e68ee89201aeb87d575b1c97634b40dd @@ -10,4 +10,4 @@ ancestor_file_id [61b8d4fb0e5d78be111f69 left_file_id [3390893b9a31eaa9ef0e9364b27ee1e617e6891a] right_name "checkout.sh" right_file_id [a8acbc7472178c5e87c3f0a6953ac5db954e1205] + resolved_user "checkout.sh" -resolved_content "checkout.sh"
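For a resolved_user entry like the one closing the test fixture above, the machinery that applies it is roster_merge_result::resolve_file_content_conflicts(): the replacement file is read from the workspace, hashed, spliced into the merged roster, and recorded as a merge of the two parent versions so later lookups find it. The content_user branch, abridged from the roster_merge.cc hunk:

  P(F("replacing content of %s, %s with %s")
    % left_name % right_name % conflict.resolution.second);

  file_id result_id;
  file_data left_data, right_data, result_data;
  data result_raw_data;

  adaptor.get_version(conflict.left, left_data);
  adaptor.get_version(conflict.right, right_data);
  read_data(conflict.resolution.second, result_raw_data);
  result_data = file_data(result_raw_data);
  calculate_ident(result_data, result_id);

  file_t result_node = downcast_to_file_t(roster.get_node(conflict.result_nid));
  result_node->content = result_id;

  adaptor.record_merge(conflict.left, conflict.right, result_id,
                       left_data, right_data, result_data);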