# # delete_file "tests/t_merge_binary.at" # # add_file "tests/t_lua_includedir.at" # # add_file "tests/t_merge_manual.at" # # add_file "tests/t_rcfile_dir.at" # # add_file "tests/t_revert_restrict.at" # # add_file "tests/t_status.at" # # patch "ChangeLog" # from [4f5d38c23ff44435d5657bcdcc9ccb93f7976e09] # to [5b2431e5d6c1515b9c46cbb61dcddc6f6ac7d540] # # patch "INSTALL" # from [91300af1b7411653bb3eff5608a548b6a5ed633b] # to [400eaf8e798d44fce8b0622c88bc324fd31830c2] # # patch "Makefile.am" # from [bedd826d1991853466ab57d77d80be1635014d17] # to [8f57b338067b43d1d1ae5de118323a6c67f6be7d] # # patch "app_state.cc" # from [7c12ea6894df8aed4ac689b333df2f5dc19e3fd4] # to [1e2cfd4af8c4ccc2b721c758469659dc3d7f4131] # # patch "app_state.hh" # from [3ac03fa785dd39a9c005a7a35ad1962468d5be32] # to [ba2b8d968ce1eabe69b2c7fb60a7c666cfa4ada7] # # patch "commands.cc" # from [0b37c574ff4f24aa3ef783d34654c53a4e067818] # to [2b9bb2299e2fe0f337e33d1989d6f3f6d8c793c7] # # patch "contrib/mtbrowse.sh" # from [4e1791bf6cfa9d4be0ebb45884a663fb366d876e] # to [be25d5887954500c958377bc524161d7a641fcaa] # # patch "database.cc" # from [181e7b79e4435b2ffa1561ce8b9162a41563ed72] # to [1ce26993177acbeb3e3d18aba17f43700372fa3c] # # patch "diff_patch.cc" # from [209e60efcca30474071383769840ae18d3d1aeef] # to [fbef84d6f575c4f14850b8a8e7d78e42021a0e80] # # patch "diff_patch.hh" # from [8045a7d688a9494585eab247815ef9573687039f] # to [18be28ae5d744c23da230988bd4a48f6556cb4b4] # # patch "file_io.cc" # from [35b0d27bb9a0a1c72b651fd2081a0f9a6d6c58f1] # to [6125dbcc50e69e63ba8a22664e3e8126ee1115a0] # # patch "file_io.hh" # from [d2865f7695f667b872b858b276edcec5d7cf1605] # to [80ef83f6cd2479dd5dbcfd13bca81172d90fde4d] # # patch "lua.cc" # from [107b2deb3efdabe34542d967503b2250055b4d23] # to [e4dd123e0c07b59361555b521f3b2cc17aa445b5] # # patch "monotone.cc" # from [c2d839bc84a52c398ae42a860f31b87d0004cc85] # to [8469a8de748e5eb3958f74fc204703a041b1a86c] # # patch "monotone.texi" # from [e4a42059771b12538e575453621b24cc20ddf2ad] # to [131933a2c2cd95c13bf8e3b8c988b8e4bc97664d] # # patch "netcmd.cc" # from [feff9457b6ea819cad0007c67d5fbe0cdf0d1cd7] # to [5ee6262d8ddead8fe0c14e4c32c0d0d6781ee5fe] # # patch "netsync.cc" # from [492fe6044e61209e5edb00016262062ade0ee65c] # to [4e9c31748b5a75e9535937786862574606fab923] # # patch "std_hooks.lua" # from [572befdb727cd626e7a8efd27d5a8109bdfebd27] # to [9b1a27e30129929aa366223e51f1c11ec918fc93] # # patch "tests/t_cat_file_by_name.at" # from [009033538c4424ad6994c33eef9dab24bef15980] # to [45f9704b56b2b74f16c5579539bc1c6f777b1e56] # # patch "tests/t_lua_includedir.at" # from [] # to [5b4ce1e6a2781a7a841039a1e02a96cc0221f3ff] # # patch "tests/t_merge_manual.at" # from [] # to [30eeeb2ea878e180248e8969dda05c07cd9df743] # # patch "tests/t_netsync_diffbranch.at" # from [a529b5b568039685d66db00c8550b817fbc3a4a4] # to [6a2a4e9b6dada24dea2ad88bd0c3d0caf8d180bc] # # patch "tests/t_rcfile_dir.at" # from [] # to [06128ab21f137c3a69e4fce2db25b0ea3fa424de] # # patch "tests/t_restrictions.at" # from [d3500f6e212de48b798b050de730f4762e378b2e] # to [be72348a94c89d89703de67b40c886edc2bef302] # # patch "tests/t_revert_restrict.at" # from [] # to [ca9b1e167516b4b92a5d285da56b7ca076dbd587] # # patch "tests/t_status.at" # from [] # to [0b361ba1ed3579bcd794fb3edd21a881f4a46e23] # # patch "testsuite.at" # from [4a1a612444d0916a4fcbe71053f66ba1d7aaa18e] # to [2e17e62213b9878d08d7c792f244b6e193498618] # # patch "ui.cc" # from [e42166c04687d0d23d1432cbd3861246352a3e9f] # to [228f2912ff4f24f41df4f4b0c62f2b5321970aee] # # 
patch "work.cc" # from [b165e29831c433d9866bb5d3e03ceccdefd6be39] # to [a62b401c81e47925f2911689aa985d652eb624ae] # # patch "work.hh" # from [70d8503b81c8ab4d49481cd740f49fa7e4375498] # to [b632c7b9bda8e2df8b392b3606d554398e6ddb97] # --- ChangeLog +++ ChangeLog @@ -1,5 +1,174 @@ +2005-06-22 Nathaniel Smith + + * netcmd.cc, netsync.cc: Revert backwards compatibility code; 0.19 + and 0.20 can't be usefully compatible, and the code as it existed + would cause real version mismatch error reporting to not work + right. (Old client with new server would give a generic "server + disconnected" error message instead of something useful.) + +2005-06-21 Nathaniel Smith + + * netsync.cc (rebuild_merkle_trees): Fix FIXME comments to match + reality. + * tests/t_netsync_diffbranch.at: No longer a bug, remove + priority. + +2005-06-20 Nathaniel Smith + + * monotone.texi (Hook Reference): Oops, missed a @ref. + +2005-06-20 Nathaniel Smith + + * monotone.texi (Default monotonerc): Rename section to... + (Default hooks): ...this, to emphasize is still read even when a + monotonerc exists. + +2005-06-19 Richard Levitte + + * Makefile.am: There's no reason for monotone.pdf or .dvi to + depend on monotone.info, since they are built from the .texi + files. Also, make the monotone.html and html targets depend + on version.texi and std_hooks.lua as well. + +2005-06-18 Matt Johnston + + * INSTALL: fix typo, should be -Iboost_1_31_0 not -Iboost_1_31_2 + +2005-06-18 Riccardo Ghetta + * monotone.texi: include std_hooks.lua as an appendix and remove long + lua excerpts from hook reference. + * Makefile.am : make monotone.pdf/eps depend on monotone.info + +2005-06-17 Matt Johnston + + * database.cc (database::execute()): truncate long query log messages + before copying, saving memory. + Patch from Eric Anderson < ea at cello hpl hp com > + +2005-06-17 Riccardo Ghetta + Adds include()/includedir() to lua hooks and extend --rcfile + * lua.cc: handle --rcfile with directories, implement + include() and includedir() + * testsuite.at, t_lua_includedir.at, t_rcfile_dir.at: + test new functionality + * monotone.texi: document all functions available to hook + writers, including the new include() and includedir() + +2005-06-16 Nathaniel Smith + + * diff_patch.cc (merge_extents): Typo caught by anonymous reader. + +2005-06-16 Nathaniel Smith + + * commands.cc (cat): Account for being in a subdir in 'cat file + REV PATH'. + * tests/t_cat_file_by_name.at: Test. + +2005-06-17 Richard Levitte + + * app_state.cc (app_state::app_state()): Avoid a gcc warning by + having the class members initialised in the same order they are + defined in the class. + +2005-06-16 Nathaniel Smith + + * std_hooks.lua (ignore_file): Add Cons/SCons cache files to + default ignore list. + +2005-06-16 Matt Johnston + + * ui.cc: increase the divisor as required so that we don't get spurious + screen updates when we're using the kilobyte/megabyte tickers + +2005-06-15 Matt Johnston + + * monotone.texi: clarify some netsync parts of the tutorial + +2005-06-15 Richard Levitte + + * netsync.cc (struct session): Add a pattern regex cache. + (analyze_ancestry_graph): Use the regex cache instead of the + pattern string itself. This is especially important when the + pattern is used as an old-style collection. + (process_hello_cmd): Recreate the pattern regex cache with the + conversion of the pattern to a regex when it's used as an + old-style collection. + (process_auth_cmd): When the pattern changes, change the regex + cache as well. 
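As a sketch of the include()/includedir() functions described in the 2005-06-17 Riccardo Ghetta entry above, a monotonerc could pull extra hook files in like this (the paths are only examples):

   -- load one extra hook file; include() returns false if loading fails
   if not include("/home/user/mt-hooks/extra_hooks.lua") then
      io.write("warning: could not load extra_hooks.lua\n")
   end
   -- load every file in a directory, in sorted order; includedir()
   -- stops with an error if any of the scripts fails to load
   includedir("/home/user/mt-hooks.d")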
+ +2005-06-14 Richard Levitte + + * std_hooks.lua (get_preferred_merge2_command, + get_preferred_merge3_command): EDITOR may be undefined. In that + case, os.getenv() returns nil, on which string.lower() chokes. + It's much better to check for that and default to an empty + string. + +2005-06-11 Derek Scherger + + * commands.cc (complete_command): log command expansion messages + with L instead of P to reduce chatter + (status): add --brief option and corresponding output + (identify): add trailing space to comment gcc complains about + * monotone.cc: fix comment typo and add additional details for + command specific options + * monotone.texi (Automation): list inventory status code + combinations and descriptions + * tests/t_status.at: new test of status command and --brief option + * testsuite.at: add it + +2005-06-11 Matt Johnston + + * commands.cc: revert should ignore the ignore hooks, otherwise bad + things happen (revert a single ignored file, resultant empty ignore list + reverts the whole working copy). + * app_state.cc, app_state.hh: give set_restriction a flag to disregard + file-ignore hooks. + * tests/t_revert_restrict.at, testsuite.at: a test + +2005-06-09 Riccardo Ghetta + + * std_hooks.lua: make binary_file return nil on unreadable/empty files + +2005-06-10 Joel Reed + + * commands.cc (CMD(cdiff)): Add OPT_DEPTH to command options. + * t_restrictions.at: Add to testcase. + +2005-06-09 Joel Reed + + * commands.cc (CMD(diff)): Add OPT_DEPTH back in, as it is used. + * t_restrictions.at: Add to testcase to increase likelihood of + keeping it around :) + +2005-06-10 Richard Levitte + + * commands.cc (CMD(diff)): Remove OPT_DEPTH, as it was never + used. + +2005-06-09 Richard Levitte + + * monotone.texi (Merging): I assume that "apposite" was supposed + to be "appropriate". + +2005-06-09 Riccardo Ghetta + + * diff_patch.cc/hh: honor the new manual_merge attribute + * file_io.cc/hh: move here the guess_binary function + * lua.cc: let guess_binary available to lua + * std_hooks.lua: handle manual_merge as an add-time attribute and + initialize by default make it true if the file appears to be binary. + Make read_contents_of_file able to read "binary" files. + * tests/t_merge_manual.at: tests new behaviour, superceding the + old XFAIL t_merge_binary.at test. + * monotone.texi: document changes, adding a small section on merging. + 2005-06-07 Nathaniel Smith + * ChangeLog: Fixup. + +2005-06-07 Nathaniel Smith + * monotone.texi (Storage and workflow): Attempt to thwart some common misconceptions. @@ -28,6 +197,7 @@ Throw in some calls to CANONICALISE, maybe this will help on Win32... +2005-06-04 Timothy Brownawell * netsync.cc, netcmd.cc: Style cleanups (mostly whitespace). --- INSTALL +++ INSTALL @@ -69,7 +69,7 @@ standard include path, or you can pass additional configuration options to your monotone configure build, such as: - ./configure CPPFLAGS="-Iboost_1_31_2" LDFLAGS="-Lboost_1_31_0/libs" + ./configure CPPFLAGS="-Iboost_1_31_0" LDFLAGS="-Lboost_1_31_0/libs" monotone does not use all of boost -- for instance, people often have trouble building boost.python, which we do not use. 
you don't --- Makefile.am +++ Makefile.am @@ -297,10 +297,12 @@ MAKEINFOFLAGS=-I $(top_builddir) -monotone.pdf: monotone.texi version.texi $(PDF_FIGURES) +monotone.info: monotone.texi version.texi std_hooks.lua -monotone.dvi: monotone.texi version.texi $(EPS_FIGURES) +monotone.pdf: monotone.texi version.texi std_hooks.lua $(PDF_FIGURES) +monotone.dvi: monotone.texi version.texi std_hooks.lua $(EPS_FIGURES) + #%.eps: %.epsi # mv $< $@ # @@ -408,14 +410,14 @@ # automake doesn't build html docs -monotone.html: monotone.texi texinfo.css +monotone.html: monotone.texi version.texi std_hooks.lua texinfo.css makeinfo --no-split --no-headers --output $@ --html $< perl -i.perlbak -pe 's@@\n@' $@ rm -f monotone.html.perlbak # The .perlbak thing is needed, otherwise the perl executions fails on MinGW -html: monotone.texi texinfo.css +html: monotone.texi version.texi std_hooks.lua texinfo.css mkdir -p html makeinfo --number-sections --html --output html $< perl -i.perlbak -pe 's@@\n@' html/*.html --- app_state.cc +++ app_state.cc @@ -31,7 +31,7 @@ app_state::app_state() : branch_name(""), db(""), stdhooks(true), rcfiles(true), diffs(false), - search_root("/"), depth(-1), last(-1), verbose(false) + verbose(false), search_root("/"), depth(-1), last(-1) { db.set_app(this); } @@ -155,7 +155,9 @@ } void -app_state::set_restriction(path_set const & valid_paths, vector const & paths) +app_state::set_restriction(path_set const & valid_paths, + vector const & paths, + bool respect_ignore) { // this can't be a file-global static, because file_path's initializer // depends on another global static being defined. @@ -165,7 +167,7 @@ { file_path p = prefix(*i); - if (lua.hook_ignore_file(p)) + if (respect_ignore && lua.hook_ignore_file(p)) { L(F("'%s' ignored by restricted path set\n") % p()); continue; --- app_state.hh +++ app_state.hh @@ -54,7 +54,9 @@ void create_working_copy(std::string const & dir); file_path prefix(utf8 const & path); - void set_restriction(path_set const & valid_paths, std::vector const & paths); + void app_state::set_restriction(path_set const & valid_paths, + std::vector const & paths, + bool respect_ignore = true); bool restriction_includes(file_path const & path); // Set the branch name. 
If you only invoke set_branch, the branch --- commands.cc +++ commands.cc @@ -129,7 +129,7 @@ { if (cmd.length() == 0 || cmds.find(cmd) != cmds.end()) return cmd; - P(F("expanding command '%s'\n") % cmd); + L(F("expanding command '%s'\n") % cmd); vector matched; @@ -146,7 +146,7 @@ if (matched.size() == 1) { string completed = *matched.begin(); - P(F("expanded command to '%s'\n") % completed); + L(F("expanded command to '%s'\n") % completed); return completed; } else if (matched.size() > 1) @@ -1263,7 +1263,7 @@ } CMD(status, "informative", "[PATH]...", "show status of working copy", - OPT_DEPTH) + OPT_DEPTH % OPT_BRIEF) { revision_set rs; manifest_map m_old, m_new; @@ -1273,8 +1273,50 @@ calculate_restricted_revision(app, args, rs, m_old, m_new); - write_revision_set(rs, tmp); - cout << endl << tmp << endl; + if (global_sanity.brief) + { + I(rs.edges.size() == 1); + change_set const & changes = edge_changes(rs.edges.begin()); + change_set::path_rearrangement const & rearrangement = changes.rearrangement; + change_set::delta_map const & deltas = changes.deltas; + + for (path_set::const_iterator i = rearrangement.deleted_files.begin(); + i != rearrangement.deleted_files.end(); ++i) + cout << "dropped " << *i << endl; + + for (path_set::const_iterator i = rearrangement.deleted_dirs.begin(); + i != rearrangement.deleted_dirs.end(); ++i) + cout << "dropped " << *i << "/" << endl; + + for (map::const_iterator + i = rearrangement.renamed_files.begin(); + i != rearrangement.renamed_files.end(); ++i) + cout << "renamed " << i->first << endl + << " to " << i->second << endl; + + for (map::const_iterator + i = rearrangement.renamed_dirs.begin(); + i != rearrangement.renamed_dirs.end(); ++i) + cout << "renamed " << i->first << "/" << endl + << " to " << i->second << "/" << endl; + + for (path_set::const_iterator i = rearrangement.added_files.begin(); + i != rearrangement.added_files.end(); ++i) + cout << "added " << *i << endl; + + for (change_set::delta_map::const_iterator i = deltas.begin(); + i != deltas.end(); ++i) + { + // don't bother printing patches on added files + if (rearrangement.added_files.find(i->first) == rearrangement.added_files.end()) + cout << "patched " << i->first << endl; + } + } + else + { + write_revision_set(rs, tmp); + cout << endl << tmp << endl; + } } CMD(identify, "working copy", "[PATH]", "calculate identity of PATH or stdin", @@ -1328,7 +1370,7 @@ { revision_id rid; complete(app, idx(args, 1)(), rid); - file_path fp(idx(args, 2)()); + file_path fp = app.prefix(idx(args, 2)); manifest_id mid; app.db.get_revision_manifest(rid, mid); manifest_map m; @@ -2687,7 +2729,7 @@ "If one revision is given, the diff between the working directory and\n" "that revision is shown. If two revisions are given, the diff between\n" "them is given.", - OPT_BRANCH_NAME % OPT_REVISION) + OPT_BRANCH_NAME % OPT_REVISION % OPT_DEPTH) { do_diff(name, app, args, context_diff); } @@ -2949,7 +2991,7 @@ // we have the following // // old --- working - // \ \ + // \ \ // chosen --- merged // // - old is the revision specified in MT/revision @@ -3392,7 +3434,7 @@ extract_rearranged_paths(work, valid_paths); add_intermediate_paths(valid_paths); - app.set_restriction(valid_paths, args); + app.set_restriction(valid_paths, args, false); restrict_path_rearrangement(work, included, excluded, app); --- contrib/mtbrowse.sh +++ contrib/mtbrowse.sh @@ -16,12 +16,11 @@ # - Run from working copy of existing project. # Or give full filename to database. 
# - Change your configuration -# (Delete the "VISUAL", to use the "PAGER") -# Please "Reload DB", if to see the new configuration +# Delete the "VISUAL", to use the "PAGER", deleto both for internal viewer. +# Save configuration. +# Please "Reload DB", to see the new configuration # - Begin with menu "S Select revision" # - Browse in branches, revisions, diff files, view logs .... -# - Quit menu with "Q" to save your environment. -# Or "X" to exit without save anything. # # Needed tools: # monotone 0.19 or compatible @@ -58,10 +57,16 @@ # Fix cache deleting on startup. # Xargs for revision selection with date and key. # +# 2005/6/6 Version 0.1.7 address@hidden +# Backtitle with head, branch and filename. +# Default-item to remember the selection in menues. +# Check filname for reading before MT fails. +# Exit after --help or --version. +# # Known Bugs / ToDo-List: # * For Monotone Version >0.19 s/--depth/--last/, remove the fallback -VERSION="0.1.6" +VERSION="0.1.7" # Save users settings # Default values, can overwrite on .mtbrowserc @@ -131,10 +136,12 @@ then case $1 in --version) - echo "mtbrowse $VERSION" + echo "mtbrowse $VERSION" + exit 0 ;; --help|-h) - echo "mtbrowse [dbfile]" + echo "mtbrowse [dbfile]" + exit 0 ;; *) # Databasefile from command line @@ -144,12 +151,18 @@ # MT change the options, if you continue with other DB here! if [ -f MT/options ] then - echo -e "\n**********\n* WARNING!\n**********\n" - echo "Your MT/options will be overwrite, if" - echo "continue with different DB file or branch" - echo "in exist working directory!" - echo -e "\nENTER to confirm / CTRL-C to abbort" - read junk + + if ! dialog --cr-wrap --title " *********** WARNING! ********** " \ + --defaultno --colors --yesno " +Your \Zb\Z1MT/options\Zn will be overwrite, if +continue with different DB file or branch +in exist working directory! + +YES confirm / NO abbort" 0 0 + then + echo "abbort" + exit 1 + fi fi ;; esac @@ -187,7 +200,6 @@ then $PAGER < $1 else -# dialog --textbox $1 20 75 dialog --textbox $1 0 0 fi rm $1 @@ -242,6 +254,20 @@ # Is parameter given: No user select, if branch known. do_branch_sel() { + local OLD_BRANCH + + if [ ! -f "$DB" ] + then + echo "$DB: File not found! (mtbrowse)" + exit 1 + fi + + if [ ! -r "$DB" ] + then + echo "$DB: Can't read file! (mtbrowse)" + exit 1 + fi + # is Branch set, than can return if [ -n "$BRANCH" -a -n "$1" ] then @@ -254,6 +280,8 @@ echo "$DB" > $TEMPFILE.fname unset BRANCH fi + + SHORT_DB=`basename $DB` OLD_BRANCH=$BRANCH @@ -265,9 +293,18 @@ | sed -n -r -e 's/^(.+)$/\1\t-/p' > $TEMPFILE.dlg-branches \ || exit 200 fi + + if [ ! -s $TEMPFILE.dlg-branches ] + then + echo "Error: No branches found." + exit 1 + fi - dialog --begin 1 2 --menu "Select branch" 0 0 0 \ - `cat $TEMPFILE.dlg-branches` 2> $TEMPFILE.input + dialog --begin 1 2 \ + --default-item "$OLD_BRANCH" \ + --menu "Select branch" 0 0 0 \ + `cat $TEMPFILE.dlg-branches` \ + 2> $TEMPFILE.input BRANCH=`cat $TEMPFILE.input` # Clear Head, if branch changed @@ -318,7 +355,9 @@ do_action_sel() { # Action-Menu - while dialog --menu "Action for $REVISION" 0 60 0 \ + while dialog \ + --backtitle "h:$HEAD b:$BRANCH f:$SHORT_DB" \ + --menu "Action for $REVISION" 0 60 0 \ "L" "Log view of current revision" \ "P" "Diff files from parent" \ "W" "Diff files from working copy head" \ @@ -339,7 +378,7 @@ > $TEMPFILE.change.log then DEPTH_LAST="--depth" - dialog --title "ERROR" --msgbox \ + dialog --title " ERROR " --msgbox \ "Fallback to \"$DEPTH_LAST\" usage.\nPlease try again." 
6 40 else do_pager $TEMPFILE.change.log @@ -356,10 +395,11 @@ # Set DATE/KEY information fill_date_key $TEMPFILE.parents $TEMPFILE.certs3tmp - cat cat $TEMPFILE.certs3tmp | xargs dialog --begin 1 2 --menu \ - "Select parent for $REVISION" 0 0 0 \ - 2> $TEMPFILE.input \ - && PARENT=`cat $TEMPFILE.input` + cat cat $TEMPFILE.certs3tmp | \ + xargs dialog --begin 1 2 --default-item "$PARENT" \ + --menu "Select parent for $REVISION" 0 0 0 \ + 2> $TEMPFILE.input \ + && PARENT=`cat $TEMPFILE.input` else # Single parent only PARENT=`cat $TEMPFILE.parents` @@ -403,7 +443,7 @@ # DIFF2: from other revision (not working dir) # Select second revision if cat $TEMPFILE.certs.$BRANCH | \ - xargs dialog --menu \ + xargs dialog --default-item "$REV2" --menu \ "Select _older_ revision for branch:$BRANCH\nrev:$REVISION" \ 0 0 0 2> $TEMPFILE.revision-select then @@ -441,8 +481,12 @@ # Select a revision do_revision_sel() { + local SHORT_REV + # if branch or head not known, ask user + echo "branch check..." do_branch_sel check + echo "head check..." do_head_sel check # Building revisions list @@ -484,25 +528,30 @@ fi fi + SHORT_REV=`echo $REVISION | cut -c 1-$HASH_TRIM` + # Select revision while cat $TEMPFILE.certs.$BRANCH | \ - xargs dialog --menu "Select revision for branch:$BRANCH" \ - 0 0 0 2> $TEMPFILE.revision-select + xargs dialog \ + --backtitle "h:$HEAD b:$BRANCH f:$SHORT_DB" \ + --default-item "$SHORT_REV" \ + --menu "Select revision for branch:$BRANCH" \ + 0 0 0 2> $TEMPFILE.revision-select do - REVISION=`cat $TEMPFILE.revision-select` + SHORT_REV=`cat $TEMPFILE.revision-select` # Remove old marker, set new marker cat $TEMPFILE.certs.$BRANCH | sed -r \ - -e "s/^(.+) <==\"\$/\1\"/" -e "s/^($REVISION.+)\"\$/\1 <==\"/" \ + -e "s/^(.+) <==\"\$/\1\"/" -e "s/^($SHORT_REV.+)\"\$/\1 <==\"/" \ > $TEMPFILE.certs.$BRANCH.base mv $TEMPFILE.certs.$BRANCH.base $TEMPFILE.certs.$BRANCH - # Error, on "monotone automate parent XXXXXX". :-( + # Error, on "monotone automate parent XXXXXX", if short revision. 
:-( # Expand revision here, if short revision if [ "$SHOW_KEYS" = "yes" ] then - REVISION=`monotone complete revision $REVISION` + REVISION=`monotone complete revision $SHORT_REV` fi # OK Button: Sub Menu @@ -606,7 +655,7 @@ CERTS_MAX="$CERTS_MAX" DEPTH_LAST="$DEPTH_LAST" EOF - dialog --title "Info" --sleep 2 --infobox \ + dialog --title " Info " --sleep 2 --infobox \ "Configration wrote to\n$CONFIGFILE" 0 0 echo "config saved" ;; @@ -632,23 +681,25 @@ mkdir -p $TEMPDIR -while dialog --menu "Main - mtbrowse v$VERSION" 0 0 0 \ - "S" "Select revision" \ - "I" "Input revision" \ - "F" "Change DB File [`basename $DB`]" \ - "B" "Branch select [$BRANCH]" \ - "H" "Head select [$SHORT_HEAD]" \ - "R" "Reload DB, clear cache" \ - "-" "-" \ - "l" "Sumary complete log" \ - "t" "List Tags" \ - "h" "List Heads" \ - "k" "List Keys" \ - "-" "-" \ - "C" "Configuration" \ - "-" "-" \ - "X" "eXit" \ - 2> $TEMPFILE.menu +while dialog \ + --backtitle "h:$HEAD b:$BRANCH f:$DB" \ + --menu "Main - mtbrowse v$VERSION" 0 0 0 \ + "S" "Select revision" \ + "I" "Input revision" \ + "F" "Change DB File [`basename $DB`]" \ + "B" "Branch select [$BRANCH]" \ + "H" "Head select [$SHORT_HEAD]" \ + "R" "Reload DB, clear cache" \ + "-" "-" \ + "l" "Sumary complete log" \ + "t" "List Tags" \ + "h" "List Heads" \ + "k" "List Keys" \ + "-" "-" \ + "C" "Configuration" \ + "-" "-" \ + "X" "eXit" \ + 2> $TEMPFILE.menu do case `cat $TEMPFILE.menu` in S) @@ -679,6 +730,8 @@ ;; H) # Select head + # if branch or head not known, ask user + do_branch_sel check do_head_sel do_clear_cache ;; @@ -740,6 +793,7 @@ ;; X) do_clear_on_exit + clear exit 0 ;; *) @@ -750,3 +804,4 @@ done do_clear_on_exit +clear --- database.cc +++ database.cc @@ -12,6 +12,7 @@ #include #include +#include #include #include @@ -643,24 +644,28 @@ database::execute(char const * query, ...) { va_list ap; - int res; - char * errmsg = NULL; va_start(ap, query); // log it char * formatted = sqlite3_vmprintf(query, ap); - string qq(formatted); - if (qq.size() > constants::db_log_line_sz) - qq = qq.substr(0, constants::db_log_line_sz) + string(" ..."); + string qq; + + if (strlen(formatted) > constants::db_log_line_sz) + { + qq.assign(formatted, constants::db_log_line_sz); + qq.append(" ..."); + } + else + { + qq = formatted; + } L(F("db.execute(\"%s\")\n") % qq); - sqlite3_free(formatted); - va_end(ap); - va_start(ap, query); - // do it - res = sqlite3_exec_vprintf(sql(), query, NULL, NULL, &errmsg, ap); + char * errmsg = NULL; + int res = sqlite3_exec(sql(), formatted, NULL, NULL, &errmsg); + sqlite3_free(formatted); va_end(ap); --- diff_patch.cc +++ diff_patch.cc @@ -21,18 +21,6 @@ using namespace std; -bool guess_binary(string const & s) -{ - // these do not occur in ASCII text files - // FIXME: this heuristic is (a) crap and (b) hardcoded. fix both these. 
- if (s.find_first_of('\x00') != string::npos || - s.find_first_of("\x01\x02\x03\x04\x05\x06\x0e\x0f" - "\x10\x11\x12\x13\x14\x15\x16\x17\x18" - "\x19\x1a\x1c\x1d\x1e\x1f") != string::npos) - return true; - return false; -} - // // a 3-way merge works like this: // @@ -319,7 +307,7 @@ } // mutual or single-edge deletes - else if ((i->type == deleted && j->len == deleted) + else if ((i->type == deleted && j->type == deleted) || (i->type == deleted && j->type == preserved) || (i->type == preserved && j->type == deleted)) { @@ -510,6 +498,18 @@ return default_encoding; } +bool merge_provider::attribute_manual_merge(file_path const & path, + manifest_map const & man) +{ + std::string mmf; + if (get_attribute_from_db(path, manual_merge_attribute, man, mmf, app)) + { + return mmf == std::string("true"); + } + else + return false; // default: enable auto merge +} + bool merge_provider::try_to_merge_files(file_path const & anc_path, file_path const & left_path, file_path const & right_path, @@ -537,45 +537,53 @@ file_data left_data, right_data, ancestor_data; data left_unpacked, ancestor_unpacked, right_unpacked, merged_unpacked; - string left_encoding, anc_encoding, right_encoding; - vector left_lines, ancestor_lines, right_lines, merged_lines; this->get_version(left_path, left_id, left_data); this->get_version(anc_path, ancestor_id, ancestor_data); this->get_version(right_path, right_id, right_data); - left_encoding = this->get_file_encoding(left_path, left_man); - anc_encoding = this->get_file_encoding(anc_path, anc_man); - right_encoding = this->get_file_encoding(right_path, right_man); - left_unpacked = left_data.inner(); ancestor_unpacked = ancestor_data.inner(); right_unpacked = right_data.inner(); - split_into_lines(left_unpacked(), left_encoding, left_lines); - split_into_lines(ancestor_unpacked(), anc_encoding, ancestor_lines); - split_into_lines(right_unpacked(), right_encoding, right_lines); + if (!attribute_manual_merge(left_path, left_man) && + !attribute_manual_merge(right_path, right_man)) + { + // both files mergeable by monotone internal algorithm, try to merge + // note: the ancestor is not considered for manual merging. 
Forcing the + // user to merge manually just because of an ancestor mistakenly marked + // manual seems too harsh + string left_encoding, anc_encoding, right_encoding; + left_encoding = this->get_file_encoding(left_path, left_man); + anc_encoding = this->get_file_encoding(anc_path, anc_man); + right_encoding = this->get_file_encoding(right_path, right_man); + + vector left_lines, ancestor_lines, right_lines, merged_lines; + split_into_lines(left_unpacked(), left_encoding, left_lines); + split_into_lines(ancestor_unpacked(), anc_encoding, ancestor_lines); + split_into_lines(right_unpacked(), right_encoding, right_lines); + + if (merge3(ancestor_lines, + left_lines, + right_lines, + merged_lines)) + { + hexenc tmp_id; + file_data merge_data; + string tmp; + + L(F("internal 3-way merged ok\n")); + join_lines(merged_lines, tmp); + calculate_ident(data(tmp), tmp_id); + file_id merged_fid(tmp_id); + merge_data = file_data(tmp); - if (merge3(ancestor_lines, - left_lines, - right_lines, - merged_lines)) - { - hexenc tmp_id; - file_data merge_data; - string tmp; - - L(F("internal 3-way merged ok\n")); - join_lines(merged_lines, tmp); - calculate_ident(data(tmp), tmp_id); - file_id merged_fid(tmp_id); - merge_data = file_data(tmp); - - merged_id = merged_fid; - record_merge(left_id, right_id, merged_fid, - left_data, merge_data); - - return true; + merged_id = merged_fid; + record_merge(left_id, right_id, merged_fid, + left_data, merge_data); + + return true; + } } P(F("help required for 3-way merge\n")); @@ -715,8 +723,18 @@ return default_encoding; } +bool update_merge_provider::attribute_manual_merge(file_path const & path, + manifest_map const & man) +{ + std::string mmf; + if (get_attribute_from_working_copy(path, manual_merge_attribute, mmf)) + return mmf == std::string("true"); + else if (get_attribute_from_db(path, manual_merge_attribute, man, mmf, app)) + return mmf == std::string("true"); + else + return false; // default: enable auto merge +} - // the remaining part of this file just handles printing out various // diff formats for the case where someone wants to *read* a diff // rather than apply it. @@ -845,7 +863,7 @@ if (b_len == 0) ost << " +0,0"; else - { + { ost << " +" << b_begin+1; if (b_len > 1) ost << "," << b_len; --- diff_patch.hh +++ diff_patch.hh @@ -19,7 +19,6 @@ // this file is to contain some stripped down, in-process implementations // of GNU-diffutils-like things (diff, diff3, maybe patch..) -bool guess_binary(std::string const & s); enum diff_type { @@ -81,6 +80,9 @@ virtual std::string get_file_encoding(file_path const & path, manifest_map const & man); + virtual bool attribute_manual_merge(file_path const & path, + manifest_map const & man); + virtual ~merge_provider() {} }; @@ -105,6 +107,9 @@ virtual std::string get_file_encoding(file_path const & path, manifest_map const & man); + virtual bool attribute_manual_merge(file_path const & path, + manifest_map const & man); + virtual ~update_merge_provider() {} }; --- file_io.cc +++ file_io.cc @@ -254,6 +254,18 @@ return fs::exists(localized(p)); } +bool guess_binary(string const & s) +{ + // these do not occur in ASCII text files + // FIXME: this heuristic is (a) crap and (b) hardcoded. fix both these. 
+ if (s.find_first_of('\x00') != string::npos || + s.find_first_of("\x01\x02\x03\x04\x05\x06\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17\x18" + "\x19\x1a\x1c\x1d\x1e\x1f") != string::npos) + return true; + return false; +} + void delete_file(local_path const & p) { --- file_io.hh +++ file_io.hh @@ -59,6 +59,9 @@ bool file_exists(local_path const & path); bool file_exists(file_path const & path); +// returns true if the string content is binary according to monotone euristic +bool guess_binary(std::string const & s); + void mkdir_p(local_path const & path); void mkdir_p(file_path const & path); void make_dir_for(file_path const & p); --- lua.cc +++ lua.cc @@ -30,6 +30,7 @@ #include "sanity.hh" #include "vocab.hh" #include "platform.hh" +#include "transforms.hh" // defined in {std,test}_hooks.lua, converted #include "test_hooks.h" @@ -45,154 +46,6 @@ } */ -extern "C" -{ - static int - monotone_mkstemp_for_lua(lua_State *L) - { - int fd = -1; - FILE **pf = NULL; - char const *filename = lua_tostring (L, -1); - std::string dup(filename); - - fd = monotone_mkstemp(dup); - - if (fd == -1) - return 0; - - // this magic constructs a lua object which the lua io library - // will enjoy working with - pf = static_cast(lua_newuserdata(L, sizeof(FILE *))); - *pf = fdopen(fd, "r+"); - lua_pushstring(L, "FILE*"); - lua_rawget(L, LUA_REGISTRYINDEX); - lua_setmetatable(L, -2); - - lua_pushstring(L, dup.c_str()); - - if (*pf == NULL) - { - lua_pushnil(L); - lua_pushfstring(L, "%s", strerror(errno)); - lua_pushnumber(L, errno); - return 3; - } - else - return 2; - } - - static int - monotone_existsonpath_for_lua(lua_State *L) - { - const char *exe = lua_tostring(L, -1); - lua_pushnumber(L, existsonpath(exe)); - return 1; - } - - static int - monotone_is_executable_for_lua(lua_State *L) - { - const char *path = lua_tostring(L, -1); - lua_pushboolean(L, is_executable(path)); - return 1; - } - - static int - monotone_make_executable_for_lua(lua_State *L) - { - const char *path = lua_tostring(L, -1); - lua_pushnumber(L, make_executable(path)); - return 1; - } - - static int - monotone_spawn_for_lua(lua_State *L) - { - int n = lua_gettop(L); - const char *path = lua_tostring(L, -n); - char **argv = (char**)malloc((n+1)*sizeof(char*)); - int i; - pid_t ret; - if (argv==NULL) - return 0; - argv[0] = (char*)path; - for (i=1; i1) - sig = (int)lua_tonumber(L, -1); - else - sig = SIGTERM; - lua_pushnumber(L, process_kill(pid, sig)); - return 1; - } - - static int - monotone_sleep_for_lua(lua_State *L) - { - int seconds = (int)lua_tonumber(L, -1); - lua_pushnumber(L, process_sleep(seconds)); - return 1; - } -} - - -lua_hooks::lua_hooks() -{ - st = lua_open (); - I(st); - - // no atpanic support in 4.x - // lua_atpanic (st, &panic_thrower); - - luaopen_base(st); - luaopen_io(st); - luaopen_string(st); - luaopen_math(st); - luaopen_table(st); - luaopen_debug(st); - - // add monotone-specific functions - lua_register(st, "mkstemp", monotone_mkstemp_for_lua); - lua_register(st, "existsonpath", monotone_existsonpath_for_lua); - lua_register(st, "is_executable", monotone_is_executable_for_lua); - lua_register(st, "make_executable", monotone_make_executable_for_lua); - lua_register(st, "spawn", monotone_spawn_for_lua); - lua_register(st, "wait", monotone_wait_for_lua); - lua_register(st, "kill", monotone_kill_for_lua); - lua_register(st, "sleep", monotone_sleep_for_lua); -} - -lua_hooks::~lua_hooks() -{ - if (st) - lua_close (st); -} - - // This Lua object represents a single imperative transaction with the lua // 
interpreter. if it fails at any point, all further commands in the // transaction are ignored. it cleans the lua stack up when it is @@ -525,6 +378,217 @@ std::set Lua::missing_functions; + + + +extern "C" +{ + static int + monotone_mkstemp_for_lua(lua_State *L) + { + int fd = -1; + FILE **pf = NULL; + char const *filename = lua_tostring (L, -1); + std::string dup(filename); + + fd = monotone_mkstemp(dup); + + if (fd == -1) + return 0; + + // this magic constructs a lua object which the lua io library + // will enjoy working with + pf = static_cast(lua_newuserdata(L, sizeof(FILE *))); + *pf = fdopen(fd, "r+"); + lua_pushstring(L, "FILE*"); + lua_rawget(L, LUA_REGISTRYINDEX); + lua_setmetatable(L, -2); + + lua_pushstring(L, dup.c_str()); + + if (*pf == NULL) + { + lua_pushnil(L); + lua_pushfstring(L, "%s", strerror(errno)); + lua_pushnumber(L, errno); + return 3; + } + else + return 2; + } + + static int + monotone_existsonpath_for_lua(lua_State *L) + { + const char *exe = lua_tostring(L, -1); + lua_pushnumber(L, existsonpath(exe)); + return 1; + } + + static int + monotone_is_executable_for_lua(lua_State *L) + { + const char *path = lua_tostring(L, -1); + lua_pushboolean(L, is_executable(path)); + return 1; + } + + static int + monotone_make_executable_for_lua(lua_State *L) + { + const char *path = lua_tostring(L, -1); + lua_pushnumber(L, make_executable(path)); + return 1; + } + + static int + monotone_spawn_for_lua(lua_State *L) + { + int n = lua_gettop(L); + const char *path = lua_tostring(L, -n); + char **argv = (char**)malloc((n+1)*sizeof(char*)); + int i; + pid_t ret; + if (argv==NULL) + return 0; + argv[0] = (char*)path; + for (i=1; i1) + sig = (int)lua_tonumber(L, -1); + else + sig = SIGTERM; + lua_pushnumber(L, process_kill(pid, sig)); + return 1; + } + + static int + monotone_sleep_for_lua(lua_State *L) + { + int seconds = (int)lua_tonumber(L, -1); + lua_pushnumber(L, process_sleep(seconds)); + return 1; + } + + static int + monotone_guess_binary_for_lua(lua_State *L) + { + const char *path = lua_tostring(L, -1); + N(path, F("guess_binary called with an invalid parameter")); + lua_pushboolean(L, guess_binary(std::string(path, lua_strlen(L, -1)))); + return 1; + } + + static int + monotone_include_for_lua(lua_State *L) + { + const char *path = lua_tostring(L, -1); + N(path, F("Include called with an invalid parameter")); + + bool res =Lua(L) + .loadfile(std::string(path, lua_strlen(L, -1))) + .call(0,1) + .ok(); + + lua_pushboolean(L, res); + return 1; + } + + static int + monotone_includedir_for_lua(lua_State *L) + { + const char *pathstr = lua_tostring(L, -1); + N(pathstr, F("IncludeDir called with an invalid parameter")); + + fs::path locpath(pathstr); + N(fs::exists(locpath), F("Directory '%s' does not exists") % pathstr); + N(fs::is_directory(locpath), F("'%s' is not a directory") % pathstr); + + // directory, iterate over it, skipping subdirs, taking every filename, + // sorting them and loading in sorted order + fs::directory_iterator it(locpath); + std::vector arr; + while (it != fs::directory_iterator()) + { + if (!fs::is_directory(*it)) + arr.push_back(*it); + ++it; + } + std::sort(arr.begin(), arr.end()); + for (std::vector::iterator i= arr.begin(); i != arr.end(); ++i) + { + bool res =Lua(L) + .loadfile(i->string()) + .call(0,1) + .ok(); + N(res, F("lua error while loading rcfile '%s'") % i->string()); + } + + lua_pushboolean(L, true); + return 1; + } +} + + +lua_hooks::lua_hooks() +{ + st = lua_open (); + I(st); + + // no atpanic support in 4.x + // lua_atpanic (st, 
&panic_thrower); + + luaopen_base(st); + luaopen_io(st); + luaopen_string(st); + luaopen_math(st); + luaopen_table(st); + luaopen_debug(st); + + // add monotone-specific functions + lua_register(st, "mkstemp", monotone_mkstemp_for_lua); + lua_register(st, "existsonpath", monotone_existsonpath_for_lua); + lua_register(st, "is_executable", monotone_is_executable_for_lua); + lua_register(st, "make_executable", monotone_make_executable_for_lua); + lua_register(st, "spawn", monotone_spawn_for_lua); + lua_register(st, "wait", monotone_wait_for_lua); + lua_register(st, "kill", monotone_kill_for_lua); + lua_register(st, "sleep", monotone_sleep_for_lua); + lua_register(st, "guess_binary", monotone_guess_binary_for_lua); + lua_register(st, "include", monotone_include_for_lua); + lua_register(st, "includedir", monotone_includedir_for_lua); +} + +lua_hooks::~lua_hooks() +{ + if (st) + lua_close (st); +} + static bool run_string(lua_State * st, string const &str, string const & identity) { @@ -581,6 +645,29 @@ lua_hooks::load_rcfile(utf8 const & rc) { I(st); + if (rc() != "-") + { + fs::path locpath(localized(rc)); + if (fs::exists(locpath) && fs::is_directory(locpath)) + { + // directory, iterate over it, skipping subdirs, taking every filename, + // sorting them and loading in sorted order + fs::directory_iterator it(locpath); + std::vector arr; + while (it != fs::directory_iterator()) + { + if (!fs::is_directory(*it)) + arr.push_back(*it); + ++it; + } + std::sort(arr.begin(), arr.end()); + for (std::vector::iterator i= arr.begin(); i != arr.end(); ++i) + { + load_rcfile(*i, true); + } + return; // directory read, skip the rest ... + } + } data dat; L(F("opening rcfile '%s' ...\n") % rc); read_data_for_command_line(rc, dat); --- monotone.cc +++ monotone.cc @@ -35,9 +35,12 @@ char * argstr = NULL; long arglong = 0; -// Options are divide into two tables. The first one is command-specific +// Options are split between two tables. The first one is command-specific // options (hence the `c' in `coptions'). The second is the global one // with options that aren't tied to specific commands. 
+// +// the intent is to ensure that any command specific options mean the same +// thing to all commands that use them struct poptOption coptions[] = { --- monotone.texi +++ monotone.texi @@ -77,6 +77,7 @@ * Hook Reference:: Functions which extend monotone * Special Topics:: Extra explanations and details * Man Page:: That other document +* Default hooks:: The standard hook definitions * Index:: Index of concepts and functions @end menu @@ -1634,19 +1635,19 @@ @smallexample @group $ cat >>~/.monotone/monotonerc -function get_netsync_read_permitted (regex, identity) +function get_netsync_read_permitted (branch, identity) if (identity == "abe@@juicebot.co.jp") then return true end if (identity == "beth@@juicebot.co.jp") then return true end return false end -function get_netsync_write_permitted (regex, identity) +function get_netsync_write_permitted (branch, identity) if (identity == "abe@@juicebot.co.jp") then return true end if (identity == "beth@@juicebot.co.jp") then return true end return false end -function get_netsync_anonymous_read_permitted (regex) +function get_netsync_anonymous_read_permitted (branch) return false end ^D @@ -1659,7 +1660,7 @@ @smallexample @group -$ monotone --db=jim.db serve jim-laptop.juicebot.co.jp jp.co.juicebot.jb7.* +$ monotone --db=jim.db serve jim-laptop.juicebot.co.jp "jp.co.juicebot.jb7.*" @end group @end smallexample @@ -1673,7 +1674,7 @@ @smallexample @group -monotone --db=abe.db sync jim-laptop.juicebot.co.jp jp.co.juicebot.jb7.* +monotone --db=abe.db sync jim-laptop.juicebot.co.jp "jp.co.juicebot.jb7.*" monotone: rebuilding merkle trees for pattern jp.co.juicebot.jb7.* monotone: connecting to jim-laptop.juicebot.co.jp monotone: [bytes in: 3200] [bytes out: 673] @@ -1819,7 +1820,7 @@ @smallexample @group -$ monotone sync jim-laptop.juicebot.co.jp jp.co.juicebot.jb7.* +$ monotone sync jim-laptop.juicebot.co.jp "jp.co.juicebot.jb7.*" monotone: rebuilding merkle trees for pattern jp.co.juicebot.jb7.* monotone: including branch jp.co.juicebot.jb7 monotone: [keys: 2] [rcerts: 8] @@ -1834,7 +1835,7 @@ @smallexample @group -monotone --db=beth.db sync jim-laptop.juicebot.co.jp jp.co.juicebot.jb7.* +$ monotone --db=beth.db sync jim-laptop.juicebot.co.jp "jp.co.juicebot.jb7.*" monotone: rebuilding merkle trees for pattern jp.co.juicebot.jb7.* monotone: connecting to jim-laptop.juicebot.co.jp monotone: [bytes in: 3200] [bytes out: 673] @@ -1886,7 +1887,7 @@ @smallexample @group -$ monotone sync jim-laptop.juicebot.co.jp jp.co.juicebot.jb7.* +$ monotone sync jim-laptop.juicebot.co.jp "jp.co.juicebot.jb7.*" monotone: rebuilding merkle trees for pattern jp.co.juicebot.jb7.* monotone: including branch jp.co.juicebot.jb7 monotone: [keys: 3] [rcerts: 12] @@ -2229,6 +2230,7 @@ * Reserved Certs:: Certificate names with special meanings. * Naming Conventions:: Choosing appropriate names for keys and branches. * File Attributes:: Marking files as executable, or other attributes. +* Merging:: Merging with external tools, handling binary files. * Migrating and Dumping:: Changing the underlying storage system. * Importing from CVS:: Building a monotone database from a CVS repository. @end menu @@ -2898,7 +2900,48 @@ other people make, you will have to resolve those conflicts, as plain text, just as with any other text file in your working copy. address@hidden address@hidden Merging address@hidden Merging +Monotone has two merging modes, controlled by the @code{manual_merge} +attribute. 
+By default all files are merged in automatic mode, unless the address@hidden attribute for that file is present and address@hidden +In automatic mode files are merged without user intervention, using +monotone internal three-way merging algorithm. +Only if there are conflicts or an ancestor is not available monotone +switches to manual mode, essentially escalating the merging to the user. +When working in manual mode, monotone invokes the merge2 (for two-way +merging) or merge3 (three-way) hooks to start an user defined external +merge tool. +If the tool terminates without writing the merged file, monotone aborts the +merging, reverting any changes made. +By redefining the aforementioned hooks the user can not only choose a +preferred merge tool, but even select different programs for different +file types. For example, gimp for .png files, OpenOffice.org for +.doc, and so on. +Starting with monotone 0.20, the @code{manual_merge} attribute is +automatically set at add time for all ``binary'' files, i.e. all files +for wich the @code{binary_file} hook returns true. +Currently, this means all files with extension gif, jpeg, png, bz2, gz +and zip, plus files containing at least one of the following +bytes: + address@hidden address@hidden +0x00 thru 0x06 +0x0E thru 0x1a +0x1c thru 0x1f address@hidden group address@hidden smallexample + +The attribute could also be manually forced or removed using the +appropriate monotone commands. +Remember that monotone switches to manual merging even if only one of +the files to be merged has the @code{manual_merge} attribute set. + @page @node Migrating and Dumping @section Migrating and Dumping @@ -3641,7 +3684,7 @@ @smallexample @group -$ monotone serve alice.someisp.com net.venge.monotone.* +$ monotone serve alice.someisp.com "net.venge.monotone.*" @end group @end smallexample @@ -3649,7 +3692,7 @@ @smallexample @group -$ monotone sync alice.someisp.com net.venge.monotone.* +$ monotone sync alice.someisp.com "net.venge.monotone.*" @end group @end smallexample @@ -4964,6 +5007,65 @@ 'M' the file is missing but is included in the current manifest @end verbatim +Note that there are 45 possible status code combinations, some of which +are not valid, detailed below. + address@hidden +' ' unchanged +' P' patched (contents changed) +' U' unknown (exists on the filesystem but not tracked) +' I' ignored (exists on the filesystem but excluded by lua hook) +' M' missing (exists in the manifest but not on the filesystem) + +' A ' added (invalid, add should have associated patch) +' AP' added and patched +' AU' added but unknown (invalid) +' AI' added but ignored (seems invalid, but may be possible?) +' AM' added but missing from the filesystem + +' R ' rename target +' RP' rename target and patched +' RU' rename target but unknown (invalid) +' RI' rename target but ignored (seems invalid, but may be possible?) +' RM' rename target but missing from the filesystem + +'D ' dropped +'D P' dropped and patched (invalid) +'D U' dropped and unknown (still exists on the filesystem) +'D I' dropped and ignored (seems invalid, but may be possible?) +'D M' dropped and missing (invalid) + +'DA ' dropped and added (invalid, add should have associated patch) +'DAP' dropped and added and patched +'DAU' dropped and added but unknown (invalid) +'DAI' dropped and added but ignored (seems invalid, but may be possible?) 
+'DAM' dropped and added but missing from the filesystem + +'DR ' dropped and rename target +'DRP' dropped and rename target and patched +'DRU' dropped and rename target but unknown (invalid) +'DRI' dropped and rename target but ignored (invalid) +'DRM' dropped and rename target but missing from the filesystem + +'R ' rename source +'R P' rename source and patched (invalid) +'R U' rename source and unknown (still exists on the filesystem) +'R I' rename source and ignored (seems invalid, but may be possible?) +'R M' rename source and missing (invalid) + +'RA ' rename source and added (invalid, add should have associated patch) +'RAP' rename source and added and patched +'RAU' rename source and added but unknown (invalid) +'RAI' rename source and added but ignored (seems invalid, but may be possible?) +'RAM' rename source and added but missing from the filesystem + +'RR ' rename source and target +'RRP' rename source and target and target patched +'RRU' rename source and target and target unknown (invalid) +'RRI' rename source and target and target ignored (seems invalid, but may be possible?) +'RRM' rename source and target and target missing address@hidden verbatim + Full support for versioned directories is not yet complete and the inventory will only list entries for renamed or dropped directories. @@ -5179,10 +5281,27 @@ using the @address@hidden option; hooks defined in files specified on the command-line will shadow hooks from the the automatic files. +By specifying @address@hidden you can automatically +load all the files contained into @var{directory}. -The remainder of this section documents the existing hook functions -and their default definitions. +Monotone also makes available to hook writers a number of helper +functions exposing functionality not available with standard lua. +For the complete source of the default hooks see @ref{Default +hooks}. + address@hidden +* Hooks:: All hooks called by monotone. +* Additional Lua Functions:: Extra functionality availabe to hook writers. address@hidden menu + address@hidden address@hidden Hooks address@hidden Hooks + +This section documents the existing hook functions and their default +definitions. + @ftable @code @item note_commit (@var{new_id}, @var{certs}) @@ -5279,46 +5398,8 @@ a successful commit, the contents of @file{MT/log} are erased setting the system up for another edit/commit cycle. -The default definition of this hook is: +For the default definition of this hook, see @ref{Default hooks}. address@hidden address@hidden -function edit_comment(commentary, user_log_message) - local exe = "vi" - local visual = os.getenv("VISUAL") - if (visual ~= nil) then exe = visual end - local editor = os.getenv("EDITOR") - if (editor ~= nil) then exe = editor end - - local tmp, tname = temp_file() - if (tmp == nil) then return nil end - commentary = "MT: " .. string.gsub(commentary, "\n", "\nMT: ") - tmp:write(user_log_message) - tmp:write(commentary) - io.close(tmp) - - if (os.execute(string.format("%s %s", exe, tname)) ~= 0) then - os.remove(tname) - return nil - end - - tmp = io.open(tname, "r") - if (tmp == nil) then os.remove(tname); return nil end - local res = "" - local line = tmp:read() - while(line ~= nil) do - if (not string.find(line, "^MT:")) then - res = res .. line .. 
"\n" - end - line = tmp:read() - end - io.close(tmp) - os.remove(tname) - return res -end address@hidden group address@hidden smallexample - @item persist_phrase_ok () Returns @code{true} if you want monotone to remember the passphrase of @@ -5419,34 +5500,9 @@ Returns @code{true} if @var{filename} should be ignored while adding, dropping, or moving files. Otherwise returns @code{false}. This is most important when performing recursive actions on directories, which -may affect multiple files simultaneously. The default definition of -this hook is: +may affect multiple files simultaneously. +For the default definition of this hook, see @ref{Default hooks}. address@hidden address@hidden -function ignore_file(name) - if (string.find(name, "%.a$")) then return true end - if (string.find(name, "%.so$")) then return true end - if (string.find(name, "%.o$")) then return true end - if (string.find(name, "%.la$")) then return true end - if (string.find(name, "%.lo$")) then return true end - if (string.find(name, "%.aux$")) then return true end - if (string.find(name, "%.bak$")) then return true end - if (string.find(name, "%.orig$")) then return true end - if (string.find(name, "%.rej$")) then return true end - if (string.find(name, "%~$")) then return true end - if (string.find(name, "/core$")) then return true end - if (string.find(name, "^CVS/")) then return true end - if (string.find(name, "/CVS/")) then return true end - if (string.find(name, "^%.svn/")) then return true end - if (string.find(name, "/%.svn/")) then return true end - if (string.find(name, "^SCCS/")) then return true end - if (string.find(name, "/SCCS/")) then return true end - return false; -end address@hidden group address@hidden smallexample - @item ignore_branch (@var{branchname}) Returns @code{true} if @var{branchname} should be ignored while listing @@ -5545,61 +5601,10 @@ strings, which are the contents of the @var{left} and @var{right} nodes of a file fork which monotone was unable to automatically merge. The merge should either call an intelligent merge program or -interact with the user. The default definition of this hook is: +interact with the user. +For the default definition of this hook, see @ref{Default hooks}. address@hidden address@hidden -function merge2 (left_path, right_path, merged_path, left, right) - local ret = nil - local tbl = @address@hidden - - tbl.lfile = nil - tbl.rfile = nil - tbl.outfile = nil - tbl.meld_exists = false - - tbl.lfile = write_to_temporary_file (left) - tbl.rfile = write_to_temporary_file (right) - tbl.outfile = write_to_temporary_file ("") - - if tbl.lfile ~= nil and tbl.rfile ~= nil and tbl.outfile ~= nil - then - tbl.left_path = left_path - tbl.right_path = right_path - tbl.merged_path = merged_path - - local cmd = get_preferred_merge2_command (tbl) - - if cmd ~=nil - then - io.write ( - string.format("executing external 2-way merge command\n")) - cmd () - if tbl.meld_exists - then - ret = read_contents_of_file (tbl.lfile) - else - ret = read_contents_of_file (tbl.outfile) - end - if string.len (ret) == 0 - then - ret = nil - end - else - io.write ("no external 2-way merge command found\n") - end - end - - os.remove (tbl.lfile) - os.remove (tbl.rfile) - os.remove (tbl.outfile) - - return ret -end address@hidden group address@hidden smallexample - @anchor{get_preferred_merge2_command} @item get_preferred_merge2_command(@var{tbl}) @@ -5609,58 +5614,7 @@ that you would like to use to perform merge2 operations, override this hook to specify it. 
address@hidden address@hidden -function get_preferred_merge2_command (tbl) - local cmd = nil - local left_path = tbl.left_path - local right_path = tbl.right_path - local merged_path = tbl.merged_path - local lfile = tbl.lfile - local rfile = tbl.rfile - local outfile = tbl.outfile - - local editor = string.lower(os.getenv("EDITOR")) - - - if program_exists_in_path("kdiff3") then - cmd = merge2_kdiff3_cmd (left_path, right_path, merged_path, - lfile, rfile, outfile) - elseif program_exists_in_path ("meld") then - tbl.meld_exists = true - io.write (string.format( - "\nWARNING: 'meld' was choosen to perform external 2-way merge.\n".. - "You should merge all changes to *LEFT* file due to limitation of program\n".. - "arguments.\n\n")) - cmd = merge2_meld_cmd (lfile, rfile) - elseif program_exists_in_path ("xxdiff") then - cmd = merge2_xxdiff_cmd (left_path, right_path, merged_path, - lfile, rfile, outfile) - else - if string.find(editor, "emacs") ~= nil - or string.find(editor, "gnu") ~= nil - then - if program_exists_in_path ("emacs") then - cmd = merge2_emacs_cmd ("emacs", lfile, rfile, outfile) - elseif program_exists_in_path ("xemacs") then - cmd = merge2_emacs_cmd ("xemacs", lfile, rfile, outfile) - end - else if string.find(editor, "vim") ~= nil then - if os.getenv ("DISPLAY") ~= nil - and program_exists_in_path ("gvim") - then - cmd = merge2_vim_cmd ("gvim", lfile, rfile, outfile) - elseif program_exists_in_path ("vim") then - cmd = merge2_vim_cmd ("vim", lfile, rfile, outfile) - end - end - end - return cmd -end address@hidden group address@hidden smallexample - @anchor{merge3} @item merge3 (@var{ancestor}, @var{left}, @var{right}) @@ -5668,65 +5622,10 @@ strings, which are the contents of @var{left} and @var{right} nodes, and least common @var{ancestor}, of a file fork which monotone was unable to automatically merge. This hook delegates the actual merge -to the result of @ref{get_preferred_merge3_command}. The default -definition of this hook is: +to the result of @ref{get_preferred_merge3_command}. +For the default definition of this hook, see @ref{Default hooks}. address@hidden address@hidden -function merge3 (anc_path, left_path, right_path, merged_path, - ancestor, left, right) - local ret - local tbl = @address@hidden - - tbl.anc_path = anc_path - tbl.left_path = left_path - tbl.right_path = right_path - tbl.merged_path = merged_path - tbl.afile = nil - tbl.lfile = nil - tbl.rfile = nil - tbl.outfile = nil - tbl.meld_exists = false - tbl.lfile = write_to_temporary_file (left) - tbl.afile = write_to_temporary_file (ancestor) - tbl.rfile = write_to_temporary_file (right) - tbl.outfile = write_to_temporary_file ("") - - if tbl.lfile ~= nil and tbl.rfile ~= nil - and tbl.afile ~= nil and tbl.outfile ~= nil - then - local cmd = get_preferred_merge3_command (tbl) - if cmd ~=nil - then - io.write (string.format( - "executing external 3-way merge command\n")) - cmd () - if tbl.meld_exists - then - ret = read_contents_of_file (tbl.afile) - else - ret = read_contents_of_file (tbl.outfile) - end - if string.len (ret) == 0 - then - ret = nil - end - else - io.write ("no external 3-way merge command found\n") - end - end - - os.remove (tbl.lfile) - os.remove (tbl.rfile) - os.remove (tbl.afile) - os.remove (tbl.outfile) - - return ret -end address@hidden group address@hidden smallexample - @anchor{get_preferred_merge3_command} @item get_preferred_merge3_command(@var{tbl}) @@ -5736,65 +5635,6 @@ that you would like to use to perform merge3 operations, override this hook to specify it. 
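A minimal sketch of such overrides for both the 2-way and 3-way cases, reusing helper commands defined in the standard hooks (the tool choices here are only examples):

   -- always prefer kdiff3 for 2-way merges, then fall back to xxdiff
   function get_preferred_merge2_command (tbl)
      if program_exists_in_path ("kdiff3") then
         return merge2_kdiff3_cmd (tbl.left_path, tbl.right_path, tbl.merged_path,
                                   tbl.lfile, tbl.rfile, tbl.outfile)
      elseif program_exists_in_path ("xxdiff") then
         return merge2_xxdiff_cmd (tbl.left_path, tbl.right_path, tbl.merged_path,
                                   tbl.lfile, tbl.rfile, tbl.outfile)
      end
      return nil   -- no usable tool found
   end

   -- force 3-way merges through gvim/vim via the merge3_vim_cmd helper
   function get_preferred_merge3_command (tbl)
      if os.getenv ("DISPLAY") ~= nil and program_exists_in_path ("gvim") then
         return merge3_vim_cmd ("gvim", tbl.lfile, tbl.afile, tbl.rfile, tbl.outfile)
      elseif program_exists_in_path ("vim") then
         return merge3_vim_cmd ("vim", tbl.lfile, tbl.afile, tbl.rfile, tbl.outfile)
      end
      return nil
   end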
address@hidden address@hidden -function get_preferred_merge3_command (tbl) - local cmd = nil - local left_path = tbl.left_path - local anc_path = tbl.anc_path - local right_path = tbl.right_path - local merged_path = tbl.merged_path - local lfile = tbl.lfile - local afile = tbl.afile - local rfile = tbl.rfile - local outfile = tbl.outfile - - local editor = string.lower(os.getenv("EDITOR")) - - if program_exists_in_path("kdiff3") then - cmd = merge3_kdiff3_cmd (left_path, anc_path, right_path, - merged_path, lfile, afile, rfile, outfile) - elseif program_exists_in_path ("meld") then - tbl.meld_exists = true - io.write (string.format( - "\nWARNING: 'meld' was choosen to perform external 3-way merge.\n".. - "You should merge all changes to *CENTER* file due to limitation of program\n".. - "arguments.\n\n")) - cmd = merge3_meld_cmd (lfile, afile, rfile) - elseif program_exists_in_path ("xxdiff") then - cmd = merge3_xxdiff_cmd (left_path, anc_path, right_path, - merged_path, lfile, afile, rfile, outfile) - else - -- prefer emacs/xemacs - if string.find(editor, "emacs") ~= nil - or string.find(editor, "gnu") ~= nil - then - if program_exists_in_path ("xemacs") then - cmd = merge3_emacs_cmd ("xemacs", lfile, afile, - rfile, outfile) - elseif program_exists_in_path ("emacs") then - cmd = merge3_emacs_cmd ("emacs", lfile, afile, - rfile, outfile) - end - elseif string.find(editor, "vim") ~= nil then -- prefer vim - if os.getenv ("DISPLAY") ~= nil - and program_exists_in_path ("gvim") - then - cmd = merge3_vim_cmd ("gvim", lfile, afile, - rfile, outfile) - elseif program_exists_in_path ("vim") then - cmd = merge3_vim_cmd ("vim", lfile, afile, - rfile, outfile) - end - end - end - - return cmd -end address@hidden group address@hidden smallexample - - @item expand_selector (@var{str}) Attempts to expand @var{str} as a selector. Expansion generally means @@ -5802,115 +5642,18 @@ authors or @code{d:} for dates. Expansion may also mean recognizing and interpreting special words such as @code{yesterday} or @code{6 months ago} and converting them into well formed selectors. For more -detail on the use of selectors, see @ref{Selectors}. The default -definition of this hook is: +detail on the use of selectors, see @ref{Selectors}. +For the default definition of this hook, see @ref{Default hooks}. address@hidden address@hidden -function expand_selector(str) - - -- something which looks like a generic cert pattern - if string.find(str, "^[^=]*=.*$") - then - return ("c:" .. str) - end - - -- something which looks like an email address - if string.find(str, "[%w%-_]+@@[%w%-_]+") - then - return ("a:" .. str) - end - - -- something which looks like a branch name - if string.find(str, "[%w%-]+%.[%w%-]+") - then - return ("b:" .. str) - end - - -- a sequence of nothing but hex digits - if string.find(str, "^%x+$") - then - return ("i:" .. str) - end - - -- tries to expand as a date - local dtstr = expand_date(str) - if dtstr ~= nil - then - return ("d:" .. dtstr) - end - - return nil -end address@hidden group address@hidden smallexample - @item expand_date (@var{str}) Attempts to expand @var{str} as a date expression. Expansion means recognizing and interpreting special words such as @code{yesterday} or @code{6 months ago} and converting them into well formed date expressions. For more -detail on the use of selectors, see @ref{Selectors}. The default -definition of this hook is: +detail on the use of selectors, see @ref{Selectors}. +For the default definition of this hook, see @ref{Default hooks}. 
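As an illustration only, a user could shadow this hook to recognize one extra phrase; note that a user definition replaces the default rather than extending it, so a real override would also need to re-handle the standard patterns:

   function expand_date(str)
      -- hypothetical extra case: "last week" means seven days ago
      if str == "last week" then
         local t = os.time(os.date('!*t'))
         return os.date("%F", t - 7 * 86400)
      end
      return nil
   end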
address@hidden address@hidden -function expand_date(str) - -- simple date patterns - if string.find(str, "^19%d%d%-%d%d") - or string.find(str, "^20%d%d%-%d%d") - then - return (str) - end - -- "now" - if str == "now" - then - local t = os.time(os.date('!*t')) - return os.date("%FT%T", t) - end - - -- today don't uses the time - if str == "today" - then - local t = os.time(os.date('!*t')) - return os.date("%F", t) - end - - -- "yesterday", the source of all hangovers - if str == "yesterday" - then - local t = os.time(os.date('!*t')) - return os.date("%F", t - 86400) - end - - -- "CVS style" relative dates such as "3 weeks ago" - local trans = @{ - minute = 60; - hour = 3600; - day = 86400; - week = 604800; - month = 2678400; - year = 31536000 - @} - local pos, len, n, type = string.find(str, "(%d+) ([minutehordaywk]+)s? ago") - if trans[type] ~= nil - then - local t = os.time(os.date('!*t')) - if trans[type] <= 3600 - then - return os.date("%FT%T", t - (n * trans[type])) - else - return os.date("%F", t - (n * trans[type])) - end - end - - return nil -end address@hidden group address@hidden smallexample - - @item get_system_linesep () Returns a string which defines the default system line separator. @@ -6008,8 +5751,8 @@ stored in @file{.mt-attrs} for the given @var{filename}. This table of hook functions is called once for each file during an @dfn{add}. -By default, there is only one entry in this table, for the @code{execute} -attribute. Its definition is: +By default, there are only two entries in this table, for the address@hidden and @code{manual_merge} attributes. Their definition is: @smallexample @group @@ -6021,11 +5764,135 @@ return nil end end +attr_init_functions["manual_merge"] = + function(filename) + if (binary_file(filename)) then + return "true" -- binary files must merged manually + else + return nil + end + end @end group @end smallexample +The @code{binary_file} function is also defined as a lua hook. See address@hidden hooks}. + @end ftable address@hidden address@hidden Additional Lua Functions address@hidden Additional Lua Functions + +This section documents the additional lua functions made available to +hook writers. + address@hidden @code + address@hidden existonpath(@var{possible_command}) + +This function receives a string containing the name of an external +program and returns 0 if it exists on path and is executable, -1 +otherwise. +As an example, @code{existonpath("xxdiff")} returns 0 if the +program xxdiff is available. +On windows, this function automatically appends ``.exe'' to the +program name. In the previous example, @code{existonpath} would search +for ``xxdiff.exe''. + address@hidden guess_binary(@var{filespec}) + +Returns true if the file appears to be binary, i.e. contains one or +more of the following characters: address@hidden address@hidden +0x00 thru 0x06 +0x0E thru 0x1a +0x1c thru 0x1f address@hidden group address@hidden smallexample + address@hidden include(@var{scriptfile}) + +This function tries to load and execute the script contained into +scriptfile. It returns true for success and false if there is an +error. + address@hidden includedir(@var{scriptpath}) + +This function loads and executes in alphabetical order all the scripts +contained into the directory scriptpath. +If one of the scripts has an error, the functions doesn't process the +remaining scripts and immediately returns false. + address@hidden is_executable(@var{filespec}) + +This function returns true if the file is executable, false +otherwise. 
On windows this function returns always false. + address@hidden kill(@var{pid} [, @var{signal}]) + +This function calls the kill() C library function on posix systems and +TerminateProcess on Win32 (in that case @var{pid} is the process +handle). If the optional @var{signal} parameter is missing, SIGTERM +will be used. +Returns 0 on succes, -1 on error. + address@hidden make_executable(@var{filespec}) + +This function marks the named file as executable. On windows has no +effect. + address@hidden mkstemp(@var{template}) + +Like its C library counterpart, mkstemp creates a unique name and +returns a file descriptor for the newly created file. +The value of template should be a pointer to a character buffer loaded +with a null-terminated string that consists of contiguous, legal file +ad path name characters followed by six Xs. +The function mkstemp replaces the Xs by an alpha-numeric sequence +that is chosen to ensure that no file in the chosen directory has that +name. +Furthermore, subsequent calls to mkstemp within the same process +each yield different file names. +Unlike other implementations, monotone mkstemp allows the template +string to contain a complete path, not only a filename, allowing users +to create temporary files outside the current directory. + address@hidden notice:address@hidden +To create a temporary file, you must use the @code{temp_file()} +function, unless you need to run monotone with the @option{--nostd} +option. @code{temp_file()} builds on @code{mkstemp()} and creates a +file in the standard TMP/TEMP directories. +For the definition of @code{temp_file()}, see @ref{Default hooks}. + address@hidden sleep(@var{seconds}) + +Makes the calling process sleep for the specified number of seconds. + address@hidden spawn(@var{executable} [, @var{args ...}]) + +Starts the named executable with the given arguments. Returns the +process pid on Posix systems, the process handle on Win32 or -1 if +there was an error. +Calls fork/execvp on Posix, CreateProcess on Win32. + address@hidden notice:address@hidden +To spawn a process and wait for its completion, use the @code{execute()} +function, unless you need to run monotone with the @option{--nostd} +option. @code{execute()} builds on @code{spawn()} and @code{wait()} +in a standardized way. + address@hidden wait(@var{pid}) + +Wait until the process with given pid (process handle on Win32) exits. +Returns two values: a result value and the exit code of the waited-for +process. +The exit code is meaningful only if the result value is 0. + address@hidden ftable + + @node Special Topics @chapter Special Topics @@ -7098,6 +6965,16 @@ graydon hoare address@hidden Default hooks address@hidden Default hooks + +This section contains the entire source code of the standard hook file, +that is built in to the monotone executable, and read before any user +hooks files (unless @option{--nostd} is passed). It contains the +default values for all hooks. + address@hidden std_hooks.lua + @node Index @unnumbered Index --- netcmd.cc +++ netcmd.cc @@ -52,9 +52,6 @@ cmd_code(bye_cmd) {} -netcmd::netcmd(u8 _version) : version(_version), cmd_code(bye_cmd) -{} - size_t netcmd::encoded_size() { string tmp; @@ -96,22 +93,6 @@ out.append(digest_str); } -// last should be zero (doesn't mean we're compatible with version 0). -// The nonzero elements are the historical netsync/netcmd versions we can -// interoperate with. For interoperating with newer versions, assume -// compatibility and let the remote host make the call. 
-static u8 const compatible_versions[] = {4, 0}; - -bool is_compatible(u8 version) -{ - for (u8 const *x = compatible_versions; *x; ++x) - { - if (*x == version) - return true; - } - return false; -} - bool netcmd::read(string & inbuf, netsync_session_key const & key, netsync_hmac_value & hmac_val) @@ -121,28 +102,20 @@ if (inbuf.size() < constants::netcmd_minsz) return false; - u8 ver = extract_datum_lsb(inbuf, pos, "netcmd protocol number"); + u8 extracted_ver = extract_datum_lsb(inbuf, pos, "netcmd protocol number"); int v = version; + if (extracted_ver != version) + throw bad_decode(F("protocol version mismatch: wanted '%d' got '%d'") + % widen(version) + % widen(extracted_ver)); + version = extracted_ver; u8 cmd_byte = extract_datum_lsb(inbuf, pos, "netcmd code"); switch (cmd_byte) { - // hello may be newer than expected, or one we're compatible with case static_cast(hello_cmd): - if (ver < version && !is_compatible(ver)) - throw bad_decode(F("protocol version mismatch: wanted '%d' got '%d'") - % widen(v) - % widen(ver)); - break; - // these may be older compatible versions case static_cast(anonymous_cmd): case static_cast(auth_cmd): - if (ver != version && (ver > version || !is_compatible(ver))) - throw bad_decode(F("protocol version mismatch: wanted '%d' got '%d'") - % widen(v) - % widen(ver)); - break; - // these must match exactly what's expected case static_cast(error_cmd): case static_cast(bye_cmd): case static_cast(confirm_cmd): @@ -153,16 +126,11 @@ case static_cast(data_cmd): case static_cast(delta_cmd): case static_cast(nonexistant_cmd): - if (ver != version) - throw bad_decode(F("protocol version mismatch: wanted '%d' got '%d'") - % widen(v) - % widen(ver)); + cmd_code = static_cast(cmd_byte); break; default: throw bad_decode(F("unknown netcmd code 0x%x") % widen(cmd_byte)); } - cmd_code = static_cast(cmd_byte); - version = ver; // check to see if we have even enough bytes for a complete uleb128 size_t payload_len = 0; --- netsync.cc +++ netsync.cc @@ -264,11 +264,11 @@ string outbuf; netcmd cmd; - u8 protocol_version; bool armed; bool arm(); utf8 pattern; + boost::regex pattern_re; id remote_peer_key_hash; rsa_keypair_id remote_peer_key_name; netsync_session_key session_key; @@ -480,9 +480,9 @@ str(sock, to), inbuf(""), outbuf(""), - protocol_version(constants::netcmd_current_protocol_version), armed(false), pattern(""), + pattern_re(".*"), remote_peer_key_hash(""), remote_peer_key_name(""), session_key(constants::netsync_key_initializer), @@ -509,6 +509,7 @@ N(patterns.size() == 1, F("client can only sync one pattern at a time")); this->pattern = idx(patterns, 0); + this->pattern_re = boost::regex(this->pattern()); } dbw.set_on_revision_written(boost::bind(&session::rev_written_callback, @@ -1164,8 +1165,6 @@ set ok_branches, bad_branches; cert_name bcert_name(branch_cert_name); cert_name tcert_name(tag_cert_name); - //used for permission checking if we're the client - boost::regex reg(pattern()); for (map::iterator i = received_certs.begin(); i != received_certs.end(); ++i) { @@ -1187,7 +1186,7 @@ ok = app.lua.hook_get_netsync_write_permitted(name(), remote_peer_key_name); else - ok = boost::regex_match(name(), reg); + ok = boost::regex_match(name(), pattern_re); if (ok) { ok_branches.insert(name()); @@ -1366,7 +1365,7 @@ session::queue_bye_cmd() { L(F("queueing 'bye' command\n")); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_bye_cmd(); write_netcmd_and_try_flush(cmd); this->sent_goodbye = true; @@ -1376,7 +1375,7 @@ session::queue_error_cmd(string const & 
errmsg) { L(F("queueing 'error' command\n")); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_error_cmd(errmsg); write_netcmd_and_try_flush(cmd); this->sent_goodbye = true; @@ -1389,7 +1388,7 @@ string typestr; netcmd_item_type_to_string(type, typestr); L(F("queueing 'done' command for %s level %s\n") % typestr % level); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_done_cmd(level, type); write_netcmd_and_try_flush(cmd); } @@ -1398,7 +1397,7 @@ session::queue_hello_cmd(id const & server, id const & nonce) { - netcmd cmd(protocol_version); + netcmd cmd; hexenc server_encoded; encode_hexenc(server, server_encoded); @@ -1418,7 +1417,7 @@ id const & nonce2, base64 server_key_encoded) { - netcmd cmd(protocol_version); + netcmd cmd; rsa_oaep_sha_data hmac_key_encrypted; encrypt_rsa(app.lua, remote_peer_key_name, server_key_encoded, nonce2(), hmac_key_encrypted); @@ -1436,7 +1435,7 @@ string const & signature, base64 server_key_encoded) { - netcmd cmd(protocol_version); + netcmd cmd; rsa_oaep_sha_data hmac_key_encrypted; encrypt_rsa(app.lua, remote_peer_key_name, server_key_encoded, nonce2(), hmac_key_encrypted); @@ -1448,7 +1447,7 @@ void session::queue_confirm_cmd() { - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_confirm_cmd(); write_netcmd_and_try_flush(cmd); } @@ -1462,7 +1461,7 @@ netcmd_item_type_to_string(node.type, typestr); L(F("queueing request for refinement of %s node '%s', level %d\n") % typestr % hpref % static_cast(node.level)); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_refine_cmd(node); write_netcmd_and_try_flush(cmd); } @@ -1492,7 +1491,7 @@ L(F("queueing request for data of %s item '%s'\n") % typestr % hid); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_send_data_cmd(type, item); write_netcmd_and_try_flush(cmd); note_item_requested(type, item); @@ -1528,7 +1527,7 @@ L(F("queueing request for contents of %s delta '%s' -> '%s'\n") % typestr % base_hid % ident_hid); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_send_delta_cmd(type, base, ident); write_netcmd_and_try_flush(cmd); note_item_requested(type, ident); @@ -1553,7 +1552,7 @@ L(F("queueing %d bytes of data for %s item '%s'\n") % dat.size() % typestr % hid); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_data_cmd(type, item, dat); write_netcmd_and_try_flush(cmd); note_item_sent(type, item); @@ -1583,7 +1582,7 @@ L(F("queueing %s delta '%s' -> '%s'\n") % typestr % base_hid % ident_hid); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_delta_cmd(type, base, ident, del); write_netcmd_and_try_flush(cmd); note_item_sent(type, ident); @@ -1606,7 +1605,7 @@ L(F("queueing note of nonexistance of %s item '%s'\n") % typestr % hid); - netcmd cmd(protocol_version); + netcmd cmd; cmd.write_nonexistant_cmd(type, item); write_netcmd_and_try_flush(cmd); } @@ -1692,22 +1691,6 @@ W(F("No branches found.")); } -void -convert_pattern(utf8 & pat, utf8 & conv) -{ - string x = pat(); - string pattern = ""; - string e = ".|*?+()[]{}^$\\"; - for (string::const_iterator i = x.begin(); i != x.end(); i++) - { - if (e.find(*i) != e.npos) - pattern += '\\'; - pattern += *i; - } - conv = pattern + ".*"; -} - - static const var_domain known_servers_domain = var_domain("known-servers"); bool @@ -1772,20 +1755,13 @@ } utf8 pat(pattern); - if (protocol_version < 5) - { - W(F("Talking to an old server. 
" - "Using %s as a collection, not a regex.") % pattern); - convert_pattern(pattern, pat); - } vector branchnames; set ok_branches; get_branches(app, branchnames); - boost::regex reg(pat()); for (vector::const_iterator i = branchnames.begin(); i != branchnames.end(); i++) { - if (boost::regex_match(*i, reg)) + if (boost::regex_match(*i, pattern_re)) ok_branches.insert(utf8(*i)); } rebuild_merkle_trees(app, ok_branches); @@ -2033,6 +2009,7 @@ // get our private key and sign back L(F("client signature OK, accepting authentication\n")); this->pattern = pattern; + this->pattern_re = boost::regex(this->pattern()); this->authenticated = true; this->remote_peer_key_name = their_id; // assume the (possibly degraded) opposite role @@ -3020,8 +2997,6 @@ rsa_pub_key server_key; id nonce; cmd.read_hello_cmd(server_keyname, server_key, nonce); - if (cmd.get_version() < protocol_version) - protocol_version = cmd.get_version(); return process_hello_cmd(server_keyname, server_key, nonce); } break; @@ -3035,8 +3010,6 @@ { protocol_role role; string pattern; - if (cmd.get_version() < protocol_version) - protocol_version = cmd.get_version(); rsa_oaep_sha_data hmac_key_encrypted; cmd.read_anonymous_cmd(role, pattern, hmac_key_encrypted); L(F("received 'anonymous' netcmd from client for pattern '%s' " @@ -3058,8 +3031,6 @@ protocol_role role; string pattern, signature; id client, nonce1, nonce2; - if (cmd.get_version() < protocol_version) - protocol_version = cmd.get_version(); rsa_oaep_sha_data hmac_key_encrypted; cmd.read_auth_cmd(role, pattern, client, nonce1, hmac_key_encrypted, signature); @@ -3677,21 +3648,6 @@ ticker certs_ticker("certs", "c", 256); ticker keys_ticker("keys", "k", 1); - // this code is wrong. the way the logic _should_ work is: - // -- start with all branches we want to include - // -- for each such branch, find all branch certs for that branch - // -- for each such cert, note down its revision - // (or these two steps can be replaced with anything else that gives us - // list of all revisions in the branch) - // -- expand this set of revisions to include all of their ancestors - // -- for each such revision, insert all of its certs into the cert table, - // and note all of its branches and keys - // -- for each such branch, insert its epoch into the epoch table, and for - // each such key, insert its key into the key table. - // this somewhat convoluted approach is necessary to handle cases where - // ancestors leave the branch inclusion set, where revisions carry branches - // that are otherwise outside of the inclusion set, etc. - set revision_ids; set inserted_keys; @@ -3710,6 +3666,9 @@ } } + // FIXME: we should probably include epochs for all branches mentioned in + // any included branch cert, rather than just for branches included by the + // branch mask { map epochs; app.db.get_epochs(epochs); --- std_hooks.lua +++ std_hooks.lua @@ -25,7 +25,7 @@ -- bit, ACLs, various special flags) which we want to have set and -- re-set any time the files are modified. the attributes themselves -- are stored in a file .mt-attrs, in the working copy (and --- manifest). each (f,k,v) triple in an atribute file turns into a +-- manifest). each (f,k,v) triple in an attribute file turns into a -- call to attr_functions[k](f,v) in lua. 
if (attr_init_functions == nil) then @@ -41,11 +41,19 @@ end end +attr_init_functions["manual_merge"] = + function(filename) + if (binary_file(filename)) then + return "true" -- binary files must merged manually + else + return nil + end + end + if (attr_functions == nil) then attr_functions = {} end - attr_functions["execute"] = function(filename, value) if (value == "true") then @@ -78,6 +86,11 @@ if (string.find(name, "/autom4te.cache/")) then return true end if (string.find(name, "^.deps/")) then return true end if (string.find(name, "/.deps/")) then return true end + -- Cons/SCons detritus: + if (string.find(name, "^.consign$")) then return true end + if (string.find(name, "/.consign$")) then return true end + if (string.find(name, "^.sconsign$")) then return true end + if (string.find(name, "/.sconsign$")) then return true end -- other VCSes: if (string.find(name, "^CVS/")) then return true end if (string.find(name, "/CVS/")) then return true end @@ -91,6 +104,32 @@ return false; end +-- return true means "binary", false means "text", +-- nil means "unknown, try to guess" +function binary_file(name) + local lowname=string.lower(name) + -- some known binaries, return true + if (string.find(lowname, "%.gif$")) then return true end + if (string.find(lowname, "%.jpe?g$")) then return true end + if (string.find(lowname, "%.png$")) then return true end + if (string.find(lowname, "%.bz2$")) then return true end + if (string.find(lowname, "%.gz$")) then return true end + if (string.find(lowname, "%.zip$")) then return true end + -- some known text, return false + if (string.find(lowname, "%.cc?$")) then return false end + if (string.find(lowname, "%.cxx$")) then return false end + if (string.find(lowname, "%.hh?$")) then return false end + if (string.find(lowname, "%.hxx$")) then return false end + if (string.find(lowname, "%.lua$")) then return false end + if (string.find(lowname, "%.texi$")) then return false end + if (string.find(lowname, "%.sql$")) then return false end + -- unknown - read file and use the guess-binary + -- monotone built-in function + filedata=read_contents_of_file(name, "rb") + if (filedata ~= nil) then return guess_binary(filedata) end + -- still unknown (file empty or unreadable) - report it as nil + return nil +end function edit_comment(basetext, user_log_message) local exe = "vi" @@ -281,8 +320,8 @@ return filename end -function read_contents_of_file(filename) - tmp = io.open(filename, "r") +function read_contents_of_file(filename, mode) + tmp = io.open(filename, mode) if (tmp == nil) then return nil end @@ -304,7 +343,8 @@ local rfile = tbl.rfile local outfile = tbl.outfile - local editor = string.lower(os.getenv("EDITOR")) + local editor = os.getenv("EDITOR") + if editor ~= nil then editor = string.lower(editor) else editor = "" end if program_exists_in_path("kdiff3") then @@ -362,9 +402,9 @@ cmd () if tbl.meld_exists then - ret = read_contents_of_file (tbl.lfile) + ret = read_contents_of_file (tbl.lfile, "r") else - ret = read_contents_of_file (tbl.outfile) + ret = read_contents_of_file (tbl.outfile, "r") end if string.len (ret) == 0 then @@ -393,7 +433,8 @@ local rfile = tbl.rfile local outfile = tbl.outfile - local editor = string.lower(os.getenv("EDITOR")) + local editor = os.getenv("EDITOR") + if editor ~= nil then editor = string.lower(editor) else editor = "" end if program_exists_in_path("kdiff3") then cmd = merge3_kdiff3_cmd (left_path, anc_path, right_path, merged_path, lfile, afile, rfile, outfile) @@ -453,9 +494,9 @@ cmd () if tbl.meld_exists then - 
ret = read_contents_of_file (tbl.afile) + ret = read_contents_of_file (tbl.afile, "r") else - ret = read_contents_of_file (tbl.outfile) + ret = read_contents_of_file (tbl.outfile, "r") end if string.len (ret) == 0 then --- tests/t_cat_file_by_name.at +++ tests/t_cat_file_by_name.at @@ -7,10 +7,14 @@ ]) AT_DATA(r1testfile, [r1 test file ]) +AT_DATA(subfile, [data in subfile +]) AT_CHECK(cp r0testfile testfile) AT_CHECK(cp r0otherfile otherfile) -AT_CHECK(MONOTONE add testfile otherfile, [], [ignore], [ignore]) +AT_CHECK(mkdir subdir) +AT_CHECK(cp subfile subdir/testfile) +AT_CHECK(MONOTONE add testfile otherfile subdir/testfile, [], [ignore], [ignore]) COMMIT(testbranch) R0=`BASE_REVISION` @@ -30,6 +34,12 @@ AT_CHECK(CANONICALISE(stdout)) AT_CHECK(cmp stdout r1testfile, [], [ignore]) +CHECK_SAME_CANONICALISED_STDOUT(cd subdir && MONOTONE cat file $R0 testfile, cat subfile) + +AT_CHECK(rm -rf MT) + +CHECK_SAME_CANONICALISED_STDOUT(MONOTONE cat file $R0 testfile, cat r0testfile) + AT_CHECK(MONOTONE cat file $R0 no_such_file, [1], [ignore], [ignore]) AT_CHECK(MONOTONE cat file $R0 "", [1], [ignore], [ignore]) --- tests/t_lua_includedir.at +++ tests/t_lua_includedir.at @@ -0,0 +1,44 @@ +AT_SETUP([include() and includedir() lua functions]) +MONOTONE_SETUP + +AT_CHECK(mkdir gongolo) + +AT_DATA(include.lua, [include("../gongolo/aaa.rc") +]) + +AT_DATA(includedir.lua, [includedir("../gongolo") +]) + +# write two files and check that they will be invoked in alphabetic order +AT_DATA(gongolo/aaa.rc, [function paraponzi() + io.write("BOOGA BOOGA") +end +paraponzi() +]) +AT_DATA(gongolo/bbb.zz, [function labellagigogin() + io.write("CICCA CICCA") +end +labellagigogin() +]) + +# setup a wrk dir +AT_CHECK(MONOTONE setup alt_wrk, [], [ignore], [ignore]) + +# include directly a single file +AT_CHECK(cd alt_wrk && MONOTONE --root=. --rcfile=../include.lua status, [], [stdout], [ignore]) +AT_CHECK(grep -q "BOOGA BOOGA" stdout) + +# include dirs +AT_CHECK(cd alt_wrk && MONOTONE --root=. --rcfile=../includedir.lua status, [], [stdout], [ignore]) +AT_CHECK(grep -q "BOOGA BOOGACICCA CICCA" stdout) + +# write a third file: should be read beetween the two previous ones +AT_DATA(gongolo/aba.rc, [function notwowithoutthree() + io.write("hu hu") +end +notwowithoutthree() +]) +AT_CHECK(cd alt_wrk && MONOTONE --root=. --rcfile=../includedir.lua status, [], [stdout], [ignore]) +AT_CHECK(grep -q "BOOGA BOOGAhu huCICCA CICCA" stdout) + +AT_CLEANUP --- tests/t_merge_manual.at +++ tests/t_merge_manual.at @@ -0,0 +1,308 @@ +AT_SETUP([merge manual file]) +MONOTONE_SETUP + +NEED_UNB64 + +# This was a real merge error. 
A binary file happily merged by monotone +# just because contains some strategically placed line feeds +# now is a test for the new attribute merge_manual and its effect on merging + +AT_DATA(parent.bmp.b64, [Qk1mdQAAAAAAADYAAAAoAAAAZAAAAGQAAAABABgAAAAAADB1AADrCgAA6woAAAAAAAAAAAAAQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQApC +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQApC2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsyChsy2xsy2xsy2xsy2xsy2xsy2xsy2xsyGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb 
+GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxsKCgoK +]) + +AT_DATA(left.bmp.b64, [Qk1mdQAAAAAAADYAAAAoAAAAZAAAAGQAAAABABgAAAAAADB1AADrCgAA6woAAAAAAAAAAAAAQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C 
+QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9C +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQApC +QJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQJ9CQApC2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsyChsy2xsy2xsy2xsy2xsy2xsy2xsy2xsypzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 
+pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3pzq3 +pzq3pzq3pzq3GxsKCgoK +]) + +AT_DATA(right.bmp.b64, [Qk1mdQAAAAAAADYAAAAoAAAAZAAAAGQAAAABABgAAAAAADB1AADrCgAA6woAAAAAAAAAAAAAOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrt +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtQApC +OtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtOtrtQApC2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy2xsy +2xsy2xsy2xsy2xsy2xsy2xsyChsy2xsy2xsy2xsy2xsy2xsy2xsy2xsyGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb 
+GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvbGxvb +GxvbGxvbGxvbGxsKCgoK +]) + +UNB64(parent.bmp.b64, parent.bmp) +UNB64(left.bmp.b64, left.bmp) +UNB64(right.bmp.b64, right.bmp) + +# hook forces all files binary +AT_DATA(binary.lua, [if (attr_init_functions == nil) then attr_init_functions = {} end +attr_init_functions[["manual_merge"]] = function(filename) return "true" end +]) + +# hook forces all files text +AT_DATA(text.lua, [if (attr_init_functions == nil) then attr_init_functions = {} end +attr_init_functions[["manual_merge"]] = function(filename) return "false" end +]) + +# --- first: auto add as binary +AT_CHECK(cp -f parent.bmp binary.bmp) +AT_CHECK(MONOTONE --rcfile=binary.lua add binary.bmp, [], [ignore], [ignore]) +COMMIT(binbranch) +PARENT_SHA=`BASE_REVISION` + +AT_CHECK(MONOTONE attr get binary.bmp manual_merge, [], [ignore], [ignore]) + +AT_CHECK(cp -f left.bmp binary.bmp) +COMMIT(binbranch) + +REVERT_TO($PARENT_SHA) + +AT_CHECK(cp -f right.bmp binary.bmp) +COMMIT(binbranch) + +# file marked binary: merge should fail +AT_CHECK(MONOTONE --branch=binbranch merge, [1], [ignore], [ignore]) + +# --- second: auto add as text +AT_CHECK(cp -f parent.bmp text.bmp) +AT_CHECK(MONOTONE --rcfile=text.lua add text.bmp, [], [ignore], [ignore]) +COMMIT(textbranch) +PARENT_SHA=`BASE_REVISION` + +AT_CHECK(cp -f left.bmp text.bmp) +COMMIT(textbranch) + +REVERT_TO($PARENT_SHA) + +AT_CHECK(cp -f right.bmp text.bmp) +COMMIT(textbranch) + +# file marked text: merge should work! 
+AT_CHECK(MONOTONE --branch=textbranch merge, [0], [ignore], [ignore]) + +# --- third: manually make filename as binary +AT_CHECK(cp -f parent.bmp forcebin.bmp) +AT_CHECK(MONOTONE --rcfile=text.lua add forcebin.bmp, [], [ignore], [ignore]) +COMMIT(forcebinbranch) +PARENT_SHA=`BASE_REVISION` + +AT_CHECK(cp -f left.bmp forcebin.bmp) +COMMIT(forcebinbranch) + +REVERT_TO($PARENT_SHA) + +AT_CHECK(cp -f right.bmp forcebin.bmp) + +# set bin +AT_CHECK(MONOTONE attr set forcebin.bmp manual_merge true, [], [ignore], [ignore]) +COMMIT(forcebinbranch) + +# file marked binary: merge should fail +AT_CHECK(MONOTONE --branch=forcebinbranch merge, [1], [ignore], [ignore]) + +# --- fourth: automatically make filename as binary, then force text +AT_CHECK(cp -f parent.bmp forcetext.bmp) +AT_CHECK(MONOTONE --rcfile=binary.lua add forcetext.bmp, [], [ignore], [ignore]) +AT_CHECK(MONOTONE attr set forcetext.bmp manual_merge false, [], [ignore], [ignore]) +COMMIT(forcetextbranch) +PARENT_SHA=`BASE_REVISION` + +AT_CHECK(cp -f left.bmp forcetext.bmp) +COMMIT(forcetextbranch) + +REVERT_TO($PARENT_SHA) + +AT_CHECK(cp -f right.bmp forcetext.bmp) +COMMIT(forcetextbranch) + +# file marked text: merge should work +AT_CHECK(MONOTONE --branch=forcetextbranch merge, [], [ignore], [ignore]) + +AT_CLEANUP --- tests/t_netsync_diffbranch.at +++ tests/t_netsync_diffbranch.at @@ -1,4 +1,4 @@ -AT_SETUP([(normal) pull a netsync branch which has a parent from another branch]) +AT_SETUP([pull a netsync branch which has a parent from another branch]) AT_KEYWORDS([netsync]) --- tests/t_rcfile_dir.at +++ tests/t_rcfile_dir.at @@ -0,0 +1,32 @@ +AT_SETUP([--rcfile=directory]) +MONOTONE_SETUP + +AT_CHECK(mkdir gongolo) + +# write two files and check that they will be invoked in alphabetic order +AT_DATA(gongolo/aaa.rc, [function paraponzi() + io.write("BOOGA BOOGA") +end +paraponzi() +]) +AT_DATA(gongolo/bbb.rc, [function labellagigogin() + io.write("CICCA CICCA") +end +labellagigogin() +]) + +# note: rcfile is placed outside workdir +AT_CHECK(MONOTONE setup alt_wrk, [], [ignore], [ignore]) +AT_CHECK(cd alt_wrk && MONOTONE --root=. --rcfile=../gongolo status, [], [stdout], [ignore]) +AT_CHECK(grep -q "BOOGA BOOGACICCA CICCA" stdout) + +# write a third file: should be read beetween the two previous ones +AT_DATA(gongolo/aba.rc, [function notwowithoutthree() + io.write("hu hu") +end +notwowithoutthree() +]) +AT_CHECK(cd alt_wrk && MONOTONE --root=. --rcfile=../gongolo status, [], [stdout], [ignore]) +AT_CHECK(grep -q "BOOGA BOOGAhu huCICCA CICCA" stdout) + +AT_CLEANUP --- tests/t_restrictions.at +++ tests/t_restrictions.at @@ -93,6 +93,12 @@ AT_DATA(work/file3, [version 2 of file3 with some changes ]) +AT_DATA(work/A/fileA, [version 2 of fileA with some changes +]) + +AT_DATA(work/A/B/fileAB, [version 2 of fileAB with some changes +]) + AT_CHECK(MONOTONE rename work/fileX work/file1, [], [ignore], [ignore]) AT_CHECK(MONOTONE drop work/file2, [], [ignore], [ignore]) AT_CHECK(MONOTONE add work/file4, [], [ignore], [ignore]) @@ -158,6 +164,18 @@ AT_CHECK(MONOTONE diff, [], [stdout], [ignore]) AT_CHECK(INCLUDED(X 1 2 3 4), [0], [ignore]) +AT_CHECK(MONOTONE diff --depth=0 . , [], [stdout], [ignore]) +AT_CHECK(grep fileAB stdout, [1], [ignore]) + +AT_CHECK(MONOTONE diff --depth=2 . , [], [stdout], [ignore]) +AT_CHECK(grep fileA stdout, [0], [ignore]) + +AT_CHECK(MONOTONE cdiff --depth=0 . , [], [stdout], [ignore]) +AT_CHECK(grep fileAB stdout, [1], [ignore]) + +AT_CHECK(MONOTONE cdiff --depth=2 . 
, [], [stdout], [ignore]) +AT_CHECK(grep fileA stdout, [0], [ignore]) + # include both source and target of rename AT_CHECK(MONOTONE diff work/fileX work/file1, [], [stdout], [ignore]) --- tests/t_revert_restrict.at +++ tests/t_revert_restrict.at @@ -0,0 +1,58 @@ +# -*- Autoconf -*- + +AT_SETUP([revert works with restrictions]) + +MONOTONE_SETUP + +AT_DATA(origfile, [some file +]) +AT_DATA(orig.ignore, [a file type that is usually ignored +]) +AT_DATA(orig2, [another file +]) +AT_DATA(modified1, [this is different 1 +]) +AT_DATA(modified2, [this is different 2 +]) +AT_DATA(modified3, [this is different 3 +]) + +AT_DATA(ignore_hook.lua, [ +function ignore_file(name) + if (string.find(name, "test_hooks.lua")) then return true end + if (string.find(name, "test.db")) then return true end + if (string.find(name, "%.ignore$")) then return true end + return false +end +]) + +AT_CHECK(cp origfile testfile) +AT_CHECK(cp orig.ignore file.ignore) +AT_CHECK(cp orig2 file2) +AT_CHECK(MONOTONE add testfile file.ignore file2, [], [ignore], [ignore]) +AT_CHECK(MONOTONE --branch=testbranch commit --message='blah blah', [], [ignore], [ignore]) + +# modify the files, then revert the 'ignored' file +AT_CHECK(cp modified1 testfile) +AT_CHECK(cp modified2 file.ignore) + +AT_CHECK(MONOTONE --rcfile=ignore_hook.lua revert file.ignore, [], [ignore], [ignore]) + +# check that only the 'ignored' file was reverted +AT_CHECK(cmp testfile modified1, [0], [ignore], [ignore]) +AT_CHECK(cmp file.ignore orig.ignore, [0], [ignore], [ignore]) + +# now run it again with two paths, one in the ignorehook list, the other normal +AT_CHECK(MONOTONE revert, [], [ignore], [ignore]) +AT_CHECK(cp modified1 testfile) +AT_CHECK(cp modified2 file.ignore) +AT_CHECK(cp modified3 file2) + +AT_CHECK(MONOTONE --rcfile=ignore_hook.lua revert file.ignore testfile, [], [ignore], [ignore]) + +# check that the files are correct +AT_CHECK(cmp testfile origfile, [0], [ignore], [ignore]) +AT_CHECK(cmp file.ignore orig.ignore, [0], [ignore], [ignore]) +AT_CHECK(cmp file2 modified3, [0], [ignore], [ignore]) + +AT_CLEANUP --- tests/t_status.at +++ tests/t_status.at @@ -0,0 +1,63 @@ +AT_SETUP([status with missing files]) +MONOTONE_SETUP + +# patch existing file +# add new file (with patch) +# rename existing file +# rename and patch existing file +# drop existing file + +# again with --brief + +ADD_FILE(from, [from +]) +ADD_FILE(from_patched, [from_patched +]) +ADD_FILE(patched, [patched +]) +ADD_FILE(dropped, [dropped +]) + +COMMIT(testbranch) + +ADD_FILE(added, [added +]) + +AT_DATA(from_patched, [from_patched +patched +]) +AT_DATA(patched, [patched +patched +]) + +AT_CHECK(MONOTONE drop dropped, [], [ignore], [ignore]) + +AT_CHECK(mv from to, [], [ignore], [ignore]) +AT_CHECK(mv from_patched to_patched, [], [ignore], [ignore]) + +AT_CHECK(MONOTONE rename from to, [], [ignore], [ignore]) +AT_CHECK(MONOTONE rename from_patched to_patched, [], [ignore], [ignore]) + +AT_CHECK(MONOTONE status, [], [stdout], [ignore]) +AT_CHECK(grep '^delete_file "dropped"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^rename_file "from"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^ to "to"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^rename_file "from_patched"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^ to "to_patched"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^add_file "added"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^patch "added"' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^patch "patched"' stdout, [], [ignore], [ignore]) 
+AT_CHECK(grep '^patch "to_patched"' stdout, [], [ignore], [ignore]) + +AT_CHECK(MONOTONE status --brief, [], [stdout], [ignore]) +AT_CHECK(grep '^dropped dropped' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^renamed from' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^ to to' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^renamed from_patched' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^ to to_patched' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^added added' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^patched added' stdout, [1], [ignore], [ignore]) +AT_CHECK(grep '^patched patched' stdout, [], [ignore], [ignore]) +AT_CHECK(grep '^patched to_patched' stdout, [], [ignore], [ignore]) + +AT_CLEANUP --- testsuite.at +++ testsuite.at @@ -650,10 +650,14 @@ m4_include(tests/t_annotate_split_line.at) m4_include(tests/t_automate_certs.at) m4_include(tests/t_selector_later_earlier.at) -m4_include(tests/t_merge_binary.at) m4_include(tests/t_automate_stdio.at) m4_include(tests/t_cvsimport_drepper.at) m4_include(tests/t_update_with_pending_drop.at) m4_include(tests/t_update_with_pending_add.at) m4_include(tests/t_update_with_pending_rename.at) m4_include(tests/t_restricted_commit_with_inodeprints.at) +m4_include(tests/t_merge_manual.at) +m4_include(tests/t_revert_restrict.at) +m4_include(tests/t_status.at) +m4_include(tests/t_rcfile_dir.at) +m4_include(tests/t_lua_includedir.at) --- ui.cc +++ ui.cc @@ -120,6 +120,8 @@ div = 1024; suffix = "k"; } + // we reset the mod to the divider, to avoid spurious screen updates + i->second->mod = div / 10; count = (F("%.1f%s") % (i->second->ticks / div) % suffix).str(); } else --- work.cc +++ work.cc @@ -674,6 +674,8 @@ string const binary_encoding("binary"); string const default_encoding("default"); +string const manual_merge_attribute("manual_merge"); + static bool find_in_attr_map(attr_map const & attr, file_path const & file, std::string const & attr_key, --- work.hh +++ work.hh @@ -165,6 +165,7 @@ attr_map const & options); extern std::string const encoding_attribute; +extern std::string const manual_merge_attribute; bool get_attribute_from_db(file_path const & file, std::string const & attr_key,
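As an aside, a minimal user-rcfile sketch (not taken from this patch) of how the new manual_merge attribute can be extended. It reuses the attr_init_functions table and the binary_file() helper that std_hooks.lua installs above; the extra ".dat" rule is purely hypothetical.

if (attr_init_functions == nil) then attr_init_functions = {} end
attr_init_functions["manual_merge"] =
   function(filename)
      -- same test std_hooks.lua installs by default
      if (binary_file(filename)) then return "true" end
      -- hypothetical extra rule: never auto-merge .dat files
      if (string.find(string.lower(filename), "%.dat$")) then return "true" end
      return nil
   end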