[Top][All Lists]
[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[Monotone-commits-diffs] net.venge.monotone: bd6181bab38491272d3de42575
From: code
Subject: [Monotone-commits-diffs] net.venge.monotone: bd6181bab38491272d3de42575135ad4dd397bc7
Date: Fri, 18 Feb 2011 20:54:32 +0100 (CET)
revision: bd6181bab38491272d3de42575135ad4dd397bc7
date: 2011-02-13T10:13:58
author: Richard Levitte <address@hidden>
branch: net.venge.monotone
changelog:
merge of '037931a33262224b5b49151c440ca798f53acff3'
and '5feb494081c224bd4e918a4a31c16060538b8311'
manifest:
format_version "1"
new_manifest [292983b693db9de2fc5e80f3d73af37af6ab8dd9]
old_revision [037931a33262224b5b49151c440ca798f53acff3]
delete "contrib/monotone.bash_completion"
rename "contrib/mtn-cleanup"
to "extra/bin/mtn-cleanup"
add_dir "extra/bin"
add_dir "test/extra/mtn-cleanup"
add_file "test/extra/mtn-cleanup/__driver__.lua"
content [71bb16f84c2b0baf423ac5f04185b11c848642de]
add_file "test/extra/mtn-cleanup/run-mtn-cleanup"
content [7cbf07edf271a2491e51fefb8df88c5bdbf0ee2d]
patch "Makefile.am"
from [3b41341b6a9e7ddf4e362b16f62032cb24b72ea1]
to [93da0951125022729fac43c86ee0db2f2502b835]
patch "test/extra/bash_completion/__driver__.lua"
from [d73c7818279d25d09614bc078061c37d9db1ea4e]
to [8c059916005d8e8a88a439b5df05e7e5f257cda9]
set "test/extra/mtn-cleanup/run-mtn-cleanup"
attr "mtn:execute"
value "true"
old_revision [5feb494081c224bd4e918a4a31c16060538b8311]
patch "src/lcs.cc"
from [fb4049733d44bc08b2961d9b1dda76084db4adad]
to [a7b9e3b57dea4f4648ca2dd3de5cd301ec36802e]
============================================================
--- Makefile.am 3b41341b6a9e7ddf4e362b16f62032cb24b72ea1
+++ Makefile.am 93da0951125022729fac43c86ee0db2f2502b835
@@ -222,6 +222,7 @@ bin_SCRIPTS = mtnopt
EXTRA_PROGRAMS =
bin_SCRIPTS = mtnopt
+dist_bin_SCRIPTS = extra/bin/mtn-cleanup
noinst_SCRIPTS = extra/building/dump-test-logs.sh
mtn_SOURCES = $(MOST_SOURCES) src/monotone.cc
@@ -324,6 +325,14 @@ endif
# extra files
+contribdir = $(docdir)/contrib
+contrib_prefix = contrib
+contrib_data = $(shell cd $(srcdir); find $(contrib_prefix) | sed -e 's,^\./,,')
+
+examplesdir = $(docdir)/examples
+examples_prefix = examples
+examples_data = $(shell cd $(srcdir); find $(examples_prefix) | sed -e 's,^\./,,')
+
EXTRA_DIST = \
HACKING INSTALL INSTALL_windows_cygwin.txt \
INSTALL_windows_native.txt README.visualc8 UPGRADE \
@@ -346,7 +355,7 @@ EXTRA_DIST = \
\
src/package_revision.txt src/package_full_revision_dist.txt \
\
- extra contrib examples mac notes visualc
+ mac notes visualc $(contrib_data) $(examples_data) extra
# Message translation support
# INST_LINGUAS is set by configure, based on the po/LINGUAS file and
@@ -366,27 +375,43 @@ EXTRA_DIST += $(ALL_POFILES) po/LINGUAS
EXTRA_DIST += $(ALL_POFILES) po/LINGUAS po/README
+.PHONY: all-nls install-nls uninstall-nls
if USE_NLS
# for installation
GMOFILES := $(addprefix po/,$(addsuffix .gmo, $(INST_LINGUAS)))
-all-local: $(GMOFILES)
+all-nls: $(GMOFILES)
-install-data-local: all-local
- @linguas='$(INST_LINGUAS)'; set -ex; for lang in $$linguas; do \
+install-nls: all-nls
+ @linguas='$(INST_LINGUAS)'; set -e; for lang in $$linguas; do \
dir=$(DESTDIR)$(localedir)/$$lang/LC_MESSAGES; \
+ echo " $(MKDIR_P) '$$dir'"; \
$(MKDIR_P) $$dir; \
+ echo " $(INSTALL_DATA) po/$$lang.gmo '$$dir/$(PACKAGE).mo'"; \
$(INSTALL_DATA) po/$$lang.gmo $$dir/$(PACKAGE).mo; \
done
-uninstall-local:
- @linguas='$(INST_LINGUAS)'; set -ex; for lang in $$linguas; do \
+uninstall-nls:
+ @linguas='$(INST_LINGUAS)'; set -e; for lang in $$linguas; do \
+ echo " rm -f '$(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(PACKAGE).mo'"; \
rm -f $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(PACKAGE).mo; \
+ echo " rmdir '$(DESTDIR)$(localedir)/$$lang/LC_MESSAGES'"; \
rmdir $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES || :; \
+ echo " rmdir '$(DESTDIR)$(localedir)/$$lang'"; \
rmdir $(DESTDIR)$(localedir)/$$lang || :; \
done
+else
+# We need to provide these targets because all-local, install-data-local
+# and uninstall-local expect them as dependencies, further down.
+all-nls:
+ @:
+install-nls:
+ @:
+uninstall-nls:
+ @:
endif
+.PHONY: dist-nls
if REBUILD_NLS
po/%.gmo: po/%.merged.po
$(V_msgfmt)$(MSGFMT) -c --statistics -o $@ $<
@@ -415,7 +440,7 @@ po/$(PACKAGE).pot: $(POTFILES)
$(XGETTEXT_PKG_OPTS) $(XGETTEXT_OPTS) \
$(POTFILES)
-dist-hook: $(ALL_GMOFILES)
+dist-nls: $(ALL_GMOFILES)
cp $(ALL_GMOFILES) $(distdir)/po
# also, kill off any backup files that got pulled in by one of the
# recursive subdirectory includes in EXTRA_DIST
@@ -428,11 +453,54 @@ po/%.gmo: $(srcdir)/po/%.gmo
po/%.gmo: $(srcdir)/po/%.gmo
cp $< $@
-dist-hook:
+dist-nls:
@echo "*** Cannot 'make dist' without xgettext" >&2; exit 1
endif
+_install_data = \
+ list='$(1)'; test -n "$(3)" || list=; \
+ for p in $$list; do \
+ if test -f "$$p" || test -d "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ src="$$d$$p"; \
+ dest="`if [ "$$p" != "$(2)" ]; then echo $$p | sed -e 's,$(2)/,,'; fi`"; \
+ if test -d "$$src"; then \
+ echo " $(MKDIR_P) '$(DESTDIR)$(3)/$$dest'"; \
+ $(MKDIR_P) "$(DESTDIR)$(3)/$$dest" || exit $$?; \
+ else \
+ echo " $(INSTALL_DATA) $$src '$(DESTDIR)$(3)/$$dest'"; \
+ $(INSTALL_DATA) $$src "$(DESTDIR)$(3)/$$dest" || exit $$?; \
+ fi; \
+ done
+_uninstall_data = \
+ list='$(1)'; test -n "$(3)" || list=; \
+ dirs_rev=; \
+ for p in $$list; do \
+ to_rm="`if [ "$$p" != "$(2)" ]; then echo $$p | sed -e 's,$(2)/,,'; fi`"; \
+ if [ -n "$$to_rm" ]; then \
+ if test -d "$(DESTDIR)$(3)/$$to_rm"; then \
+ dirs_rev="echo '$$to_rm';$${dirs_rev}"; \
+ else \
+ echo " rm -f '$(DESTDIR)$(3)/$$to_rm'"; \
+ rm -f "$(DESTDIR)$(3)/$$to_rm" || exit $$?; \
+ fi; \
+ fi; \
+ done; \
+ eval "$$dirs_rev" | while read to_rmdir; do \
+ echo " rmdir '$(DESTDIR)$(3)/$$to_rmdir'"; \
+ rmdir "$(DESTDIR)$(3)/$$to_rmdir" || exit $$?; \
+ done
+
+install-contrib-data: $(contrib_data)
+ @$(call _install_data,$(contrib_data),$(contrib_prefix),$(contribdir))
+uninstall-contrib-data:
+ @$(call _uninstall_data,$(contrib_data),$(contrib_prefix),$(contribdir))
+
+install-examples-data: $(examples_data)
+ @$(call _install_data,$(examples_data),$(examples_prefix),$(examplesdir))
+uninstall-examples-data:
+ @$(call _uninstall_data,$(examples_data),$(examples_prefix),$(examplesdir))
+
# build an OS X installer
# requires sudo since packagemaker just looks at directory permissions.
PKG_INST_ROOT = $(PWD)/$(top_builddir)/pkg_inst_root
@@ -492,10 +560,11 @@ linguas.iss: ${top_srcdir}/Makefile.am
# testsuite stuff (could this possibly be more ugly?) To get
# parallelism, we cannot use Automake's TESTS feature at all. The
-# shell script embedded in the 'check-local' rule is partially
+# shell script embedded in the 'check-testers' rule is partially
# borrowed from automake 1.9's check.am
-check-local: test/tester.status test/unit.status test/func.status test/extra.status
+.PHONY: check-testers
+check-testers: test/tester.status test/unit.status test/func.status test/extra.status
@all=0; failed=0; error=0; \
for f in $^; do \
all=`expr $$all + 1`; \
@@ -567,19 +636,21 @@ check_PROGRAMS = test/bin/unit_tester te
# We want the tests re-run even if the .status files already exist.
# .PHONY does not work for that (bad interaction with pattern rules),
# but the FORCE hack does.
-.PHONY: check-local FORCE
+.PHONY: FORCE
FORCE:
.PRECIOUS: run_tester_tests run_unit_tests run_func_tests run_extra_tests
# FIXME: if it annoys anybody, we could clean dvi, pdf and info files
# only in a separate maintainer-clean target as suggested by
# automake's man page
-mostlyclean-local:
+.PHONY: mostlyclean-tests
+mostlyclean-tests:
rm -rf test/bin test/work
rm -f run_*_tests test/*.status test/src/testlib.cc util/txt2c
- -set -x; \
- ls test/unit/tests/*/__driver__.lua 2>/dev/null | while read d; do \
+ @ls test/unit/tests/*/__driver__.lua 2>/dev/null | while read d; do \
+ echo " rm -f '$$d'"; \
rm -f $$d; \
+ echo " rmdir '$${d%/*}'"; \
rmdir $${d%/*} || :; \
done
@@ -725,8 +796,10 @@ distcleancheck_listfiles = find . -type
# line does is tell 'distcheck' to shut up and ignore those two files.
distcleancheck_listfiles = find . -type f -a ! -name package_revision.txt -a ! -name package_full_revision_dist.txt
-# the distcheck-hook checks for errors in the use of base.hh
-distcheck-hook:
+# the distcheck-base.hh checks for errors in the use of base.hh.
+# It's being used by distcheck-hook further down.
+.PHONY: distcheck-base.hh
+distcheck-base.hh:
cd $(srcdir) && $(SHELL) util/audit-includes \
$(sort $(mtn_SOURCES) $(test_unit_tester_SOURCES) $(test_bin_tester_SOURCES) \
$(UNIX_PLATFORM_SOURCES) $(WIN32_PLATFORM_SOURCES) \
@@ -734,6 +807,22 @@ man1_MANS = mtn.1
man1_MANS = mtn.1
+# All local variants of automake-supported targets here, depending on all
+# the things we want to do. This is to support multiple things to do in
+# each of them, as dependencies.
+all-local: all-nls
+install-data-local: install-nls \
+ install-contrib-data install-examples-data
+uninstall-local: uninstall-nls \
+ uninstall-contrib-data uninstall-examples-data
+mostlyclean-local: mostlyclean-tests
+check-local: check-testers
+# All hooks supported by automake here, depending on all the things we want
+# to hook in. This is to support multiple things into the same hook, as
+# dependencies.
+dist-hook: dist-nls
+distcheck-hook: distcheck-base.hh
+
mtn.1: mtn$(EXEEXT)
$(AM_V_GEN)REAL_BLDDIR=$$PWD/$(top_builddir); \
(cd $(srcdir) && $$REAL_BLDDIR/mtn manpage --norc) 2>/dev/null >$@ || rm -f $@
============================================================
--- contrib/monotone.bash_completion 7f13ba6199009a21bbbcdb3385ee9ac8f3ff220f
+++ /dev/null
@@ -1,308 +0,0 @@
-# -*- shell-script -*-
-# vim: set ft=sh sw=4 et:
-
-# bash completion for monotone 0.25
-# Author: Olivier Andrieu <address@hidden>
-# Contributions by Matthew A. Nicholson <address@hidden>
-# and Matthew Sackman <address@hidden>
-
-# source this file from your .bashrc
-# If you use the bash completion package <http://www.caliban.org/bash/>,
-# copy this file in the directory /etc/bash_completion.d for a
-# system-wide install
-
-
-# The function _filedir is defined in /etc/bash_completion.
-# This is a weaker version, for those who do not have the
-# bash completion package installed.
-if ! type _filedir >& /dev/null ; then
-_filedir() {
- local IFS=$'\t\n' arg
- COMPREPLY=( ${COMPREPLY[@]:-} $(compgen ${1:--f} -- $cur) )
-}
-fi
-
-_monotone_previous_RETURN=`trap -p RETURN`
-_monotone_previous_SIGHUP=`trap -p SIGHUP`
-_monotone_previous_SIGINT=`trap -p SIGINT`
-_monotone_previous_SIGPIPE=`trap -p SIGPIPE`
-_monotone_previous_SIGTERM=`trap -p SIGTERM`
-if shopt -q extglob; then :; else
- shopt -s extglob
- trap "shopt -u extglob; ${_monotone_previous_RETURN:-trap - RETURN}; ${_monotone_previous_SIGHUP:-trap - SIGHUP}; ${_monotone_previous_SIGINT:-trap - SIGINT}; ${_monotone_previous_SIGPIPE:-trap - SIGPIPE}; ${_monotone_previous_SIGTERM:-trap - SIGTERM}" RETURN SIGHUP SIGINT SIGPIPE SIGTERM
-fi
-
-# Call monotone to complete IDs
-_monotone_complete() {
- if (( "${#cur}" >=2 )) ; then
- COMPREPLY=( ${COMPREPLY[@]:-} $(mtn $mono_db complete $1 $cur 2> /dev/null) )
- fi
-}
-
-# Call monotone to complete key ids (private or public)
-_monotone_keys() {
- local range
- if [ "$1" == "privkey" ]; then
- range='/\[private/,$'
- else
- range='1,/\[private/'
- fi
- COMPREPLY=( $(compgen -W "$(mtn $mono_db list keys 2> /dev/null |\
- sed -n ${range}'{/^[0-9a-f]/s/[0-9a-f]* //p}')" -- ${cur#*=} ) )
-}
-
-_monotone_branches() {
- COMPREPLY=( $(compgen -W "$(mtn $mono_db list branches 2> /dev/null)" -- ${cur#*=} ) )
-}
-
-_monotone_tags() {
- COMPREPLY=( $(compgen -W "$(mtn $mono_db list tags 2> /dev/null | awk '{print $1}')" -- ${cur#*=} ) )
-}
-
-_monotone() {
- local cur prev mono_db
-
- for w in ${COMP_WORDS[@]} ; do
- if [[ "$w" == --db=* ]] ; then
- mono_db="$w" ; break
- fi
- done
- if [ -z "$mono_db" ] ; then
- for i in ${!COMP_WORDS[@]} ; do
- [ $i -eq 0 ] && continue
- prev="${COMP_WORDS[$i-1]}"
- if [ "$prev" == --db -o "$prev" == -d ] ; then
- mono_db="--db=${COMP_WORDS[$i]}" ; break
- fi
- done
- fi
-
- cur=${COMP_WORDS[COMP_CWORD]}
- prev=${COMP_WORDS[COMP_CWORD-1]}
-
- case $cur in
- */a:!(*/[[:alpha:]]:*) | *=a:!(*/[[:alpha:]]:*) | a:!(*/[[:alpha:]]:*) )
- cur="${cur##*a:}"
- _monotone_keys pubkey
- ;;
- */b:!(*/[[:alpha:]]:*) | *=b:!(*/[[:alpha:]]:*) | b:!(*/[[:alpha:]]:*) )
- cur="${cur##*b:}"
- _monotone_branches
- ;;
- */h:!(*/[[:alpha:]]:*) | *=h:!(*/[[:alpha:]]:*) | h:!(*/[[:alpha:]]:*) )
- cur="${cur##*h:}"
- _monotone_branches
- ;;
- */i:!(*/[[:alpha:]]:*) | *=i:!(*/[[:alpha:]]:*) | i:!(*/[[:alpha:]]:*) )
- cur="${cur##*i:}"
- _monotone_complete revision
- ;;
- */t:!(*/[[:alpha:]]:*) | *=t:!(*/[[:alpha:]]:*) | t:!(*/[[:alpha:]]:*) )
- cur="${cur##*t:}"
- _monotone_tags
- ;;
- --db=* | --rcfile=* | --dump=* )
- cur="${cur#*=}"
- _filedir
- ;;
- --root=* )
- cur="${cur#*=}"
- _filedir -d
- ;;
- --branch=* )
- _monotone_branches
- ;;
- --key=* )
- _monotone_keys pubkey
- ;;
- --ticker=* )
- cur="${cur#*=}"
- COMPREPLY=( $(compgen -W 'count dot none' -- $cur ) )
- ;;
- --revision=* )
- cur="${cur#*=}"
- _monotone_complete revision
- ;;
- -* )
- COMPREPLY=( $(compgen -W '
- --brief
- --confdir
- --db -d
- --debug
- --dump
- --verbose
- --help -h
- --key -k
- --keydir
- --log
- --no-standard-rcfiles
- --no-builtin-rcfiles
- --pid-file
- --quiet
- --rcfile
- --rellyquiet
- --root
- --ticker
- --version
- --xargs -@
- --version
-
- --message -m
- --branch -b
- --revision -r
- --date
- --author
- --depth
- --execute -e
-
- --exclude
- --key-to-push
- --set-default
-
- --bind
- ' -- $cur) )
- ;;
- * )
- case "$prev" in
- --db | -d | --rcfile | --dump | --root )
- _filedir
- ;;
- --branch | -b )
- _monotone_branches
- ;;
- --key | -k )
- _monotone_keys pubkey
- ;;
- --ticker )
- COMPREPLY=( $(compgen -W 'count dot none' -- $cur ) )
- ;;
- --from | --to | --revision | -r )
- _monotone_complete revision
- ;;
- db )
- COMPREPLY=( $(compgen -W 'init info version dump load
- migrate execute
- kill_rev_locally kill_branch_certs_locally
- kill_tag_locally check changesetify rosterify
- regenerate_caches set_epoch' -- $cur ) )
- ;;
- diff )
- COMPREPLY=( $(compgen -W '--context --depth --diff-args
- --exclude --external --no-show-encloser --unified
- --revision -r' -- $cur ) )
- _filedir
- ;;
- annotate )
- COMPREPLY=( $(compgen -W '--brief --revision -r' -- $cur ) )
- _filedir
- ;;
- log )
- COMPREPLY=( $(compgen -W '--brief --diffs --from --last
- --next --no-files --no-graph --no-merges --to' -- $cur ) )
- _filedir
- ;;
- approve | disapprove | comment | suspend | tag | testresult | cert | explicit_merge | trusted | update )
- _monotone_complete revision
- ;;
- ls | list )
- COMPREPLY=( $(compgen -W 'branches certs changed epochs
- keys known missing tags ignored unknown vars' -- $cur ) )
- ;;
- attr )
- COMPREPLY=( $(compgen -W 'get set drop' -- $cur ) )
- ;;
- co | checkout )
- _filedir -d
- _monotone_complete revision
- ;;
- status | cvs_import | add | drop | rm | rename | mv | revert | identify )
- _filedir
- ;;
- complete )
- COMPREPLY=( $(compgen -W 'revision manifest file key' -- $cur) )
- ;;
- cat )
- _filedir
- ;;
- clone | push | pull | serve | sync )
- COMPREPLY=( $(compgen -A hostname -- $cur) )
- ;;
- pubkey | privkey )
- _monotone_keys "$prev"
- ;;
- passphrase | dropkey )
- _monotone_keys privkey
- ;;
- propagate | reindex )
- _monotone_branches
- ;;
- * )
- if (( $COMP_CWORD >= 2 )) ; then
- local prev2=${COMP_WORDS[COMP_CWORD-2]}
- case "$prev2" in
- diff | explicit_merge )
- _monotone_complete revision
- ;;
- co | checkout | rename | mv | annotate )
- _filedir
- ;;
- attr )
- _filedir
- ;;
- list )
- if [ "$prev" == certs ] ; then
- _monotone_complete revision
- _monotone_complete manifest
- _monotone_complete file
- fi
- ;;
- clone | push | pull | serve | sync | propagate )
- _monotone_branches
- ;;
- * )
- if (( $COMP_CWORD >= 3 )) ; then
- local prev3=${COMP_WORDS[COMP_CWORD-3]}
- case "$prev3" in
- explicit_merge )
- _monotone_complete revision
- _monotone_branches
- ;;
- *)
- unset prev2
- unset prev3
- _filedir
- esac
- else
- unset prev2
- _filedir
- fi
- ;;
- esac
- fi
- if (( $COMP_CWORD < 2 )) ; then
- COMPREPLY=( $(compgen -W '
- automate
- db
- fdiff fload fmerge get_roster identify rcs_import
- annotate cat complete diff help list log ls show_conflicts
- status
- cert passphrase dropkey genkey trusted
- clone pull push serve sync
- privkey pubkey read
- cvs_import
- approve comment disapprove suspend tag testresult
- checkout co explicit_merge heads merge merge_into_dir
- migrate_workspace propagate refresh_inodeprints setup
- set unset
- add attr ci commit drop mv pivot_root pluck rename revert
- rm update
- ' -- $cur) )
- fi
- ;;
- esac
- ;;
- esac
- return 0
-}
-
-complete -F _monotone -o filenames mtn
============================================================
--- test/extra/bash_completion/__driver__.lua d73c7818279d25d09614bc078061c37d9db1ea4e
+++ test/extra/bash_completion/__driver__.lua 8c059916005d8e8a88a439b5df05e7e5f257cda9
@@ -2,44 +2,7 @@ skip_if(qgrep("bash[, ]*version 3", "std
check({"bash", "--version"}, 0, true)
-- hashes/dictionaries/associative arrays are new in version 4
skip_if(qgrep("bash[, ]*version 3", "stdout"))
-mtn_setup()
-local tests = {
- ["complete_mtn_-"] = {
- ["prepare"] =
- function ()
- end,
- ["cleanup"] =
- function ()
- end
- },
- ["complete_propagate"] = {
- ["prepare"] =
- function ()
- addfile("prop-test", "foo")
- commit("prop-br1")
- addfile("prop-test2", "bar")
- commit("prop-bra2")
- check(mtn("update","-r","h:prop-br1"), 0, false, false)
- writefile("prop-test", "zoot")
- commit("prop-br1")
- end,
- ["cleanup"] =
- function ()
- end
- },
- ["complete_commit"] = {
- ["prepare"] =
- function ()
- addfile("commit-test1", "foo")
- addfile("commit-test2", "bar")
- end,
- ["cleanup"] =
- function ()
- end
- }
-}
-
function expect(test)
if monotone_path == nil then
monotone_path = os.getenv("mtn")
@@ -62,8 +25,25 @@ get("library.exp")
end
get("library.exp")
-for test,fns in pairs(tests) do
- fns.prepare()
- expect(test)
- fns.cleanup()
-end
+
+mtn_setup()
+
+-- complete_mtn_-
+expect("complete_mtn_-")
+
+-- complete_propagate
+addfile("prop-test", "foo")
+commit("prop-br1")
+addfile("prop-test2", "bar")
+commit("prop-bra2")
+check(mtn("update","-r","h:prop-br1"), 0, false, false)
+writefile("prop-test", "zoot")
+commit("prop-br1")
+
+expect("complete_propagate")
+
+-- complete_commit
+addfile("commit-test1", "foo")
+addfile("commit-test2", "bar")
+
+expect("complete_commit")
============================================================
--- /dev/null
+++ test/extra/mtn-cleanup/__driver__.lua 71bb16f84c2b0baf423ac5f04185b11c848642de
@@ -0,0 +1,22 @@
+mtn_setup()
+
+-- We do everything inside an inner workspace. mtn-cleanup IS a dangerous
+-- command, and will happily wipe away any database that's in the workspace
+check(mtn("setup", "--branch=testbranch", "workspace"), 0, false, false)
+writefile("workspace/test1", "foo")
+check(indir("workspace", mtn("add", "test1")), 0, false, false)
+check(indir("workspace", mtn("commit",
+ "--message", "blah-blah",
+ "--branch", "test1")),
+ 0, false, false)
+writefile("workspace/test1", "foobar")
+writefile("workspace/test2", "bar")
+check(indir("workspace", mtn("add", "test2")), 0, false, false)
+writefile("workspace/test3", "baz")
+
+check(indir("workspace",
+ {srcdir.."/extra/mtn-cleanup/run-mtn-cleanup",srcdir,test.root}),
+ 0, true, false)
+check(exists("workspace/test1"))
+xfail(exists("workspace/test2"))
+xfail(exists("workspace/test3"))
============================================================
--- /dev/null
+++ test/extra/mtn-cleanup/run-mtn-cleanup 7cbf07edf271a2491e51fefb8df88c5bdbf0ee2d
@@ -0,0 +1,7 @@
+#! /bin/sh
+
+# $1 {srcdir}/test
+# $2 {builddir}/test/work/extra/mtn-cleanup
+
+set -x
+yes | PATH=${2}/../../../..:${PATH} ${1}/../extra/bin/mtn-cleanup
============================================================
--- src/lcs.cc fb4049733d44bc08b2961d9b1dda76084db4adad
+++ src/lcs.cc a7b9e3b57dea4f4648ca2dd3de5cd301ec36802e
@@ -43,6 +43,14 @@
*/
+/*
+ This is now understood better. The comments below are all new,
+ most of the variable names that aren't one letter and don't
+ look like x86 registers are new, and there are some complexity
+ fixes so the recursing doesn't make it accidentally O(n^2) in
+ the input length.
+ */
+
#include "base.hh"
#include <algorithm>
#include "vector.hh"
@@ -59,6 +67,62 @@ using std::vector;
using std::sort;
using std::vector;
+/*
+ http://read.pudn.com/downloads131/sourcecode/delphi_control/558602/O(NP).pdf
+ An O(NP) Sequence Comparison Algorithm
+ Sun Wu, Udi Manber, Gene Myers; University of Arizona
+ Webb Miller; Pennsylvania State University
+ August 1989
+
+ The above paper shows how to find the edit distance between strings in time
+ at worst O(num-deletions * longer-length), and on average
+ O(longer-length + (edit-distance * num-deletions)).
+
+
+ Name the two input strings "a" and "b", "a" being the shorter one. Consider
+ an edit graph with a going down (x coordinate) and b going across (y coord).
+
+ stringBislonger
+ s\ \.
+ t \ .
+ r \ .
+ i \ \ .
+ n \ . \ .
+ g X .
+ A .
+
+ You start in the top left corner, and want to end up in the lower right
+ corner. There are 3 ways you can move: follow a diagonal for zero cost,
+ or move directly down or directly right for a cost of one. The total cost
+ of the cheapest path is the edit distance. A movement directly down
+ corresponds to a deletion, and a movement directly right corresponds to
+ an insertion.
+
+ If you had a diagonal from the top all the way to the bottom, the cost
+ would be the difference in the lengths of the input strings ("delta").
+ For every movement directly down you need to add exactly one movement
+ directly right, so the total cost D = delta + (2 * num-deletions).
+
+ Give each diagonal in the edit graph a number. The diagonal through the
+ origin is 0; diagonals above / right of it are numbered 1, 2, ...; diagonals
+ below / left of it are numbered -1, -2, ... . The diagonal through the lower
+ right corner will be number delta (difference of input lengths).
+
+ An edit path with a particular number of deletions cannot go below
+ diagonal -(num-deletions) or above diagonal delta + (num-deletions).
+ So we have bounding diagonals for any edit path up to a given number of
+ deletions and therefore up to a given length.
+
+
+ compare() with a large p_lim and full_scan = false implements this algorithm.
+
+ compare() with a given p_lim (maximum number of deletions) calculates
+ the lowest cost of a path through each relevant point along the bottom of
+ the edit graph.
+ */
+
+
+
struct work_vec
{
long lo;
@@ -126,8 +190,8 @@ struct jaffer_edit_calculator
};
static long run(work_vec & fp, long k,
- subarray<A> const & a, long m,
- subarray<B> const & b, long n,
+ subarray<A> const & a, long a_len,
+ subarray<B> const & b, long b_len,
cost_vec & CC, long p)
{
long cost = k + 2*p;
@@ -142,12 +206,12 @@ struct jaffer_edit_calculator
while (true)
{
// record costs along the way
- long xcst = m - x;
+ long xcst = a_len - x;
if (y < static_cast<long>(CC.size()) && xcst >= 0)
{
CC[y] = min(xcst + cost, CC[y]);
}
- if (x < m && y < n && a[x] == b[y])
+ if (x < a_len && y < b_len && a[x] == b[y])
{
++x; ++y;
}
@@ -199,6 +263,18 @@ struct jaffer_edit_calculator
return delta + 2*p;
}
+ // This splits the edit graph into a top half and a bottom half, calculates
+ // the (cost of the) cheapest possible path through each point along the
+ // middle, and then splits the graph into left/right portions based on that
+ // point. It then recurses on the top left and bottom right quadrants (the
+ // shortest edit path cannot possibly go through the other two quadrants).
+ //
+ // When getting costs through the top and bottom halves, it can discard the
+ // rightmost part of the top and the leftmost part of the bottom, beyond where
+ // the edit band (diagonals -(num-deletes) and delta + num-deletes) crosses
+ // the split. Even with doing this the edit band is overstated in the
+ // calls to compare(), because while max-possible-deletes (p_lim) is correct
+ // the delta value is still larger by max-possible-deletes.
static long divide_and_conquer(subarray<A> const & a, long start_a, long end_a,
subarray<B> const & b, long start_b, long end_b,
edit_vec & edits,
@@ -206,10 +282,13 @@ struct jaffer_edit_calculator
long polarity,
long p_lim)
{
- long mid_a = (start_a + end_a) / 2;
long len_b = end_b - start_b;
long len_a = end_a - start_a;
+ long const delta = len_b - len_a;
+ // total edit distance
long tcst = (2 * p_lim) + (len_b - len_a);
+ // top/bottom split point
+ long mid_a = (start_a + end_a) / 2;
I(start_a >= 0);
I(start_a <= a.size());
@@ -223,19 +302,35 @@ struct jaffer_edit_calculator
cost_vec cc(len_b + 1, len_a + len_b);
cost_vec rr(len_b + 1, len_a + len_b);
+ // get costs from the top left through each point on the top/bottom split
+ long const top_len_a = mid_a - start_a;
+ // trim off the rightmost part of b, past where the edit band crosses the split
+ long const top_end_b = min(end_b, start_b + (top_len_a + delta + p_lim + 1));
compare (cc,
- a.subset(start_a, mid_a), (mid_a - start_a),
- b.subset(start_b, end_b), len_b, min(p_lim, len_a));
+ a.subset(start_a, mid_a), top_len_a,
+ b.subset(start_b, top_end_b), top_end_b - start_b,
+ min(p_lim, len_a));
+ // get costs from the lower right through each point on the top/bottom split
+ long const bottom_len_a = end_a - mid_a;
+ // here we trim the leftmost part of b (before reversing it)
+ long const bottom_start_b = max(start_b, end_b - (bottom_len_a + delta + p_lim + 1));
compare (rr,
- a.subset(end_a, mid_a), (end_a - mid_a),
- b.subset(end_b, start_b), len_b, min(p_lim, len_a));
+ a.subset(end_a, mid_a), bottom_len_a,
+ b.subset(end_b, bottom_start_b), end_b - bottom_start_b,
+ min(p_lim, len_a));
+ // find the first (closest-to-center) point on the split line, which
+ // has the correct total (top + bottom) cost and is therefore on the
+ // shortest edit path
long b_split = mid_split(len_a, len_b, rr, cc, tcst);
+ // known costs of each half of the path
long est_c = cc[b_split];
long est_r = rr[len_b - b_split];
+ // recurse on the two halves
+
long cost_c = diff_to_et (a, start_a, mid_a,
b, start_b, start_b + b_split,
edits, edx, polarity,
[Prev in Thread] |
Current Thread |
[Next in Thread] |
- [Monotone-commits-diffs] net.venge.monotone: bd6181bab38491272d3de42575135ad4dd397bc7,
code <=