# # # patch "dumb.py" # from [13bf83c4976d0d99ebb77e6f6d26cd21af981373] # to [b55ab29b45854fc6e77394f0df086ec6b63d15de] # # patch "monotone.py" # from [a8880d3b520c818237f77bde9ad759a125fef5e8] # to [2011146c9195c7156ab56b6e3f93d193b6b95a29] # # patch "paramiko/channel.py" # from [32c9924c78dcb643cb8be1b62d59d018da874563] # to [0596f26cb50730f9de7c3ee14e36781cd41bf4a0] # ============================================================ --- dumb.py 13bf83c4976d0d99ebb77e6f6d26cd21af981373 +++ dumb.py b55ab29b45854fc6e77394f0df086ec6b63d15de @@ -8,152 +8,156 @@ from monotone import Monotone import zlib -def do_rollback(url): - md = MerkleDir(writeable_fs_for_url(url)) - md.rollback() +class Dumbtone: -def do_full_import(monotone, url): - print " ---------------- starting import ------------- " - monotone.ensure_db() - md = MerkleDir(readable_fs_for_url(url)) - feeder = monotone.feeder() - for id, data in md.all_chunks(): -# uncdata = zlib.decompress(data) -# for pkt in uncdata.split("[end]"): -# if len(pkt)>1: -# feeder.write(pkt+"[end]") - feeder.write(zlib.decompress(data)) - feeder.close() + def __init__(self, db, verbosity=0): + """ receives the db name and a verbosity level from 0 to 2 + 0, the default, means normal messaging + 1 enables some additional info + 2 shows detailed informations + """ + self.monotone = Monotone(db) + self.verbosity = verbosity -def do_export(monotone, url): - md = MerkleDir(writeable_fs_for_url(url)) - try: - md.begin() - curr_ids = Set(md.all_ids()) - keys = monotone.key_names() - for k in keys: - kp = monotone.get_pubkey_packet(k) - id = sha.new(kp).hexdigest() - if id not in curr_ids: - md.add(id, kp) - for rid in monotone.toposort(monotone.revisions_list()): - print "processing revision ", rid - if rid not in curr_ids: - rdata = StringIO() - revision_text = monotone.get_revision(rid) - revision_parsed = monotone.basic_io_parser(revision_text) - new_manifest = None - old_manifest = "" - new_files = {} - for stanza in revision_parsed: - stanza_type = stanza[0][0] - if stanza_type == "new_manifest": - new_manifest = stanza[0][1] -# print stanza_type, ":", new_manifest - elif stanza_type == "old_revision": - if not old_manifest: - old_manifest = stanza[0][1] -# print stanza_type, ":", old_manifest - elif stanza_type == "add_file": - new_files[stanza[1][1]] = None -# print stanza_type, ":", stanza[1][1] - elif stanza_type == "patch": - old_fid = stanza[1][1] - new_fid = stanza[2][1] - if not new_files.has_key(new_fid): - new_files[new_fid] = None + def do_rollback(self, url): + md = MerkleDir(writeable_fs_for_url(url)) + md.rollback() + + def do_full_import(self, url): + if self.verbosity > 0: + print "starting import from:", url + self.monotone.ensure_db() + md = MerkleDir(readable_fs_for_url(url)) + feeder = self.monotone.feeder(self.verbosity) + if self.verbosity > 1: + # verbose op, splits the chunk in the individual packets, + # and reads them one by one + for id, data in md.all_chunks(): + uncdata = zlib.decompress(data) + for pkt in uncdata.split("[end]"): + if len(pkt)>1: + feeder.write(pkt+"[end]") + else: + for id, data in md.all_chunks(): + feeder.write(zlib.decompress(data)) + feeder.close() + + def do_export(self, url): + md = MerkleDir(writeable_fs_for_url(url)) + try: + md.begin() + curr_ids = Set(md.all_ids()) + keys = self.monotone.key_names() + for k in keys: + kp = self.monotone.get_pubkey_packet(k) + id = sha.new(kp).hexdigest() + if id not in curr_ids: + md.add(id, kp) + for rid in self.monotone.toposort(self.monotone.revisions_list()): + print 
"processing revision ", rid + if rid not in curr_ids: + rdata = StringIO() + revision_text = self.monotone.get_revision(rid) + revision_parsed = self.monotone.basic_io_parser(revision_text) + new_files = {} + for stanza in revision_parsed: + stanza_type = stanza[0][0] + if stanza_type == "add_file": + new_files[stanza[1][1]] = None + if self.verbosity > 0: + print stanza_type, ":", stanza[1][1] + elif stanza_type == "patch": + old_fid = stanza[1][1] + new_fid = stanza[2][1] + if not new_files.has_key(new_fid): + new_files[new_fid] = None + if old_fid: + new_files[new_fid] = old_fid + if self.verbosity > 0: + print stanza_type, ":", stanza[1][1],":", stanza[2][1] + + for new_fid, old_fid in new_files.items(): if old_fid: - new_files[new_fid] = old_fid -# print stanza_type, ":", stanza[1][1],":", stanza[2][1] + if self.verbosity > 0: + print "get_file_delta:",old_fid, new_fid + fdp =self.monotone.get_file_delta_packet(old_fid, new_fid) + if self.verbosity > 0: + print "file_delta (", old_fid, ",", new_fid,"):",fdp + rdata.write(fdp) + else: + if self.verbosity > 0: + print "get file_packet:",new_fid + fpp = self.monotone.get_file_packet(new_fid) + if self.verbosity > 0: + print "file_packet(",new_fid,"):",fpp + rdata.write(fpp) + rdata.write(self.monotone.get_revision_packet(rid)) + md.add(rid, rdata.getvalue()) + certs = self.monotone.get_cert_packets(rid) + if self.verbosity > 0: + print "rev ", rid, " certs:",certs + for cert in certs: + id = sha.new(cert).hexdigest() + if id not in curr_ids: + md.add(id, cert) + md.commit() + except LockError: + raise + except: + md.rollback() + raise + + class CounterCallback: + def __init__(self): + self.added = 0 + def __call__(self, id, data): + self.added += 1 + + class FeederCallback: + def __init__(self, feeder): + self.added = 0 + self.feeder = feeder + def __call__(self, id, data): + self.added += 1 + self.feeder.write(zlib.decompress(data)) + + def do_push(self, local_url, target_url): + print "Exporting changes from monotone db to %s" % (local_url,) + self.do_export(local_url) + print "Pushing changes from %s to %s" % (local_url, target_url) + local_md = MerkleDir(readable_fs_for_url(local_url)) + target_md = MerkleDir(writeable_fs_for_url(target_url)) + c = CounterCallback() + local_md.push(target_md, c) + print "Pushed %s packets to %s" % (c.added, target_url) + + def do_pull(self, local_url, source_url): + print "Pulling changes from %s to %s" % (source_url, local_url) + local_md = MerkleDir(writeable_fs_for_url(local_url)) + source_md = MerkleDir(readable_fs_for_url(source_url)) + self.monotone.ensure_db() + feeder = self.monotone.feeder(self.verbosity) + fc = FeederCallback(feeder) + local_md.pull(source_md, fc) + feeder.close() + print "Pulled and imported %s packets from %s" % (fc.added, source_url) + + def do_sync(self, local_url, other_url): + print "Exporting changes from monotone db to %s" % (local_url,) + self.do_export(local_url) + print "Synchronizing %s and %s" % (local_url, other_url) + local_md = MerkleDir(writeable_fs_for_url(local_url)) + other_md = MerkleDir(writeable_fs_for_url(other_url)) + feeder = self.monotone.feeder(self.verbosity) + pull_fc = FeederCallback(feeder) + push_c = CounterCallback() + local_md.sync(other_md, pull_fc, push_c) + feeder.close() + print "Pulled and imported %s packets from %s" % (pull_fc.added, other_url) + print "Pushed %s packets to %s" % (push_c.added, other_url) - if old_manifest: - mdp = monotone.get_manifest_delta_packet(old_manifest, - new_manifest) -# print "manifest_delta:",mdp - 
-                else:
-                    mpp=monotone.get_manifest_packet(new_manifest)
-#                    print "manifest_packet:",mpp
-                    rdata.write(mpp)
-                for new_fid, old_fid in new_files.items():
-                    if old_fid:
-#                        print "get_file_delta:",old_fid, new_fid
-                        fdp =monotone.get_file_delta_packet(old_fid, new_fid)
-#                        print "file_delta:",fdp
-                        rdata.write(fdp)
-                    else:
-                        fpp = monotone.get_file_packet(new_fid)
-#                        print "file_packet:",fpp
-                        rdata.write(fpp)
-                rdata.write(monotone.get_revision_packet(rid))
-                md.add(rid, rdata.getvalue())
-                certs = monotone.get_cert_packets(rid)
-                for cert in certs:
-                    id = sha.new(cert).hexdigest()
-                    if id not in curr_ids:
-                        md.add(id, cert)
-        md.commit()
-    except LockError:
-        raise
-    except:
-        md.rollback()
-        raise
-
-class CounterCallback:
-    def __init__(self):
-        self.added = 0
-    def __call__(self, id, data):
-        self.added += 1
-
-class FeederCallback:
-    def __init__(self, feeder):
-        self.added = 0
-        self.feeder = feeder
-    def __call__(self, id, data):
-        self.added += 1
-        self.feeder.write(zlib.decompress(data))
-
-def do_push(monotone, local_url, target_url):
-    print "Exporting changes from monotone db to %s" % (local_url,)
-    do_export(monotone, local_url)
-    print "Pushing changes from %s to %s" % (local_url, target_url)
-    local_md = MerkleDir(readable_fs_for_url(local_url))
-    target_md = MerkleDir(writeable_fs_for_url(target_url))
-    c = CounterCallback()
-    local_md.push(target_md, c)
-    print "Pushed %s packets to %s" % (c.added, target_url)
-
-def do_pull(monotone, local_url, source_url):
-    print "Pulling changes from %s to %s" % (source_url, local_url)
-    local_md = MerkleDir(writeable_fs_for_url(local_url))
-    source_md = MerkleDir(readable_fs_for_url(source_url))
-    monotone.ensure_db()
-    feeder = monotone.feeder()
-    fc = FeederCallback(feeder)
-    local_md.pull(source_md, fc)
-    feeder.close()
-    print "Pulled and imported %s packets from %s" % (fc.added, source_url)
-
-def do_sync(monotone, local_url, other_url):
-    print "Exporting changes from monotone db to %s" % (local_url,)
-    do_export(monotone, local_url)
-    print "Synchronizing %s and %s" % (local_url, other_url)
-    local_md = MerkleDir(writeable_fs_for_url(local_url))
-    other_md = MerkleDir(writeable_fs_for_url(other_url))
-    feeder = monotone.feeder()
-    pull_fc = FeederCallback(feeder)
-    push_c = CounterCallback()
-    local_md.sync(other_md, pull_fc, push_c)
-    feeder.close()
-    print "Pulled and imported %s packets from %s" % (pull_fc.added, other_url)
-    print "Pushed %s packets to %s" % (push_c.added, other_url)
-
 def main(name, args):
-#    monotone = Monotone("etherape.db")
-#    do_export(monotone, "guzi")
-#    monotone = Monotone("e.db")
-#    do_full_import(monotone,"guzi")
-    pass
+    pass
 
 if __name__ == "__main__":
     import sys
============================================================
--- monotone.py a8880d3b520c818237f77bde9ad759a125fef5e8
+++ monotone.py 2011146c9195c7156ab56b6e3f93d193b6b95a29
@@ -7,11 +7,13 @@
     pass
 
 class Feeder:
-    def __init__(self, args):
+    def __init__(self, verbosity, args):
         # We delay the actual process spawn, so as to avoid running monotone
         # unless some packets are actually written (this is more efficient,
         # and also avoids spurious errors from monotone when 'read' doesn't
         # actually succeed in reading anything).
+ print "verbosity:",verbosity + self.verbosity=verbosity self.args = args self.process = None @@ -25,12 +27,13 @@ stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.process.stdin.write(data) -# uncomment to force processing at every invocation -# stdout, stderr = self.process.communicate() -# print "writing: >>>",data,"<<<\n",stdout,stderr -# if self.process.returncode: -# raise MonotoneError, stderr -# self.process = None + if self.verbosity>1: + # processing every single call with a new process + stdout, stderr = self.process.communicate() + print "writing: >>>",data,"<<<\n",stdout,stderr + if self.process.returncode: + raise MonotoneError, stderr + self.process = None def close(self): if self.process is None: @@ -83,14 +86,6 @@ def get_file_delta_packet(self, old_fid, new_fid): return self.automate("packet_for_fdelta", old_fid, new_fid) - def get_manifest_packet(self, mid): - return "" -# return self.automate("packet_for_mdata", mid) - - def get_manifest_delta_packet(self, old_mid, new_mid): - return "" -# return self.automate("packet_for_mdelta", old_mid, new_mid) - def get_cert_packets(self, rid): output = self.automate("packets_for_certs", rid) packets = [] @@ -168,9 +163,9 @@ return parser(self.process.stdout) # feeds stuff into 'monotone read' - def feeder(self): + def feeder(self, verbosity): args = [self.executable, "--db", self.db, "read"] - return Feeder(args) + return Feeder(verbosity, args) # copied wholesale from viewmtn (08fd7bf8143512bfcabe5f65cf40013e10b89d28)'s # monotone.py. hacked to remove the []s from hash values, and to leave in ============================================================ --- paramiko/channel.py 32c9924c78dcb643cb8be1b62d59d018da874563 +++ paramiko/channel.py 0596f26cb50730f9de7c3ee14e36781cd41bf4a0 @@ -880,7 +880,7 @@ if self.pipe is not None: self.pipe.set() self.in_buffer += s - self.in_buffer_cv.notifyAll() + self.in_buffer_cv.notifyAll() finally: self.lock.release()