# # # patch "dumb.py" # from [64b9448246fb3ae79838786b65bca5ba0fc831dc] # to [13bf83c4976d0d99ebb77e6f6d26cd21af981373] # # patch "merkle_dir.py" # from [19d4f2bc491bcbce8645e6549c0470ee0bc0c372] # to [20f30c40c7569ca969c2d477c7e552cff3d7b94b] # # patch "monotone.py" # from [23c5f87220af71491e81661b0f3bab303f276b93] # to [a8880d3b520c818237f77bde9ad759a125fef5e8] # ============================================================ --- dumb.py 64b9448246fb3ae79838786b65bca5ba0fc831dc +++ dumb.py 13bf83c4976d0d99ebb77e6f6d26cd21af981373 @@ -13,10 +13,15 @@ md.rollback() def do_full_import(monotone, url): + print " ---------------- starting import ------------- " monotone.ensure_db() md = MerkleDir(readable_fs_for_url(url)) feeder = monotone.feeder() for id, data in md.all_chunks(): +# uncdata = zlib.decompress(data) +# for pkt in uncdata.split("[end]"): +# if len(pkt)>1: +# feeder.write(pkt+"[end]") feeder.write(zlib.decompress(data)) feeder.close() @@ -33,11 +38,6 @@ md.add(id, kp) for rid in monotone.toposort(monotone.revisions_list()): print "processing revision ", rid - certs = monotone.get_cert_packets(rid) - for cert in certs: - id = sha.new(cert).hexdigest() - if id not in curr_ids: - md.add(id, cert) if rid not in curr_ids: rdata = StringIO() revision_text = monotone.get_revision(rid) @@ -66,7 +66,6 @@ new_files[new_fid] = old_fid # print stanza_type, ":", stanza[1][1],":", stanza[2][1] - rdata.write(monotone.get_revision_packet(rid)) if old_manifest: mdp = monotone.get_manifest_delta_packet(old_manifest, new_manifest) @@ -86,7 +85,13 @@ fpp = monotone.get_file_packet(new_fid) # print "file_packet:",fpp rdata.write(fpp) + rdata.write(monotone.get_revision_packet(rid)) md.add(rid, rdata.getvalue()) + certs = monotone.get_cert_packets(rid) + for cert in certs: + id = sha.new(cert).hexdigest() + if id not in curr_ids: + md.add(id, cert) md.commit() except LockError: raise @@ -144,10 +149,10 @@ print "Pushed %s packets to %s" % (push_c.added, other_url) def main(name, args): - #monotone = Monotone("etherape.db") - #do_export(monotone, "guzi") - #monotone = Monotone("e.db") - #do_full_import(monotone,"guzi") +# monotone = Monotone("etherape.db") +# do_export(monotone, "guzi") +# monotone = Monotone("e.db") +# do_full_import(monotone,"guzi") pass if __name__ == "__main__": ============================================================ --- merkle_dir.py 19d4f2bc491bcbce8645e6549c0470ee0bc0c372 +++ merkle_dir.py 20f30c40c7569ca969c2d477c7e552cff3d7b94b @@ -87,6 +87,11 @@ # and have a very small race condition, while each file is being # swapped around. If readers try to open a file but find it does not # exist, they should try again after a short pause, before giving up. +# +# READING ORDER: +# Monotone 0.26 doesn't reorder packets so the fs should make sure to return +# chunks in the exact order of writes, to ensure a proper sequence of +# fdata, rdata, rcerts packets class _HashFile: prefix = "" @@ -309,7 +314,7 @@ #### Compressing and adding new items # can only be called from inside a transaction. 
def add(self, id, data): -# print ">>>>>>>>>>\n",data,"<<<<<<<<<<<<<<<\n" + # print ">>>>>>>>>>\n",data,"<<<<<<<<<<<<<<<\n" cp_data = zlib.compress(data) self._add_verbatim(id, cp_data) ============================================================ --- monotone.py 23c5f87220af71491e81661b0f3bab303f276b93 +++ monotone.py a8880d3b520c818237f77bde9ad759a125fef5e8 @@ -25,6 +25,12 @@ stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.process.stdin.write(data) +# uncomment to force processing at every invocation +# stdout, stderr = self.process.communicate() +# print "writing: >>>",data,"<<<\n",stdout,stderr +# if self.process.returncode: +# raise MonotoneError, stderr +# self.process = None def close(self): if self.process is None: @@ -79,11 +85,11 @@ def get_manifest_packet(self, mid): return "" - return self.automate("packet_for_mdata", mid) +# return self.automate("packet_for_mdata", mid) def get_manifest_delta_packet(self, old_mid, new_mid): return "" - return self.automate("packet_for_mdelta", old_mid, new_mid) +# return self.automate("packet_for_mdelta", old_mid, new_mid) def get_cert_packets(self, rid): output = self.automate("packets_for_certs", rid)
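
The reordering in do_export above (revision packet written after the file packets, certs
added after the revision itself) is what the READING ORDER note added to merkle_dir.py
relies on: a monotone 0.26 importer applies packets strictly in the order received, so
file data must arrive before the revision that references it, and the revision before its
certs. The sketch below is illustrative only and not part of the patch; ordered_packets is
a hypothetical helper, not a function from dumb.py or monotone.py.

    # Illustrative sketch only: emit one revision's packets in the order a
    # non-reordering importer (monotone 0.26) needs them, per the READING
    # ORDER comment: fdata/fdelta, then rdata, then rcerts.
    def ordered_packets(file_packets, revision_packet, cert_packets):
        """Return packets in dependency order: file data first, then the
        revision packet, then its cert packets."""
        out = []
        out.extend(file_packets)     # fdata / fdelta
        out.append(revision_packet)  # rdata
        out.extend(cert_packets)     # rcert
        return out

    # Example: certs always trail the revision they certify.
    chunks = ordered_packets(["[fdata ...]"], "[rdata ...]",
                             ["[rcert ...]", "[rcert ...]"])
    assert chunks[0].startswith("[fdata") and chunks[-1].startswith("[rcert")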