author     <albertogli@telpin.com.ar>  2005-03-02 22:48:48 UTC
committer  <albertogli@telpin.com.ar>  2005-03-02 22:48:48 UTC
parent     2c27b176f4194c3795970474d1e10bf7758ffbeb
abk        +96 -53
diff --git a/abk b/abk
index ac969fa..72a015e 100644
--- a/abk
+++ b/abk
@@ -269,67 +269,110 @@ def make_path(f):
 	pass
 
+#
+# main operations
+#
+
+def make_sync(src_path, srcidx_path, dst_path, dstidx_path):
+	# load destination index
+	print "* loading destination index"
+	dstidx = index_file(dstidx_path)
+	dstidx.load()
+
+	# create source index
+	print "* building source index"
+	srcidx = index_file(srcidx_path)
+	srcidx.populate(src_path)
+	srcidx.save()
+
+	print "* sync"
+
+	# compare them
+	update_files = []
+	for f in srcidx.names:
+		if f not in dstidx.names or \
+				not srcidx.db[f].cmp_data(dstidx.db[f]):
+			# files missing in destination, or data changed
+			dst = os.path.join(dst_path, f)
+			print 'data\t', f, dst
+			quiet_unlink(dst)
+			srcidx.db[f].copy_file(dst)
+			update_files.append((f, dst))
+		elif not srcidx.db[f].cmp_mdata(dstidx.db[f]):
+			# metadata changed
+			dst = os.path.join(dst_path, f)
+			print 'mdata\t', f, dst
+			update_files.append((f, dst))
+
+	# metadata gets changed later because otherwise we could leave
+	# directory times wrong due to files being added to a directory after
+	# their creation; this way we're sure there will be no more file
+	# creation afterwards
+	print '* mdata'
+	for f, dst in update_files:
+		try:
+			srcidx.db[f].update_mdata(dst)
+		except:
+			# it can fail if the destination doesn't have the
+			# file, ignore for now; TODO: output some kind of
+			# script so people can run it later when they get all
+			# back together
+			pass
+
+	print '* unlink'
+	for f in dstidx.names:
+		if f not in srcidx.names:
+			# files in destination and not in source
+			dst = os.path.join(dst_path, f)
+			print 'unlink\t', f, dst
+			force_unlink(dst, dstidx.db[f].type)
+
+
+def show_idx(idx_path):
+	print "* loading index"
+	idx = index_file(idx_path)
+	idx.load()
+	for f in idx.names:
+		fi = idx.db[f]
+		print "%s %d %f %s %s" % (fi.type, fi.size, fi.mtime,
+				str(fi.hash), fi.name)
+
+
 #
 # main
 #
 
+help = """abk - A backup script
+Alberto Bertogli (albertogli@telpin.com.ar)
+
+Use: abk command params
+
+Commands:
+	show idx_file
+		Shows the given index file contents.
+	sync src src.idx dst dst.idx
+		Synchronizes src with dst, using the given index files.
+"""
+
 try:
-	src_path = sys.argv[1]
-	srcidx_path = sys.argv[2]
-	dst_path = sys.argv[3]
-	dstidx_path = sys.argv[4]
+	cmd = sys.argv[1]
 except:
-	print "Use: abk srcdir src.idx dstdir dst.idx"
+	print help
 	sys.exit(1)
 
-# load destination index
-print "* loading destination index"
-dstidx = index_file(dstidx_path)
-dstidx.load()
-
-# create source index
-print "* building source index"
-srcidx = index_file(srcidx_path)
-srcidx.populate(src_path)
-srcidx.save()
-
-print "* sync"
-
-# compare them
-update_files = []
-for f in srcidx.names:
-	if f not in dstidx.names or not srcidx.db[f].cmp_data(dstidx.db[f]):
-		# files missing in destination, or data changed
-		dst = os.path.join(dst_path, f)
-		print 'data\t', f, dst
-		quiet_unlink(dst)
-		srcidx.db[f].copy_file(dst)
-		update_files.append((f, dst))
-	elif not srcidx.db[f].cmp_mdata(dstidx.db[f]):
-		# metadata changed
-		dst = os.path.join(dst_path, f)
-		print 'mdata\t', f, dst
-		update_files.append((f, dst))
-
-# metadata gets changed later because otherwise we could leave directory times
-# wrong due to files being added to a directory after their creation; this way
-# we're sure there will be no more file creation afterwards
-print '* mdata'
-for f, dst in update_files:
+if cmd == 'show':
+	if len(sys.argv) < 2:
+		print help
+		sys.exit(1)
+	show_idx(sys.argv[2])
+elif cmd == 'sync':
 	try:
-		srcidx.db[f].update_mdata(dst)
+		src_path = sys.argv[2]
+		srcidx_path = sys.argv[3]
+		dst_path = sys.argv[4]
+		dstidx_path = sys.argv[5]
 	except:
-		# it can fail if the destination doesn't have the
-		# file, ignore for now; TODO: output some kind of
-		# script so people can run it later when they get all
-		# back together
-		pass
-
-print '* unlink'
-for f in dstidx.names:
-	if f not in srcidx.names:
-		# files in destination and not in source
-		dst = os.path.join(dst_path, f)
-		print 'unlink\t', f, dst
-		force_unlink(dst, dstidx.db[f].type)
+		print help
+		sys.exit(1)
+	make_sync(src_path, srcidx_path, dst_path, dstidx_path)