diff -u rdiff_backup/compare.py rdiff_backup_rev/compare.py
--- rdiff_backup/compare.py	2009-03-16 10:36:21.000000000 -0400
+++ rdiff_backup_rev/compare.py	2009-06-19 19:31:10.000000000 -0400
@@ -25,7 +25,7 @@
 """
 
 from __future__ import generators
-import Globals, restore, rorpiter, log, backup, static, rpath, hash, robust
+import Globals, restore, rorpiter, log, backup, static, rpath, hash, robust, Hardlink
 
 def Compare(src_rp, mirror_rp, inc_rp, compare_time):
 	"""Compares metadata in src_rp dir with metadata in mirror_rp at time"""
@@ -80,14 +80,15 @@
 	bad_files = 0
 	for repo_rorp in repo_iter:
 		if not repo_rorp.isreg(): continue
-		if not repo_rorp.has_sha1():
+		verify_sha1 = get_hash(repo_rorp)
+		if not verify_sha1:
 			log.Log("Warning: Cannot find SHA1 digest for file %s,\n"
 				"perhaps because this feature was added in v1.1.1"
 				% (repo_rorp.get_indexpath(),), 2)
 			continue
 		fp = RepoSide.rf_cache.get_fp(base_index + repo_rorp.index, repo_rorp)
 		computed_hash = hash.compute_sha1_fp(fp)
-		if computed_hash == repo_rorp.get_sha1():
+		if computed_hash == verify_sha1:
 			log.Log("Verified SHA1 digest of " + repo_rorp.get_indexpath(), 5)
 		else:
 			bad_files += 1
@@ -95,11 +96,24 @@
 				"doesn't match recorded digest of\n   %s\n"
 				"Your backup repository may be corrupted!"
 				% (repo_rorp.get_indexpath(), computed_hash,
-				   repo_rorp.get_sha1()), 2)
+				   verify_sha1), 2)
 	RepoSide.close_rf_cache()
 	if not bad_files: log.Log("Every file verified successfully.", 3)
 	return bad_files
 
+def get_hash(repo_rorp):
+	"""Try to get a sha1 digest from the repository.  If hardlinks
+	are saved in the metadata, get the sha1 from the first hardlink"""
+	Hardlink.add_rorp(repo_rorp)
+	if Hardlink.islinked(repo_rorp):
+		verify_sha1 = Hardlink.get_sha1(repo_rorp)
+	elif repo_rorp.has_sha1():
+		verify_sha1 = repo_rorp.get_sha1()
+	else:
+		verify_sha1 = None
+	Hardlink.del_rorp(repo_rorp)
+	return verify_sha1
+
 def print_reports(report_iter):
 	"""Given an iter of CompareReport objects, print them to screen"""
 	assert not Globals.server
@@ -199,12 +213,13 @@
 	"""Like above, but also compare sha1 sums of any regular files"""
 	def hashes_changed(src_rp, mir_rorp):
 		"""Return 0 if their data hashes same, 1 otherwise"""
-		if not mir_rorp.has_sha1():
+		verify_sha1 = get_hash(mir_rorp)
+		if not verify_sha1:
 			log.Log("Warning: Metadata file has no digest for %s, "
 				"unable to compare." % (mir_rorp.get_indexpath(),), 2)
 			return 0
 		elif (src_rp.getsize() == mir_rorp.getsize() and
-			  hash.compute_sha1(src_rp) == mir_rorp.get_sha1()):
+			  hash.compute_sha1(src_rp) == verify_sha1):
 			return 0
 		return 1
 
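
For reviewers: the patch's core idea is that when a file is hardlinked, the SHA1 digest may be recorded only on the first link of the inode group, so the new get_hash() helper asks the Hardlink module first and falls back to the entry's own digest, returning None when neither exists. Below is a minimal, self-contained sketch of that fallback logic; the Entry class, the inode_digests map, and lookup_sha1() are illustrative stand-ins for this note, not rdiff-backup APIs.

# Illustrative sketch only: prefer the digest recorded for a hardlink
# group, then the entry's own digest, else None (which would trigger the
# "Cannot find SHA1 digest" warning path in Verify()).

class Entry(object):
	"""Stand-in for a repository metadata entry (rorp)."""
	def __init__(self, index, inode_key=None, sha1=None):
		self.index = index          # path within the repository
		self.inode_key = inode_key  # (dev, inode) shared by hardlinks, or None
		self.sha1 = sha1            # digest recorded for this entry, or None

def lookup_sha1(entry, inode_digests):
	"""Mimic get_hash(): digest stored for the hardlink group if any,
	otherwise the entry's own digest, otherwise None."""
	if entry.inode_key is not None and entry.inode_key in inode_digests:
		return inode_digests[entry.inode_key]
	return entry.sha1

if __name__ == "__main__":
	# Two hardlinks to the same inode; only the first carries the digest.
	digests = {(2049, 12345): "da39a3ee5e6b4b0d3255bfef95601890afd80709"}
	first = Entry("dir/a", inode_key=(2049, 12345),
		sha1="da39a3ee5e6b4b0d3255bfef95601890afd80709")
	second = Entry("dir/b", inode_key=(2049, 12345), sha1=None)
	plain = Entry("dir/c", sha1=None)

	for e in (first, second, plain):
		print("%s -> %s" % (e.index, lookup_sha1(e, digests)))
	# dir/a and dir/b resolve to the same digest; dir/c yields None.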