commit a7b4459

author     2005-03-02 17:29:30 UTC
committer  2005-03-02 17:29:30 UTC
parent cd169849746bd8468427627e3c88994547a69c8f

Index compare updates.
This patch updates the main index comparison and sync loops; there's no need
to split it into smaller patches, since this code is being reworked all the time.
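
For orientation, the sync flow this patch arrives at boils down to three passes
over the two indexes: copy or update, remove leftovers, then fix up metadata.
The sketch below restates those passes as one standalone Python function; the
cmp_data()/cmp_mdata()/copy_file()/update_mdata() names and the dict-like db
attribute come from the diff, while the sync() wrapper and the precise
behaviour of those methods are assumptions made for illustration.

import os

def sync(srcidx, dstidx, dst_path):
	"Illustrative three-pass sync between two path-keyed indexes."
	skeys = srcidx.db.keys()
	dkeys = dstidx.db.keys()
	updated_files = []

	# pass 1: copy files that are missing from the destination or whose
	# data differs; remember everything whose metadata must be refreshed
	for f in skeys:
		dst = os.path.join(dst_path, f)
		if f not in dkeys or srcidx.db[f].cmp_data(dstidx.db[f]):
			try:
				os.unlink(dst)	# drop any stale destination entry first
			except OSError:
				pass
			srcidx.db[f].copy_file(dst)
			updated_files.append((f, dst))
		elif srcidx.db[f].cmp_mdata(dstidx.db[f]):
			updated_files.append((f, dst))

	# pass 2: remove entries that exist only in the destination
	# (the patch's force_unlink() also knows how to drop directories)
	for f in dkeys:
		if f not in skeys:
			os.unlink(os.path.join(dst_path, f))

	# pass 3: apply metadata once all copying is done, as the patch does
	for f, dst in updated_files:
		srcidx.db[f].update_mdata(dst)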

abk +47 -15

diff --git a/abk b/abk
index d568ba3..34208e7 100644
--- a/abk
+++ b/abk
@@ -168,20 +168,17 @@ class file_info:
 			os.mkfifo(dst, self.mode & 07777)
 		elif self.type == 'd':
 			# we just create directories
-			#os.mkdir(dst, self.type & 07777)
 			try:
 				os.makedirs(dst, self.mode & 07777)
 			except OSError:
-				# ignore if the dir. already exists
+				# ignore it if the dir already exists; this can
+				# happen because the walker doesn't visit entries
+				# in any particular order, so a subdirectory may
+				# be created before its parent.
 				pass
 		else:
 			raise 'Unk type: 0x%x %d' % (self.mode, self.name)
 
-		# no need to update metadata if we're going to do it
-		# afterwards
-		#self.update_mdata(dst)
-
-
 	def hash_file(self):
 		"Returns the sha1sum of a file."
 		hash = sha.new()
@@ -261,7 +258,26 @@ def UNUSED_bz2_file(src, dst = None):
 	dfile.close()
 
 
+def quiet_unlink(path):
+	"Removes the given file if it exists, or does nothing if not."
+	try:
+		os.unlink(path)
+	except OSError:
+		pass
+
 
+def force_unlink(path, type):
+	"Removes a file or directory, recursing if necessary."
+	if type != 'd':
+		try:
+			os.unlink(path)
+		except OSError:
+			pass
+	else:
+		try:
+			os.removedirs(path)
+		except OSError:
+			pass
 
 #
 # main
@@ -281,16 +297,32 @@ srcidx.populate(src_path)
 srcidx.save()
 
 # compare them
+skeys = srcidx.db.keys()
 dkeys = dstidx.db.keys()
-for f in srcidx.db.keys():
-	if f not in dkeys:
+updated_files = []
+
+for f in skeys:
+	if f not in dkeys or srcidx.db[f].cmp_data(dstidx.db[f]):
+		# files missing in destination, or data changed
 		dst = os.path.join(dst_path, f)
-		print 'copy', f, dst
+		print 'c/u', f, dst
+		quiet_unlink(dst)
 		srcidx.db[f].copy_file(dst)
-	elif srcidx.db[f] != dstidx.db[f]:
+		updated_files.append((f, dst))
+	elif srcidx.db[f].cmp_mdata(dstidx.db[f]):
+		# metadata changed
+		updated_files.append((f, os.path.join(dst_path, f)))
+
+
+for f in dkeys:
+	if f not in skeys:
+		# files in destination and not in source
 		dst = os.path.join(dst_path, f)
-		print 'chan', f, dst
-		if dstidx.db[f].type != 'd':
-			os.unlink(dst)
-		srcidx.db[f].copy_file(dst)
+		print 'unlink', f, dst
+		force_unlink(dst, dstidx.db[f].type)
+
+# update metadata
+for f, dst in updated_files:
+	print 'update', f, dst
+	srcidx.db[f].update_mdata(dst)
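
As a footnote on the two helpers added above, they can be written standalone
roughly as below. One substitution is made deliberately and flagged in a
comment: the patch calls os.removedirs(), which only removes empty
directories, while this sketch assumes a fully recursive removal is the
intent and uses shutil.rmtree() instead.

import os
import shutil

def quiet_unlink(path):
	"Removes the given file if it exists, or does nothing if not."
	try:
		os.unlink(path)
	except OSError:
		pass

def force_unlink(path, ftype):
	"Removes a file, or a whole directory tree if ftype marks a directory."
	if ftype != 'd':
		quiet_unlink(path)
	else:
		# assumption: a recursive removal is wanted here; the patch
		# itself uses os.removedirs(), which only deletes empty dirs
		shutil.rmtree(path, ignore_errors=True)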