@@ -4,8 +4,8 @@ KVM test utility functions.
@copyright: 2008-2009 Red Hat Inc.
"""
-import md5, thread, subprocess, time, string, random, socket, os, signal, pty
-import select, re, logging, commands, cPickle
+import md5, sha, thread, subprocess, time, string, random, socket, os, signal
+import select, re, logging, commands, cPickle, pty
from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
import kvm_subprocess
@@ -788,3 +788,105 @@ def md5sum_file(filename, size=None):
size -= len(data)
f.close()
return o.hexdigest()
+
+
+def sha1sum_file(filename, size=None):
+ """
+ Calculate the sha1sum of filename.
+ If size is not None, limit to first size bytes.
+ Throw exception if something is wrong with filename.
+    Can also be implemented with a bash one-liner (assuming size % 1024 == 0):
+    dd if=filename bs=1024 count=size/1024 | sha1sum -
+
+ @param filename: Path of the file that will have its sha1sum calculated.
+    @return: sha1sum of the file.
+ """
+ chunksize = 4096
+ fsize = os.path.getsize(filename)
+    if not size or size > fsize:
+ size = fsize
+ f = open(filename, 'rb')
+ o = sha.new()
+ while size > 0:
+ if chunksize > size:
+ chunksize = size
+ data = f.read(chunksize)
+ if len(data) == 0:
+ logging.debug("Nothing left to read but size=%d" % size)
+ break
+ o.update(data)
+ size -= len(data)
+ f.close()
+ return o.hexdigest()
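+
+
+# Illustrative usage (a sketch; the ISO path and digest below are hypothetical,
+# not taken from the test code): verify a downloaded image against a known
+# sha1 digest.
+#
+#     iso_sha1 = sha1sum_file("/tmp/Fedora-11-x86_64-DVD.iso")
+#     if iso_sha1 != "0a1b2c3d4e5f60718293a4b5c6d7e8f901234567":
+#         raise error.TestError("sha1sum of the ISO does not match")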
+
+
+def unmap_url_cache(cachedir, url, expected_hash, method="md5"):
+ """
+    Download a file from a URL into a cache directory. If the file is already
+    present in the cache and has the expected hash, it is not downloaded
+    again.
+
+ @param cachedir: Directory that might hold a copy of the file we want to
+ download.
+ @param url: URL for the file we want to download.
+ @param expected_hash: Hash string that we expect the file downloaded to
+ have.
+    @param method: Method used to calculate the hash string (md5, sha1).
+    @return: Local path of the requested file in the cache directory.
+ """
+ # Let's convert cachedir to a canonical path, if it's not already
+ cachedir = os.path.realpath(cachedir)
+ if not os.path.isdir(cachedir):
+ try:
+ os.makedirs(cachedir)
+        except OSError:
+ raise ValueError('Could not create cache directory %s' % cachedir)
+ file_from_url = os.path.basename(url)
+ file_local_path = os.path.join(cachedir, file_from_url)
+
+ file_hash = None
+ failure_counter = 0
+    while file_hash != expected_hash:
+ if os.path.isfile(file_local_path):
+ if method == "md5":
+ file_hash = md5sum_file(file_local_path)
+ elif method == "sha1":
+ file_hash = sha1sum_file(file_local_path)
+
+ if file_hash == expected_hash:
+ # File is already at the expected position and ready to go
+ src = file_from_url
+ else:
+ # Let's download the package again, it's corrupted...
+ logging.error("Seems that file %s is corrupted, trying to "
+ "download it again" % file_from_url)
+ src = url
+ failure_counter += 1
+ else:
+ # File is not there, let's download it
+ src = url
+ if failure_counter > 1:
+ raise EnvironmentError("Consistently failed to download the "
+ "package %s. Aborting further download "
+ "attempts. This might mean either the "
+ "network connection has problems or the "
+ "expected hash string that was determined "
+ "for this file is wrong" % file_from_url)
+ file_path = utils.unmap_url(cachedir, src, cachedir)
+
+ return file_path
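+
+
+# Illustrative usage (a sketch; the cache directory, URL and md5 digest are
+# hypothetical): fetch an ISO into a cache directory, reusing the cached copy
+# whenever its md5sum already matches the expected value.
+#
+#     iso_path = unmap_url_cache("/tmp/kvm_cache",
+#                                "http://example.com/isos/install.iso",
+#                                "6d22b1cb0e47d9b22afc57338bb585c1")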
+
+
+def get_hash_from_file(sha_path, dvd_basename):
+ """
+    Get the hash of a given DVD image from a hash file
+    (hash files are usually named MD5SUM or SHA1SUM and are located inside the
+    download directories of the DVDs).
+
+    @param sha_path: Local path to the hash file.
+    @param dvd_basename: Basename of the DVD image.
+    @return: Hash string for the DVD image, or None if it is not found.
+ """
+    hash_file = open(sha_path, 'r')
+    try:
+        for line in hash_file.readlines():
+            if dvd_basename in line:
+                return line.split()[0]
+    finally:
+        hash_file.close()
+
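+# Illustrative usage (a sketch; the file names and iso_url are hypothetical):
+# look up the expected digest of a DVD image in its SHA1SUM file, then hand it
+# to unmap_url_cache so the image is only re-downloaded when the cached copy
+# does not match.
+#
+#     expected_sha1 = get_hash_from_file("SHA1SUM", "Fedora-11-x86_64-DVD.iso")
+#     iso = unmap_url_cache("/tmp/kvm_cache", iso_url, expected_sha1, "sha1")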