From patchwork Wed Oct 21 21:31:45 2009
X-Patchwork-Submitter: Lucas Meneghel Rodrigues
X-Patchwork-Id: 55222
From: Lucas Meneghel Rodrigues
To: autotest@test.kernel.org
Cc: kvm@vger.kernel.org, mgoldish@redhat.com, jburke@redhat.com,
	Lucas Meneghel Rodrigues
Subject: [PATCH 1/3] KVM test: Add new utility functions to kvm_utils
Date: Wed, 21 Oct 2009 19:31:45 -0200
Message-Id: <1256160707-4333-1-git-send-email-lmr@redhat.com>

diff --git a/client/tests/kvm/kvm_utils.py b/client/tests/kvm/kvm_utils.py
index 53b664a..f1a6b4b 100644
--- a/client/tests/kvm/kvm_utils.py
+++ b/client/tests/kvm/kvm_utils.py
@@ -4,8 +4,8 @@ KVM test utility functions.
 @copyright: 2008-2009 Red Hat Inc.
 """
 
-import md5, thread, subprocess, time, string, random, socket, os, signal, pty
-import select, re, logging, commands, cPickle
+import md5, sha, thread, subprocess, time, string, random, socket, os, signal
+import select, re, logging, commands, cPickle, pty
 from autotest_lib.client.bin import utils
 from autotest_lib.client.common_lib import error
 import kvm_subprocess
@@ -788,3 +788,105 @@ def md5sum_file(filename, size=None):
         size -= len(data)
     f.close()
     return o.hexdigest()
+
+
+def sha1sum_file(filename, size=None):
+    """
+    Calculate the sha1sum of filename.
+    If size is not None, limit to first size bytes.
+    Raise an exception if something is wrong with filename.
+    Can also be implemented with a bash one-liner (assuming size%1024==0):
+    dd if=filename bs=1024 count=size/1024 | sha1sum -
+
+    @param filename: Path of the file that will have its sha1sum calculated.
+    @return: sha1sum of the file.
+    """
+    chunksize = 4096
+    fsize = os.path.getsize(filename)
+    if not size or size > fsize:
+        size = fsize
+    f = open(filename, 'rb')
+    o = sha.new()
+    while size > 0:
+        if chunksize > size:
+            chunksize = size
+        data = f.read(chunksize)
+        if len(data) == 0:
+            logging.debug("Nothing left to read but size=%d" % size)
+            break
+        o.update(data)
+        size -= len(data)
+    f.close()
+    return o.hexdigest()
+
+
+def unmap_url_cache(cachedir, url, expected_hash, method="md5"):
+    """
+    Download a file from a URL to a cache directory. If the file is already
+    present in the cache and has the expected hash, do not download it
+    again.
+
+    @param cachedir: Directory that might hold a copy of the file we want to
+            download.
+    @param url: URL for the file we want to download.
+    @param expected_hash: Hash string that we expect the downloaded file to
+            have.
+    @param method: Method used to calculate the hash string (md5, sha1).
+    """
+    # Convert cachedir to a canonical path, if it's not one already
+    cachedir = os.path.realpath(cachedir)
+    if not os.path.isdir(cachedir):
+        try:
+            os.makedirs(cachedir)
+        except OSError:
+            raise ValueError('Could not create cache directory %s' % cachedir)
+    file_from_url = os.path.basename(url)
+    file_local_path = os.path.join(cachedir, file_from_url)
+
+    file_hash = None
+    failure_counter = 0
+    while file_hash != expected_hash:
+        if os.path.isfile(file_local_path):
+            if method == "md5":
+                file_hash = md5sum_file(file_local_path)
+            elif method == "sha1":
+                file_hash = sha1sum_file(file_local_path)
+
+            if file_hash == expected_hash:
+                # File is already in the cache and ready to go
+                src = file_from_url
+            else:
+                # The cached copy is corrupted, so download it again
+                logging.error("Seems that file %s is corrupted, trying to "
+                              "download it again" % file_from_url)
+                src = url
+                failure_counter += 1
+        else:
+            # File is not there, download it
+            src = url
+        if failure_counter > 1:
+            raise EnvironmentError("Consistently failed to download the "
+                                   "package %s. Aborting further download "
+                                   "attempts. This might mean either the "
+                                   "network connection has problems or the "
+                                   "expected hash string that was determined "
+                                   "for this file is wrong" % file_from_url)
+        file_path = utils.unmap_url(cachedir, src, cachedir)
+
+    return file_path
+
+
+def get_hash_from_file(sha_path, dvd_basename):
+    """
+    Get the hash of a given DVD image from a hash file.
+    (Hash files are usually named MD5SUM or SHA1SUM and are located inside
+    the download directories of the DVDs.)
+
+    @param sha_path: Local path to a hash file.
+    @param dvd_basename: Basename of a DVD image.
+    """
+    hash_file = open(sha_path, 'r')
+    for line in hash_file.readlines():
+        if dvd_basename in line:
+            return line.split()[0]
+
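
A minimal usage sketch (not part of the patch) of how a test might combine the
new helpers: look up the expected hash of an ISO in a SHA1SUM file and then
fetch the ISO into a cache directory only if a good copy is not already there.
It assumes kvm_utils is importable from the test's working directory; the
cache path, URL, and pre-fetched SHA1SUM file below are invented for
illustration.

import os
import kvm_utils

cachedir = "/tmp/kvm_test_cache"                       # hypothetical cache dir
iso_url = "http://example.com/isos/linux-install.iso"  # hypothetical ISO URL
sha1sum_path = "/tmp/kvm_test_cache/SHA1SUM"           # hypothetical, assumed already fetched

# Look up the expected SHA1 of the ISO in the hash file...
expected_sha1 = kvm_utils.get_hash_from_file(sha1sum_path,
                                             os.path.basename(iso_url))

# ...then download the ISO only if a copy with that hash is not cached yet.
iso_path = kvm_utils.unmap_url_cache(cachedir, iso_url, expected_sha1,
                                     method="sha1")
print "ISO available at %s" % iso_path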