[releng] Generate SHA256 as hash if possible instead of MD5
- From: Olav Vitters <ovitters src gnome org>
- To: svn-commits-list gnome org
- Subject: [releng] Generate SHA256 as hash if possible instead of MD5
- Date: Wed, 15 Jul 2009 17:57:55 +0000 (UTC)
commit 9f8cd15bda66638b935b6f5be989c0f45f5bd4c8
Author: Olav Vitters <olav bkor dhs org>
Date: Wed Jul 15 19:31:47 2009 +0200
Generate SHA256 as hash if possible instead of MD5
Note: Python versions before 2.5 will still generate MD5.
tools/release_set_scripts/release | 6 +-
tools/release_set_scripts/release-suites | 6 +-
tools/smoketesting/convert-to-tarballs.py | 59 +++++++++++++++++------------
3 files changed, 41 insertions(+), 30 deletions(-)
---
diff --git a/tools/release_set_scripts/release b/tools/release_set_scripts/release
index 3f306e3..ec6b883 100755
--- a/tools/release_set_scripts/release
+++ b/tools/release_set_scripts/release
@@ -49,9 +49,9 @@ cat $RDATA | while read MDATA; do
done
cd $RDIR
-md5sum *.gz > MD5SUMS-for-gz
-md5sum *.bz2 > MD5SUMS-for-bz2
-chmod g+w $RDIR/MD5SUMS-for-*
+sha256sum *.gz > SHA256SUMS-for-gz
+sha256sum *.bz2 > SHA256SUMS-for-bz2
+chmod g+w $RDIR/SHA256SUMS-for-*
# Print download sizes
echo
diff --git a/tools/release_set_scripts/release-suites b/tools/release_set_scripts/release-suites
index eada539..86503e7 100755
--- a/tools/release_set_scripts/release-suites
+++ b/tools/release_set_scripts/release-suites
@@ -17,7 +17,7 @@
# 3) If not found in step -1- or -2-, complain.
#
# This script will modify the files on the ftp server and create
-# MD5SUMS-for-gz and MD5SUMS-for-bz2 files in the current directory.
+# SHA256SUMS-for-gz and SHA256SUMS-for-bz2 files in the current directory.
#
# Input is of the form
# release-suites <version> <datafile>
@@ -108,8 +108,8 @@ done
for suite in $RSUITES; do
cd $FTPROOT/$suite/$RMAJMIN/$RVERSION/sources
- md5sum *.gz */*.gz > MD5SUMS-for-gz 2> /dev/null
- md5sum *.bz2 */*.bz2 > MD5SUMS-for-bz2 2> /dev/null
+ sha256sum *.gz */*.gz > SHA256SUMS-for-gz 2> /dev/null
+ sha256sum *.bz2 */*.bz2 > SHA256SUMS-for-bz2 2> /dev/null
echo
echo "$suite $RVERSION statistics:"
echo " tar.gz: $(du -Lch *.tar.gz */*.tar.gz 2> /dev/null | grep total$)"
diff --git a/tools/smoketesting/convert-to-tarballs.py b/tools/smoketesting/convert-to-tarballs.py
index 4f0e66d..71698f6 100755
--- a/tools/smoketesting/convert-to-tarballs.py
+++ b/tools/smoketesting/convert-to-tarballs.py
@@ -54,14 +54,18 @@ import os.path
from posixpath import join as posixjoin # Handy for URLs
import subprocess
from ftplib import FTP
-import md5
from xml.dom import minidom, Node
from sgmllib import SGMLParser
import urllib2
import urlparse
-import sets
+if not hasattr(__builtins__, 'set'):
+ from sets import Set as set
import time
import socket
+try:
+ import hashlib
+except ImportError:
+ import md5 as hashlib
try: import psyco
except: pass
@@ -289,6 +293,10 @@ class TarballLocator:
self.have_sftp = self._test_sftp()
self.get_stats = determine_stats
self.local_only = local_only
+ if hasattr(hashlib, 'sha256'):
+ self.hash_algo = 'sha256'
+ else:
+ self.hash_algo = 'md5'
self.cache = {}
for key in mirrors.keys():
mirror = mirrors[key]
@@ -382,6 +390,7 @@ class TarballLocator:
def _get_tarball_stats(self, location, filename):
MAX_TRIES = 10
newfile = os.path.join(self.tarballdir, filename)
+ hashfile = newfile + '.' + self.hash_algo + 'sum'
if newfile.endswith('gz'):
flags = 'xfzO'
elif newfile.endswith('lzma'):
@@ -410,13 +419,15 @@ class TarballLocator:
tries -= 1
continue
- if os.path.exists(newfile + '.md5sum'):
- os.unlink(newfile + '.md5sum')
+ if os.path.exists(hashfile):
+ os.unlink(hashfile)
- if not os.path.exists(newfile + '.md5sum'):
- print 'Untarring archive to check integrity'
+ if not os.path.exists(hashfile):
+ time.sleep(1)
cmd = ['tar', flags, newfile]
- retcode = subprocess.call(cmd, stdout=file('/dev/null', 'w'))
+ devnull = file('/dev/null', 'wb')
+ retcode = subprocess.call(cmd, stdout=devnull)
+ devnull.close()
if retcode:
sys.stderr.write('Integrity check for ' + filename + ' failed!\n')
tries -= 1
@@ -428,19 +439,19 @@ class TarballLocator:
return '', ''
size = os.stat(newfile)[6]
- if not os.path.exists(newfile + '.md5sum'):
- sum = md5.new()
+ if not os.path.exists(hashfile):
+ sum = getattr(hashlib, self.hash_algo)()
fp = open(newfile, 'rb')
data = fp.read(32768)
while data:
sum.update(data)
data = fp.read(32768)
fp.close()
- md5sum = sum.hexdigest()
- file(newfile + '.md5sum', 'w').write(md5sum)
+ hash = sum.hexdigest()
+ file(hashfile, 'w').write(hash)
else:
- md5sum = file(newfile + '.md5sum').read()
- return md5sum, str(size)
+ hash = file(hashfile).read()
+ return '%s:%s' % (self.hash_algo, hash), str(size)
def _get_files_from_ftp(self, parsed_url, max_version):
ftp = FTP(parsed_url.hostname)
@@ -616,11 +627,11 @@ class TarballLocator:
# Only get tarball stats if we're not in a hurry
if self.get_stats:
- md5sum, size = self._get_tarball_stats(location, tarballs[index])
+ hash, size = self._get_tarball_stats(location, tarballs[index])
else:
- md5sum = 'blablablaihavenorealclue'
+ hash = 'md5:blablablaihavenorealclue'
size = 'HUGE'
- return location, version, md5sum, size
+ return location, version, hash, size
class ConvertToTarballs:
def __init__(self, tarballdir, version, sourcedir, options, force, versions_only, local_only):
@@ -675,13 +686,13 @@ class ConvertToTarballs:
baselocation = self.options.get_download_site(cvsroot, name)
max_version = self.options.get_version_limit(name)
real_name = self.options.get_real_name(name)
- location, version, md5sum, size = \
+ location, version, hash, size = \
self.locator.find_tarball(baselocation, real_name, max_version)
- print ' ', location, version, md5sum, size
+ print ' ', location, version, hash, size
tarball.setAttribute('version', version)
source_node.setAttribute('href', location)
source_node.setAttribute('size', size)
- source_node.setAttribute('md5sum', md5sum)
+ source_node.setAttribute('hash', hash)
self.all_tarballs.append(name)
self.all_versions.append(version)
except IOError:
@@ -693,7 +704,7 @@ class ConvertToTarballs:
tarball.setAttribute('version', 'EAT-YOUR-BRAAAAAANE')
source_node.setAttribute('href', 'http://somewhere.over.the.rainbow/where/bluebirds/die')
source_node.setAttribute('size', 'HUGE')
- source_node.setAttribute('md5sum', 'blablablaihavenorealclue')
+ source_node.setAttribute('hash', 'md5:blablablaihavenorealclue')
if revision and not max_version:
self.no_max_version.append(id)
return tarball
@@ -825,7 +836,7 @@ class ConvertToTarballs:
full_whitelist = []
for set in self.options.release_set:
full_whitelist.extend(set)
- unique = sets.Set(full_whitelist) - sets.Set(self.all_tarballs)
+ unique = set(full_whitelist) - set(self.all_tarballs)
for module in unique:
subdir = self.options.get_subdir(module)
if subdir is None:
@@ -835,9 +846,9 @@ class ConvertToTarballs:
baselocation = self.options.get_download_site('gnome.org', name)
max_version = self.options.get_version_limit(name)
real_name = self.options.get_real_name(name)
- location, version, md5sum, size = \
+ location, version, hash, size = \
self.locator.find_tarball(baselocation, real_name, max_version)
- print ' ', location, version, md5sum, size
+ print ' ', location, version, hash, size
self.all_tarballs.append(name)
self.all_versions.append(version)
except IOError:
@@ -858,7 +869,7 @@ class ConvertToTarballs:
full_whitelist = []
for set in self.options.release_set:
full_whitelist.extend(set)
- unique = sets.Set(full_whitelist) - sets.Set(self.all_tarballs)
+ unique = set(full_whitelist) - set(self.all_tarballs)
if not len(unique): return
[Date Prev][Date Next] [Thread Prev][Thread Next] [Thread Index] [Date Index] [Author Index]