gget r130 - trunk/gget



Author: johans
Date: Sat Dec 20 11:20:07 2008
New Revision: 130
URL: http://svn.gnome.org/viewvc/gget?rev=130&view=rev

Log:
Updated metalink backend to latest from SVN.

Modified:
   trunk/gget/metalink.py

Modified: trunk/gget/metalink.py
==============================================================================
--- trunk/gget/metalink.py	(original)
+++ trunk/gget/metalink.py	Sat Dec 20 11:20:07 2008
@@ -67,6 +67,12 @@
 #
 # CHANGELOG:
 #
+# Version 4.2
+# -----------
+# - PGP bugfix
+# - Jigdo to Metalink converter
+# - Other bugfixes
+#
 # Version 4.1
 # -----------
 # - Start of transition of how command line options are used
@@ -194,41 +200,36 @@
 except: pass
 try: import win32process
 except ImportError: pass
-import hashlib
-import xml.parsers.expat
-import time
-import zlib
-import optparse
-import os.path
-import os
+import copy
+import md5
 import sha
-import random
-import threading
+import bz2
+import gzip
+import httplib
 import binascii
-import md5
-import logging
+import urllib2
+import sys
 import gettext
+import BaseHTTPServer
 import socket
-import base64
-import re
-import sys
-import ftplib
-import bz2
+import locale
+import optparse
+import threading
+import zlib
 import os.path
+import ftplib
 import os
-import gettext
-import locale
-import sys
-import httplib
-import urllib2
-import copy
-import subprocess
+import xml.parsers.expat
 import math
+import logging
+import re
+import time
+import subprocess
 import StringIO
+import base64
 import urlparse
-import StringIO
-import gzip
-import locale
+import hashlib
+import random
 class Dummy:
     pass
 #!/usr/bin/env python
@@ -257,8 +258,8 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
 # Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/checker.py $
-# Last Updated: $Date: 2008-07-27 08:49:58 +0200 (son, 27 jul 2008) $
-# Version: $Rev: 203 $
+# Last Updated: $Date: 2008-10-21 05:06:03 +0200 (tis, 21 okt 2008) $
+# Version: $Rev: 270 $
 # Author(s): Neil McNab
 #
 # Description:
@@ -277,6 +278,12 @@
 
 
 
+NAME="Metalink Checker"
+VERSION="4.2"
+
+#WEBSITE="http://www.metalinker.org";
+WEBSITE="http://www.nabber.org/projects/checker/";
+
 MAX_REDIRECTS = 20
 MAX_THREADS = 10
 
@@ -302,123 +309,196 @@
 
 _ = translate()
 
-def check_metalink(src):
-    '''
-    Decode a metalink file, can be local or remote
-    First parameter, file to download, URL or file path to download from
-    Returns the results of the check in a dictonary
-    '''
-    src = download.complete_url(src)
-    datasource = urllib2.urlopen(src)
-    try:
-        metalink = xmlutils.Metalink()
-        metalink.parsehandle(datasource)
-    except:
-        print _("ERROR parsing XML.")
-        raise
-    datasource.close()
-    
-    if metalink.type == "dynamic":
-        origin = metalink.origin
-        if origin != src:
-            try:
-                return check_metalink(origin)
-            except:
-                print "Error downloading from origin %s, not using." % origin
-    
-    urllist = metalink.files
-    if len(urllist) == 0:
-        print _("No urls to download file from.")
-        return False
+ABOUT = NAME + "\n" + _("Version") + ": " + VERSION + "\n" + \
+                     _("Website") + ": " + WEBSITE + "\n\n" + \
+                     _("Copyright") + ": 2008 Neil McNab\n" + \
+                     _("License") + ": " + _("GNU General Public License, Version 2") + "\n\n" + \
+                     NAME + _(" comes with ABSOLUTELY NO WARRANTY.  This is free software, and you are welcome to redistribute it under certain conditions, see LICENSE.txt for details.")
 
-    results = {}
-    for filenode in urllist:
-        size = filenode.size
-        name = filenode.filename
-        print "=" * 79
-        print _("File") + ": %s " % name + _("Size") + ": %s" % size
-        results[name] = check_file_node(filenode)
-
-    return results
+class Checker:
+    def __init__(self):
+        self.threadlist = []
+        self.running = False
+        self.clear_results()
+        self.cancel = False
+        
+    def check_metalink(self, src):
+        '''
+        Decode a metalink file, can be local or remote
+        First parameter, file to download, URL or file path to download from
+        Returns the results of the check in a dictionary
+        '''
+        self.running = True
+        
+        src = download.complete_url(src)
+        datasource = urllib2.urlopen(src)
+        try:
+            metalink = xmlutils.Metalink()
+            metalink.parsehandle(datasource)
+        except:
+            print _("ERROR parsing XML.")
+            raise
+        datasource.close()
+        
+        if metalink.type == "dynamic":
+            origin = metalink.origin
+            if origin != src and origin != "":
+                try:
+                    result = self.check_metalink(origin)
+                    self.running = True
+                    return result
+                except:
+                    print "Error downloading from origin %s, not using." % origin
+        
+        urllist = metalink.files
+        if len(urllist) == 0:
+            print _("No urls to download file from.")
+            self.running = False
+            return False
 
-def check_process(headers, filesize):
-    size = "?"
-    
-    sizeheader = get_header(headers, "Content-Length")
+        #results = {}
+        for filenode in urllist:
+            size = filenode.size
+            name = filenode.filename
+            #print "=" * 79
+            #print _("File") + ": %s " % name + _("Size") + ": %s" % size
+            self.check_file_node(filenode)
 
-    if sizeheader != None and filesize != None:
-        if sizeheader == filesize:
-            size = _("OK")
-        else:
-            size = _("FAIL")
+        self.running = False
+        #return results
 
-    response_code = _("OK")
-    temp_code = get_header(headers, "Response")
-    if temp_code != None:
-        response_code = temp_code
-        
-    return (response_code, size)
+    def isAlive(self):
+        if self.running:
+            return True
+        for threadobj in self.threadlist:
+            if threadobj.isAlive():
+                return True
+        return False
 
-def get_header(textheaders, name):
-    textheaders = str(textheaders)
-    
-    headers = textheaders.split("\n")
-    headers.reverse()
-    for line in headers:
-        line = line.strip()
-        result = line.split(": ")
-        if result[0].lower() == name.lower():
-            return result[1]
+    def activeCount(self):
+        count = 0
+        for threadobj in self.threadlist:
+            if threadobj.isAlive():
+                count += 1
+        return count
 
-    return None
+    def _add_result(self, key1, key2, value):
+        try:
+            self.results[key1]
+        except KeyError:
+            self.results[key1] = {}
 
-def check_file_node(item):
-    '''
-    Downloads a specific version of a program
-    First parameter, file XML node
-    Second parameter, file path to save to
-    Third parameter, optional, force a new download even if a valid copy already exists
-    Fouth parameter, optional, progress handler callback
-    Returns dictionary of file paths with headers
-    '''
+        try:
+            self.new_results[key1]
+        except KeyError:
+            self.new_results[key1] = {}
+            
+        self.results[key1][key2] = value
+        self.new_results[key1][key2] = value
 
-    size = item.size
-    urllist = item.resources
-    if len(urllist) == 0:
-        print _("No urls to download file from.")
-        return False
+    def get_results(self, block=True):
+        while block and self.isAlive():
+            time.sleep(0.1)
+
+        return self.results
+
+    def get_new_results(self):
+        temp = self.new_results
+        self.new_results = {}
+        return temp
 
-    def thread(filename):
-        checker = URLCheck(filename)
-        headers = checker.info()
-        result[checker.geturl()] = check_process(headers, size)
-        redir = get_header(headers, "Redirected")
-        print "-" *79
-        print _("Checked") + ": %s" % filename
-        if redir != None:
-            print _("Redirected") + ": %s" % redir
-        print _("Response Code") + ": %s\t" % result[checker.geturl()][0] + _("Size Check") + ": %s" % result[checker.geturl()][1]
+    def stop(self):
+        self.cancel = True
+        while self.isAlive():
+            time.sleep(0.1)        
+
+    def clear_results(self):
+        self.stop()
+        self.threadlist = []
+        self.results = {}
+        self.new_results = {}
+        
+    def _check_process(self, headers, filesize):
+        size = "?"
+        
+        sizeheader = self._get_header(headers, "Content-Length")
+
+        if sizeheader != None and filesize != None:
+            if int(sizeheader) == int(filesize):
+                size = _("OK")
+            elif int(filesize) != 0:
+                size = _("FAIL")
+
+        response_code = _("OK")
+        temp_code = self._get_header(headers, "Response")
+        if temp_code != None:
+            response_code = temp_code
             
-    number = 0
-    filename = {}
+        return [response_code, size]
+
+    def _get_header(self, textheaders, name):
+        textheaders = str(textheaders)
         
-    count = 1
-    result = {}
-    while (count <= len(urllist)):
-        filename = urllist[number].url
-        #don't start too many threads at once
-        while threading.activeCount() > MAX_THREADS:
-            pass
-        mythread = threading.Thread(target = thread, args = [filename])
-        mythread.start()
-        #thread(filename)
-        number = (number + 1) % len(urllist)
-        count += 1
+        headers = textheaders.split("\n")
+        headers.reverse()
+        for line in headers:
+            line = line.strip()
+            result = line.split(": ")
+            if result[0].lower() == name.lower():
+                return result[1]
 
-    # don't return until all threads are finished (except the one main thread)
-    while threading.activeCount() > 1:
-        pass
-    return result
+        return None
+
+    def check_file_node(self, item):
+        '''
+        First parameter, file object
+        Returns dictionary of file paths with headers
+        '''
+        self.running = True
+        #self.results[item.name] = {}
+        size = item.size
+        urllist = item.resources
+        if len(urllist) == 0:
+            print _("No urls to download file from.")
+            self.running = False
+            return False
+
+        def thread(filename):
+            checker = URLCheck(filename)
+            headers = checker.info()
+            redir = self._get_header(headers, "Redirected")
+            result = self._check_process(headers, size)
+            result.append(redir)
+            #self.results[item.name][checker.geturl()] = result
+            self._add_result(item.name, filename, result)
+            #print "-" *79
+            #print _("Checked") + ": %s" % filename
+            #if redir != None:
+            #    print _("Redirected") + ": %s" % redir
+            #print _("Response Code") + ": %s\t" % self.results[item.name][filename][0] + _("Size Check") + ": %s" % self.results[item.name][filename][1]
+                
+        number = 0
+        filename = {}
+            
+        count = 1
+        result = {}
+        while (count <= len(urllist)):
+            filename = urllist[number].url
+            #don't start too many threads at once
+            while self.activeCount() > MAX_THREADS and not self.cancel:
+                time.sleep(0.1)
+            mythread = threading.Thread(target = thread, args = [filename], name = filename)
+            mythread.start()
+            self.threadlist.append(mythread)
+            #thread(filename)
+            number = (number + 1) % len(urllist)
+            count += 1
+
+        # don't return until all threads are finished (except the one main thread)
+        #while threading.activeCount() > 1:
+        #    pass
+        #return result
+        self.running = False
        
 class URLCheck:    
     def __init__(self, url):
@@ -449,10 +529,13 @@
             except socket.timeout:
                 self.infostring += _("Response") + ": " + _("Timeout") + "\r\n"
                 return
+            except socket.error, error:
+                self.infostring += _("Response") + ": " + _("Connection Error") + "\r\n"
+                return
             
             # handle redirects here and set self.url
             count = 0
-            while (resp.status == httplib.MOVED_PERMANENTLY or resp.status == httplib.FOUND) and count < MAX_REDIRECTS:
+            while (resp != None and (resp.status == httplib.MOVED_PERMANENTLY or resp.status == httplib.FOUND) and count < MAX_REDIRECTS):
                 url = resp.getheader("location")
                 #print _("Redirected from ") + self.url + " to %s." % url
                 self.infostring += _("Redirected") + ": %s\r\n" % url
@@ -464,19 +547,26 @@
                     port = urlparts.port
                 
                 conn = download.HTTPConnection(urlparts.hostname, urlparts.port)
-                conn.request("HEAD", url)
-                resp = conn.getresponse()
+                try:
+                    conn.request("HEAD", url)
+                    resp = conn.getresponse()
+                except socket.gaierror:
+                    resp = None
+                
                 count += 1
 
             self.url = url
-            if resp.status == httplib.OK:
+            if resp == None:
+                self.infostring += _("Response") + ": socket error\r\n"
+            elif resp.status == httplib.OK:
                 self.infostring += _("Response") + ": " + _("OK") + "\r\n"
             else:
                 self.infostring += _("Response") + ": %s %s\r\n" % (resp.status, resp.reason)
             
             # need to convert list into string
-            for header in resp.getheaders():
-                self.infostring += header[0] + ": " + header[1] + "\r\n"
+            if resp != None:
+                for header in resp.getheaders():
+                    self.infostring += header[0] + ": " + header[1] + "\r\n"
 
             conn.close()
                 
@@ -554,10 +644,13 @@
             except socket.error:
                 self.infostring += _("Response") + ": " + _("Connection refused") + "\r\n"
                 return
-
+            except (ftplib.error_perm, ftplib.error_temp), error:
+                self.infostring += _("Response") + ": %s\r\n" % error.message
+                return
+            
             try:
                 ftpobj.login(username, password)
-            except (ftplib.error_perm), error:
+            except (ftplib.error_perm, ftplib.error_temp), error:
                 self.infostring += _("Response") + ": %s\r\n" % error.message
                 
             if ftpobj.exist(url):
@@ -587,14 +680,15 @@
         # need response and content-length for HTTP
         return self.infostring
 checker = Dummy()
+checker.ABOUT = ABOUT
+checker.Checker = Checker
 checker.MAX_REDIRECTS = MAX_REDIRECTS
 checker.MAX_THREADS = MAX_THREADS
+checker.NAME = NAME
 checker.URLCheck = URLCheck
+checker.VERSION = VERSION
+checker.WEBSITE = WEBSITE
 checker._ = _
-checker.check_file_node = check_file_node
-checker.check_metalink = check_metalink
-checker.check_process = check_process
-checker.get_header = get_header
 checker.translate = translate
 #!/usr/bin/env python
 ########################################################################
@@ -622,7 +716,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
 # Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/download.py $
-# Last Updated: $Date: 2008-08-07 02:05:01 +0200 (tor, 07 aug 2008) $
+# Last Updated: $Date: 2008-11-02 03:05:35 +0100 (son, 02 nov 2008) $
 # Author(s): Neil McNab
 #
 # Description:
@@ -651,10 +745,11 @@
 ########################################################################
 
 #import utils
+#import thread
 #import logging
 
 
-USER_AGENT = "Metalink Checker/4.1 +http://www.nabber.org/projects/";
+USER_AGENT = "Metalink Checker/4.2 +http://www.nabber.org/projects/";
 
 SEGMENTED = True
 LIMIT_PER_HOST = 1
@@ -686,6 +781,10 @@
 FTP_PROXY=""
 HTTPS_PROXY=""
 
+# Streaming server settings to use
+HOST = "localhost"
+PORT = None
+
 # Protocols to use for segmented downloads
 PROTOCOLS=("http","https","ftp")
 #PROTOCOLS=("ftp")
@@ -702,17 +801,27 @@
     blanklines = 1
     
     if value == None:
-        tempresult = os.popen2("reg query \"%s\"" % keyname)
+        tempresult = os.popen2("reg.exe query \"%s\"" % keyname)
     else:
-        tempresult = os.popen2("reg query \"%s\" /v \"%s\"" % (keyname, value))
+        tempresult = os.popen2("reg.exe query \"%s\" /v \"%s\"" % (keyname, value))
     stdout = tempresult[1]
     stdout = stdout.readlines()
 
+    # handle case when reg.exe isn't in path
+    if len(stdout) == 0:
+        if value == None:
+            tempresult = os.popen2(os.environ["WINDIR"] + "\\system32\\reg.exe query \"%s\"" % keyname)
+        else:
+            tempresult = os.popen2(os.environ["WINDIR"] + "\\system32\\reg.exe query \"%s\" /v \"%s\"" % (keyname, value))
+        stdout = tempresult[1]
+        stdout = stdout.readlines()
+
     # For Windows XP, this was changed in Vista!
-    if stdout[1].startswith("! REG.EXE"):
+    if len(stdout) > 0 and stdout[1].startswith("! REG.EXE"):
         blanklines += 2
         if value == None:
             blanklines += 2
+
     stdout = stdout[blanklines:]
     
     return stdout
@@ -1108,7 +1217,7 @@
         self.resume = FileResume(filename + ".temp")
         self.resume.add_block(0)
     
-        self.data = open(filename, 'wb')
+        self.data = ThreadSafeFile(filename, 'wb+')
         
         try:
             self.temp = urlopen(remote_file)
@@ -1122,11 +1231,21 @@
             self.size = int(headers['Content-Length'])
         except KeyError:
             self.size = 0
-        
+
+        self.streamserver = None
+        if PORT != None:
+            self.streamserver = StreamServer((HOST, PORT), StreamRequest)
+            self.streamserver.set_stream(self.data)
+        
+            #thread.start_new_thread(self.streamserver.serve, ())
+            mythread = threading.Thread(target=self.streamserver.serve)
+            mythread.start()
+ 
     def close_handler(self):
         self.resume.complete()
         try:
-            self.data.close()
+            if PORT == None:
+                self.data.close()
             self.temp.close()
         except: pass
         
@@ -1141,11 +1260,16 @@
             return False
         
         block = self.temp.read(self.block_size)
+        self.data.acquire()
         self.data.write(block)
+        self.data.release()
         self.counter += 1
         self.total += len(block)
 
         self.resume.set_block_size(self.counter * self.block_size)
+
+        if self.streamserver != None:        
+            self.streamserver.set_length(self.counter * self.block_size)
                         
         if self.status_handler != None:
             self.status_handler(self.total, 1, self.size)
@@ -1791,6 +1915,15 @@
             
         self.resume = FileResume(self.localfile + ".temp")
 
+        self.streamserver = None
+        if PORT != None:
+            self.streamserver = StreamServer((HOST, PORT), StreamRequest)
+            self.streamserver.set_stream(self.f)
+        
+            #thread.start_new_thread(self.streamserver.serve, ())
+            mythread = threading.Thread(target=self.streamserver.serve)
+            mythread.start()
+
     def get_chunksum(self, index):
         mylist = {}
         try:
@@ -1889,6 +2022,11 @@
         '''
         try:
             bytes = self.byte_total()
+
+            index = self.get_chunk_index()
+            if index != None and index > 0 and self.streamserver != None:
+                self.streamserver.set_length((index - 1) * self.chunk_size)
+            
             if self.oldtime == None:
                 self.start_bitrate(bytes)
                 
@@ -1900,6 +2038,7 @@
             
             self.update()
             self.resume.extend_blocks(self.chunk_list())
+
             if bytes >= self.size and self.active_count() == 0:
                 self.resume.complete()
                 self.close_handler()
@@ -1967,7 +2106,7 @@
     def get_chunk_index(self):
         i = -1
         for i in range(len(self.chunks)):
-            if (self.chunks[i].error != None):
+            if (self.chunks[i] == None or self.chunks[i].error != None):
                 return i
             # weed out dead segments that have temp errors and reassign
             if (not self.chunks[i].isAlive() and self.chunks[i].bytes == 0):
@@ -2047,7 +2186,7 @@
 
     def remove_errors(self):
         for item in self.chunks:
-            if item.error != None:
+            if item != None and item.error != None:
                 #print item.error
                 if item.error == httplib.MOVED_PERMANENTLY or item.error == httplib.FOUND:
                     #print "location:", item.location
@@ -2092,7 +2231,8 @@
         return chunks
     
     def close_handler(self):
-        self.f.close()
+        if PORT == None:
+            self.f.close()
         for host in self.sockets:
             host.close()
 
@@ -2526,9 +2666,10 @@
             self.error = _("socket error")
             self.response = None
             return
-        except Exception, e:
+        except TypeError:
             self.response = None
             return
+
         if len(data) == 0:
             return
 
@@ -2724,6 +2865,82 @@
 
     def close(self):
         return self.conn.close()
+
+class StreamRequest(BaseHTTPServer.BaseHTTPRequestHandler):
+    def do_GET(self):
+        self.send_response(200)
+        self.send_header("Content-Type", "application/octet-stream")
+        self.send_header("Cache-Control", "no-cache")
+        self.end_headers()
+
+        start = 0
+        while True:
+            if self.server.fileobj != None and (self.server.length - start) > 0:
+                try:
+                    self.server.fileobj.acquire()
+                    loc = self.server.fileobj.tell()
+                    self.server.fileobj.seek(start, 0)
+                    size = self.server.length - start
+                    
+                    data = self.server.fileobj.read(size)
+                    if len(data) > 0:
+                        self.wfile.write(data)
+
+                    self.server.fileobj.seek(loc, 0)
+                    self.server.fileobj.release()
+                    start += len(data)
+                except ValueError:
+                    break
+            time.sleep(.1)
+
+class StreamServer(BaseHTTPServer.HTTPServer):
+    def __init__(self, *args):
+        BaseHTTPServer.HTTPServer.__init__(self, *args)
+        self.fileobj = None
+        self.length = 0
+
+    # based on: http://code.activestate.com/recipes/425210/
+    def server_bind(self):
+        BaseHTTPServer.HTTPServer.server_bind(self)
+        self.socket.setblocking(0)
+        self.socket.settimeout(1)
+        self.run = True
+
+    def get_request(self):
+        while self.run:
+            try:
+                sock, addr = self.socket.accept()
+                sock.setblocking(0)
+                sock.settimeout(30)
+                return (sock, addr)
+            except socket.timeout:
+                pass
+
+    def stop(self):
+        self.run = False
+
+    def serve(self):
+        try:
+            while self.run:
+                self.handle_request()
+        except KeyboardInterrupt:
+            print "Server Interrupted!"
+            self.fileobj.close()
+            self.stop()
+        
+    def set_stream(self, fileobj):
+        self.fileobj = fileobj
+
+    def set_length(self, length):
+        self.length = int(length)
+
+##myserver = StreamServer(("localhost", 8080), StreamRequest)
+##myserver.set_stream(ThreadSafeFile("C:\\library\\avril\\Avril Lavigne - Complicated.mpg", "rb"))
+##myserver.set_length(50000000)
+##serverthread = threading.Thread(target=myserver.serve_forever)
+##serverthread.start()
+
+#myserver.serve_forever()
 download = Dummy()
 download.CONNECT_RETRY_COUNT = CONNECT_RETRY_COUNT
 download.COUNTRY = COUNTRY
@@ -2733,6 +2950,7 @@
 download.FileResume = FileResume
 download.Ftp_Host = Ftp_Host
 download.Ftp_Host_Segment = Ftp_Host_Segment
+download.HOST = HOST
 download.HOST_LIMIT = HOST_LIMIT
 download.HTTPConnection = HTTPConnection
 download.HTTPSConnection = HTTPSConnection
@@ -2753,9 +2971,12 @@
 download.PGP_KEY_DIR = PGP_KEY_DIR
 download.PGP_KEY_EXTS = PGP_KEY_EXTS
 download.PGP_KEY_STORE = PGP_KEY_STORE
+download.PORT = PORT
 download.PROTOCOLS = PROTOCOLS
 download.SEGMENTED = SEGMENTED
 download.Segment_Manager = Segment_Manager
+download.StreamRequest = StreamRequest
+download.StreamServer = StreamServer
 download.ThreadSafeFile = ThreadSafeFile
 download.URLManager = URLManager
 download.USER_AGENT = USER_AGENT
@@ -3242,7 +3463,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
 # Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/xmlutils.py $
-# Last Updated: $Date: 2008-08-07 02:05:01 +0200 (tor, 07 aug 2008) $
+# Last Updated: $Date: 2008-10-21 05:06:03 +0200 (tis, 21 okt 2008) $
 # Author(s): Hampus Wessman, Neil McNab
 #
 # Description:
@@ -3684,18 +3905,18 @@
         try:
             if name == "url" and self.parent[-1].name == "resources":
                 fileobj = self.files[-1]
-                fileobj.add_url(self.data, attrs=tag.attrs)
+                fileobj.add_url(self.data.strip(), attrs=tag.attrs)
             elif name == "tags" and self.parent[-1].name != "file":
-                setattr(self, "tags", self.data)
+                setattr(self, "tags", self.data.strip())
             elif name in ("name", "url"):
-                setattr(self, self.parent[-1].name + "_" + name, self.data)
+                setattr(self, self.parent[-1].name + "_" + name, self.data.strip())
             elif name in ("identity", "copyright", "description", "version", "upgrade"):
-                setattr(self, name, self.data)
+                setattr(self, name, self.data.strip())
             elif name == "hash" and self.parent[-1].name == "verification":
                 hashtype = tag.attrs["type"]
                 fileobj = self.files[-1]
                 #setattr(fileobj, "hash_" + hashtype, self.data)
-                fileobj.hashlist[hashtype] = self.data
+                fileobj.hashlist[hashtype] = self.data.strip()
             elif name == "signature" and self.parent[-1].name == "verification":
                 hashtype = tag.attrs["type"]
                 fileobj = self.files[-1]
@@ -3707,18 +3928,18 @@
                 fileobj.piecelength = tag.attrs["length"]
             elif name == "hash" and self.parent[-1].name == "pieces":
                 fileobj = self.files[-1]
-                fileobj.pieces.append(self.data)
+                fileobj.pieces.append(self.data.strip())
             elif name in ("os", "language", "tags"):
                 fileobj = self.files[-1]
-                setattr(fileobj, name, self.data)
+                setattr(fileobj, name, self.data.strip())
             elif name in ("size"):
                 fileobj = self.files[-1]
-                if self.data != "":
-                    setattr(fileobj, name, int(self.data))
+                if self.data.strip() != "":
+                    setattr(fileobj, name, int(self.data.strip()))
         except IndexError: pass
             
     def char_data(self, data):
-        self.data += data.strip()
+        self.data += data #.strip()
 
     def parsefile(self, filename):
         handle = open(filename, "rb")
@@ -3908,7 +4129,7 @@
         while data.strip() != "":
             data = handle.readline()
 
-        data = handle.read(1024)
+        data = handle.read(1024*1024)
         text = ""
 
         #decompress = bz2.BZ2Decompressor()
@@ -3933,7 +4154,7 @@
             if bzip or gzip:
                 #newdata = decompress.decompress(data)
                 text += data
-                data = handle.read(1024)
+                data = handle.read(1024*1024)
             else:
                 data = handle.readline()
         handle.close()
@@ -3955,7 +4176,8 @@
             hexhash = fileobj.get_checksums()["md5"]
             loc = text.find(binascii.unhexlify(hexhash))
             if loc != -1:
-                #print "FOUND:", fileobj.filename
+                if fileobj.filename.find("dists") != -1:
+                    print "FOUND:", fileobj.filename
                 found[loc] = fileobj.filename
 
         decompressor = None
@@ -3974,6 +4196,8 @@
             #print "Adding %s to image..." % found[loc]
             #sys.stdout.write(".")
             lead = decompressor.decompress(text[start:loc])
+            if found[loc].find("dists") != -1:
+                print "Writing:", found[loc]
             filedata = open(found[loc], "rb").read()
             handle.write(lead + filedata)
             start = loc + 16
@@ -4043,8 +4267,8 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
 # Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/console.py $
-# Last Updated: $Date: 2008-07-30 18:52:08 +0200 (ons, 30 jul 2008) $
-# Version: $Rev: 204 $
+# Last Updated: $Date: 2008-11-25 06:50:27 +0100 (tis, 25 nov 2008) $
+# Version: $Rev: 281 $
 # Author(s): Neil McNab
 #
 # Description:
@@ -4059,9 +4283,6 @@
 
 
 
-# DO NOT CHANGE
-VERSION="Metalink Checker Version 4.1"
-
 
 def translate():
     '''
@@ -4091,7 +4312,7 @@
     '''
     # Command line parser options.
     usage = "usage: %prog [-c|-d|-j] [options] arg1 arg2 ..."
-    parser = optparse.OptionParser(version=VERSION, usage=usage)
+    parser = optparse.OptionParser(version=checker.ABOUT, usage=usage)
     parser.add_option("--download", "-d", action="store_true", dest="download", help=_("Actually download the file(s) in the metalink"))
     parser.add_option("--check", "-c", action="store_true", dest="check", help=_("Check the metalink file URLs"))
     parser.add_option("--file", "-f", dest="filevar", metavar="FILE", help=_("Metalink file to check or file to download"))
@@ -4104,6 +4325,7 @@
     parser.add_option("--pgp-store", "-p", dest="pgpstore", metavar="FILE", help=_("File with the PGP keys that you trust (default: ~/.gnupg/pubring.gpg)"))
     parser.add_option("--gpg-binary", "-g", dest="gpg", help=_("(optional) Location of gpg binary path if not in the default search path"))
     parser.add_option("--convert-jigdo", "-j", action="store_true", dest="jigdo", help=_("Convert Jigdo format file to Metalink"))
+    parser.add_option("--port", dest="port", help=_("Streaming server port to use (default: No streaming server)"))
     (options, args) = parser.parse_args()
 
     if options.filevar == None and len(args) == 0:
@@ -4122,6 +4344,8 @@
         download.PGP_KEY_DIR = options.pgpdir
     if options.pgpstore != None:
         download.PGP_KEY_STORE = options.pgpstore
+    if options.port != None:
+        download.PORT = int(options.port)
     if options.gpg != None:
         GPG.DEFAULT_PATH.insert(0, options.gpg)
         
@@ -4138,11 +4362,14 @@
 
     if options.check:
         # remove filevar eventually
-        results = checker.check_metalink(options.filevar)
+        mcheck = checker.Checker()
+        mcheck.check_metalink(options.filevar)
+        results = mcheck.get_results()
         print_totals(results)
         for item in args:
             results = checker.check_metalink(item)
             print_totals(results)
+        return
             
     if options.download:
         # remove filevar eventually
@@ -4163,16 +4390,20 @@
     # remove eventually
     elif not options.check:
         if options.filevar != None:
-            results = checker.check_metalink(options.filevar)
+            mcheck = checker.Checker()
+            mcheck.check_metalink(options.filevar)
+            results = mcheck.get_results()
             print_totals(results)
         for item in args:
-            results = checker.check_metalink(item)
-            print_totals(results)            
-
+            mcheck = checker.Checker()
+            mcheck.check_metalink(item)
+            results = mcheck.get_results()
+            print_totals(results)
+    
 def print_totals(results):
     for key in results.keys():
         print "=" * 79
-        print _("Summary for") + ":", key
+        print _("Summary for file") + ":", key
 
         status_count = 0
         size_count = 0
@@ -4189,6 +4420,14 @@
             if size == "FAIL":
                 size_bool = True
 
+            redir = results[key][subkey][2]
+
+            print "-" * 79
+            print _("Checked") + ": %s" % subkey
+            if redir != None:
+                print _("Redirected") + ": %s" % redir
+            print _("Response Code") + ": %s\t" % status + _("Size Check") + ": %s" % size
+                
             if size_bool:
                 size_count += 1
             if status_bool:
@@ -4329,7 +4568,6 @@
     run()
 console = Dummy()
 console.ProgressBar = ProgressBar
-console.VERSION = VERSION
 console._ = _
 console.print_totals = print_totals
 console.run = run



[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]