gget r33 - trunk/gget
- From: johans svn gnome org
- To: svn-commits-list gnome org
- Subject: gget r33 - trunk/gget
- Date: Sat, 19 Jul 2008 14:19:34 +0000 (UTC)
Author: johans
Date: Sat Jul 19 14:19:33 2008
New Revision: 33
URL: http://svn.gnome.org/viewvc/gget?rev=33&view=rev
Log:
Code dump touching a lot of things. Added details dialog for downloads.
Added:
trunk/gget/DetailsDialog.py
Modified:
trunk/gget/Download.py
trunk/gget/DownloadList.py
trunk/gget/DownloadManager.py
trunk/gget/GUI.py
trunk/gget/MainWindow.py
trunk/gget/metalink.py
Added: trunk/gget/DetailsDialog.py
==============================================================================
--- (empty file)
+++ trunk/gget/DetailsDialog.py Sat Jul 19 14:19:33 2008
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2008 Johan Svedberg <johan svedberg com>
+
+# This file is part of gget.
+
+# gget is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# gget is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with gget; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
+
+from gettext import gettext as _
+
+import gtk
+
+import GUI
+import Utils
+from gget import NAME
+
+class DetailsDialog:
+ def __init__(self, download):
+ self.download = download
+ download.connect("update", self.__download_update)
+
+ self.__get_widgets()
+ self.__connect_widgets()
+
+ if download.pixbuf:
+ self.image.set_from_pixbuf(download.pixbuf)
+ else:
+ self.image.set_from_icon_name(NAME.lower(), gtk.ICON_SIZE_DIALOG)
+
+ self.uri_label.set_text(download.uri)
+ self.name_label.set_text(download.file_name)
+ self.folder_label.set_text(download.path)
+ self.current_size_label.set_text("%s (%s bytes)" % \
+ (Utils.get_readable_size(download.current_size),
+ download.current_size))
+ self.total_size_label.set_text("%s (%s bytes)" % \
+ (Utils.get_readable_size(download.total_size),
+ download.total_size))
+ self.mime_type_label.set_text(download.mime_type)
+ self.date_started_label.set_text(str(download.get_date_str("started")))
+ self.date_completed_label.set_text(str(download.get_date_str("completed")))
+
+ self.dialog.show()
+
+ def __get_widgets(self):
+ xml = gtk.glade.XML(GUI.glade_file, domain=NAME.lower())
+
+ self.dialog = xml.get_widget("details_dialog")
+
+ self.image = xml.get_widget("details_image")
+
+ self.uri_label = xml.get_widget("uri_label")
+ self.name_label = xml.get_widget("name_label")
+ self.folder_label = xml.get_widget("folder_label")
+ self.current_size_label = xml.get_widget("current_size_label")
+ self.total_size_label = xml.get_widget("total_size_label")
+ self.mime_type_label = xml.get_widget("mime_type_label")
+ self.date_started_label = xml.get_widget("date_started_label")
+ self.date_completed_label = xml.get_widget("date_completed_label")
+
+ self.close_button = xml.get_widget("details_close_button")
+
+ def __connect_widgets(self):
+ self.dialog.connect("delete-event", self.__dialog_delete)
+ self.dialog.connect("response", self.__dialog_response)
+
+ self.close_button.connect("clicked", self.__close_button_clicked)
+
+ def __dialog_response(self, dialog, response):
+ self.dialog.destroy()
+
+ def __dialog_delete(self, dialog, event):
+ return True
+
+ def __download_update(self, download, block_count, block_size, total_size):
+ self.current_size_label.set_text("%s (%s bytes)" %
+ (Utils.get_readable_size(download.current_size),
+ download.current_size))
+
+ def __close_button_clicked(self, button):
+ self.dialog.destroy()
+
+# vim: set sw=4 et sts=4 tw=79 fo+=l:
Modified: trunk/gget/Download.py
==============================================================================
--- trunk/gget/Download.py (original)
+++ trunk/gget/Download.py Sat Jul 19 14:19:33 2008
@@ -19,6 +19,8 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import os.path
+import sys
+import datetime
from gettext import gettext as _
import gtk
@@ -34,7 +36,7 @@
CONNECTING = 0
DOWNLOADING = 1
-STOPPED = 2
+CANCELED = 2
PAUSED = 3
COMPLETED = 4
ERROR = 5
@@ -42,10 +44,10 @@
class Download(gobject.GObject):
__gsignals__ = {"update": (gobject.SIGNAL_RUN_LAST, None, (int, int,
int)),
- "speed-changed": (gobject.SIGNAL_RUN_LAST, None, (int,)),
+ "bitrate": (gobject.SIGNAL_RUN_LAST, None, (float,)),
"status-changed": (gobject.SIGNAL_RUN_LAST, None, (int,))}
- def __init__(self, uri, path):
+ def __init__(self, uri, path, date_started="", date_completed=""):
gobject.GObject.__init__(self)
self.config = Configuration()
@@ -67,28 +69,75 @@
self.file = os.path.join(path, self.file_name)
- self.total_size = 0
- self.old_total_size = 0
+ self.canceled = False
+ self.paused = False
+
self.block_count = 0
self.block_size = 0
+ self.current_size = self.__get_current_size()
+ self.total_size = 0
+ self.old_total_size = 0
self.percent_complete = 0
+ self.bit_rate = 0.0
+
self.status = -1
self.mime_type = gnomevfs.get_file_mime_type(self.file_name)
self.pixbuf = GUI.load_icon_from_mime_type(self.mime_type, 32)
+ if date_started == "":
+ self.date_started = datetime.datetime.now()
+ else:
+ self.date_started = datetime.datetime.strptime(date_started,
+ "%Y-%m-%d %H:%M:%S")
+
+ if date_completed == "":
+ self.date_completed = None
+ else:
+ self.date_completed = datetime.datetime.strptime(date_completed,
+ "%Y-%m-%d %H:%M:%S")
+
self.connect("status-changed", self.__status_changed)
def __str__(self):
return self.uri
+ def __get_current_size(self):
+ try:
+ file_info = gnomevfs.get_file_info(self.file)
+ return file_info.size
+ except:
+ return 0
+
+ def get_date_str(self, date):
+ if date == "started":
+ return self.date_started.strftime("%Y-%m-%d %H:%M:%S")
+ else:
+ if self.date_completed:
+ return self.date_completed.strftime("%Y-%m-%d %H:%M:%S")
+ else:
+ return ""
+
+ def cancel(self):
+ """Callback which returns True to cancel the download, False
+ otherwise."""
+ return self.canceled
+
+ def pause(self):
+ """Callback which returns True to pause the download and False to
+ continue/resume."""
+ return self.paused
+
def update(self, block_count, block_size, total_size):
+ """Callback with count of blocks transferred so far, block size in
+ bytes and the total size of the file in bytes."""
Utils.debug_print("Download.update called with block_count: %s \
block_size: %s total_size: %s" % (block_count, block_size,
total_size))
self.block_count = block_count
self.block_size = block_size
+ self.current_size = block_count * block_size
self.old_total_size = self.total_size
self.total_size = total_size
@@ -99,36 +148,51 @@
except ZeroDivisionError:
self.percent_complete = 0
- if self.status != DOWNLOADING and self.percent_complete > 0:
- self.set_status(DOWNLOADING)
-
if self.percent_complete > 100:
self.percent_complete = 100
+ if self.status != DOWNLOADING and self.bitrate != 0.0:
+ self.set_status(DOWNLOADING)
+
if self.percent_complete == 100:
self.set_status(COMPLETED)
Utils.debug_print("Percent complete: %s" % self.percent_complete)
- gtk.gdk.threads_enter()
self.emit("update", int(block_count), int(block_size), int(total_size))
- gtk.gdk.threads_leave()
+
+ def bitrate(self, bit_rate):
+ """Callback with the download bitrate in kilobytes per second."""
+ self.bit_rate = bit_rate
+ self.emit("bitrate", bit_rate)
+
+ def set_canceled(self, canceled):
+ self.canceled = canceled
+ if canceled:
+ self.set_status(CANCELED)
+ else:
+ self.set_status(DOWNLOADING)
+
+ def set_paused(self, paused):
+ self.paused = paused
+ if paused:
+ self.set_status(PAUSED)
+ else:
+ self.set_status(DOWNLOADING)
def set_status(self, status):
self.status = status
Utils.debug_print("Download status for %s changed to: %s (%s)" % (self,
self.get_status_string(), status))
- gtk.gdk.threads_enter()
self.emit("status-changed", status)
- gtk.gdk.threads_leave()
def get_status_string(self, status=None):
if self.status == CONNECTING:
return _("Connecting")
elif self.status == DOWNLOADING:
return _("Downloading")
- elif self.status == STOPPED:
- return _("Stopped")
+ elif self.status == CANCELED:
+ return _("Canceled")
elif self.status == PAUSED:
return _("Paused")
elif self.status == COMPLETED:
@@ -140,6 +204,8 @@
def __status_changed(self, download, status):
if status == COMPLETED:
+ self.date_completed = datetime.datetime.now()
+
if self.config.show_notifications:
Notification(download)
Modified: trunk/gget/DownloadList.py
==============================================================================
--- trunk/gget/DownloadList.py (original)
+++ trunk/gget/DownloadList.py Sat Jul 19 14:19:33 2008
@@ -67,8 +67,11 @@
file_name = download_element.findtext("filename")
total_size = download_element.findtext("size")
status = download_element.findtext("status")
+ date_started = download_element.findtext("date_started")
+ date_completed = download_element.findtext("date_completed")
- download = Download.Download(uri, path)
+ download = Download.Download(uri, path, date_started,
+ date_completed)
download.file_name = file_name
download.total_size = int(total_size)
download.status = int(status)
@@ -117,6 +120,12 @@
size_element.text = download.total_size
status_element = ET.SubElement(download_element, "status")
status_element.text = str(download.status)
+ date_started_element = ET.SubElement(download_element, "date_started")
+ date_started_element.text = download.get_date_str("started")
+ date_completed_element = ET.SubElement(download_element,
+ "date_completed")
+ date_completed_element.text = download.get_date_str("completed")
+
self.__save_xml()
def __download_update(self, download, block_count, block_size,
@@ -168,6 +177,8 @@
def __save_xml(self):
"""Adds a header and indents the xml tree before saving it to disk."""
+ Utils.debug_print("Saved download list to: %s" %
+ self.download_file_path)
file = open(self.download_file_path, "w")
file.write(XML_HEADER)
Utils.indent(self.tree.getroot())
Modified: trunk/gget/DownloadManager.py
==============================================================================
--- trunk/gget/DownloadManager.py (original)
+++ trunk/gget/DownloadManager.py Sat Jul 19 14:19:33 2008
@@ -73,26 +73,36 @@
metalink.HTTP_PROXY = "http://%s:%s" % (self.config.proxy_host, self.config.proxy_port)
def download_added(self, download_list, download):
+ """Called when a new download is added to DownloadList. Starts the
+        download if it's not already completed."""
if not download.status == Download.COMPLETED:
self.start_download(download)
def start_download(self, download):
+ """Starts a download in a new thread."""
Utils.debug_print("Starting download %s" % download)
- result = thread.start_new_thread(self.__start_download_in_thread,
- (download,))
+ thread.start_new_thread(self.__start_download, (download,))
+ # self.__start_download(download)
self.emit("download-started", (download))
- # self.__start_download_in_thread(download)
- if not result:
- download.set_status(Download.ERROR)
- print "Failed downloading of file %s" % download.uri
- def __start_download_in_thread(self, download):
+ def __start_download(self, download):
# Python 2.5 seems to have a bug: sys.excepthook is not call from code
# in a thread, see http://spyced.blogspot.com/2007/06/workaround-for-sysexcepthook-bug.html
# sys.excepthook(*sys.exc_info())
download.set_status(Download.CONNECTING)
- metalink.get(download.uri, download.path, handler=download.update)
+ try:
+ result = metalink.get(download.uri, download.path,
+ handlers={"status": download.update,
+ "bitrate": download.bitrate,
+ "cancel": download.cancel,
+ "pause": download.pause})
+ except Exception, e:
+ pass
+
+ if not result:
+ download.set_status(Download.ERROR)
+ print "Failed downloading of file %s" % download.uri
def set_proxy(self, protocol, proxy):
"""Sets the proxy to use for the specified protocol."""
Modified: trunk/gget/GUI.py
==============================================================================
--- trunk/gget/GUI.py (original)
+++ trunk/gget/GUI.py Sat Jul 19 14:19:33 2008
@@ -95,6 +95,16 @@
return value
return None
+def get_selected_values(treeview):
+ values = []
+ selection = treeview.get_selection()
+ (model, paths) = selection.get_selected_rows()
+ for path in paths:
+ iter = model.get_iter(path)
+ if iter:
+ values.append(model.get_value(iter, 0))
+ return values
+
def open_file_on_screen(file, screen):
uri = gnomevfs.make_uri_from_input_with_dirs(file, 2)
gnome.ui.url_show_on_screen(uri, screen)
Modified: trunk/gget/MainWindow.py
==============================================================================
--- trunk/gget/MainWindow.py (original)
+++ trunk/gget/MainWindow.py Sat Jul 19 14:19:33 2008
@@ -35,6 +35,7 @@
from AboutDialog import AboutDialog
from AddDownloadDialog import AddDownloadDialog
from PreferencesDialog import PreferencesDialog
+from DetailsDialog import DetailsDialog
from gget import NAME
# D&D targets
@@ -101,6 +102,8 @@
self.quit_menu_item = xml.get_widget("quit_menu_item")
# Edit menu
+ self.select_all_menu_item = xml.get_widget("select_all_menu_item")
+ self.unselect_all_menu_item = xml.get_widget("unselect_all_menu_item")
self.preferences_menu_item = xml.get_widget("preferences_menu_item")
# Show menu
@@ -124,6 +127,7 @@
self.cancel_tool_button = xml.get_widget("cancel_tool_button")
self.remove_tool_button = xml.get_widget("remove_tool_button")
self.clear_tool_button = xml.get_widget("clear_tool_button")
+ self.details_tool_button = xml.get_widget("details_tool_button")
self.downloads_treeview = xml.get_widget("downloads_treeview")
@@ -133,18 +137,20 @@
"""Constructs the treeview containing downloads."""
self.downloads_model = gtk.ListStore(object)
self.downloads_treeview.set_model(self.downloads_model)
+ self.downloads_treeview_selection = self.downloads_treeview.get_selection()
+ self.downloads_treeview_selection.set_mode(gtk.SELECTION_MULTIPLE)
cell_renderer_pixbuf = gtk.CellRendererPixbuf()
cell_renderer_pixbuf.props.xpad = 3
- cell_renderer_pixbuf.props.ypad = 3
+ # cell_renderer_pixbuf.props.ypad = 3
cell_renderer_text = gtk.CellRendererText()
cell_renderer_text.props.xpad = 3
- cell_renderer_text.props.ypad = 3
+ # cell_renderer_text.props.ypad = 3
cell_renderer_progress = gtk.CellRendererProgress()
- cell_renderer_progress.props.xpad = 3
- cell_renderer_progress.props.ypad = 3
+ # cell_renderer_progress.props.xpad = 3
+ cell_renderer_progress.props.ypad = 8
# Name column
self.name_treeview_column = gtk.TreeViewColumn(_("Name"))
@@ -231,6 +237,16 @@
self.open_folder_imi.show()
self.downloads_treeview_menu.append(self.open_folder_imi)
+ separator_imi2 = gtk.SeparatorMenuItem()
+ separator_imi2.show()
+ self.downloads_treeview_menu.append(separator_imi2)
+
+ self.details_imi = gtk.ImageMenuItem(_("Details"))
+ self.details_imi.get_image().set_from_stock(gtk.STOCK_INFO,
+ gtk.ICON_SIZE_MENU)
+ self.details_imi.show()
+ self.downloads_treeview_menu.append(self.details_imi)
+
def __image_cell_data_func(self, column, cell, model, iter):
"""Data function for the image of the download."""
download = model.get_value(iter, 0)
@@ -249,7 +265,7 @@
def __size_cell_data_func(self, column, cell, model, iter):
"""Data function for the file size of downloads."""
download = model.get_value(iter, 0)
- cell.props.text = Utils.get_readable_size(download.block_count * download.block_size)
+ cell.props.text = Utils.get_readable_size(download.current_size)
def __total_size_cell_data_func(self, column, cell, model, iter):
"""Data function for the file size of downloads."""
@@ -264,7 +280,7 @@
def __speed_cell_data_func(self, column, cell, model, iter):
"""Data function for the speed of downloads."""
download = model.get_value(iter, 0)
- cell.props.text = "N/A"
+ cell.props.text = "%.0fkb/s" % download.bit_rate
def __eta_cell_data_func(self, column, cell, model, iter):
"""Data function for estimated time of arrival (ETA) of downloads."""
@@ -283,6 +299,9 @@
self.quit_menu_item.connect("activate", self.quit)
# Edit menu
+ self.select_all_menu_item.connect("activate", self.__select_all, True)
+ self.unselect_all_menu_item.connect("activate", self.__select_all,
+ False)
self.preferences_menu_item.connect("activate",
self.preferences_menu_item_activate)
@@ -313,15 +332,17 @@
self.add_tool_button.connect("clicked", self.show_add_download_dialog)
self.pause_tool_button.connect("clicked",
self.__pause_tool_button_clicked)
- self.cancel_tool_button.connect("clicked", self.__cancel_download)
+ self.cancel_tool_button.connect("clicked",
+ self.__cancel_selected_downloads)
self.remove_tool_button.connect("clicked",
- self.__remove_selected_download)
+ self.__remove_selected_downloads)
self.clear_tool_button.connect("clicked",
self.__clear_tool_button_clicked)
+ self.details_tool_button.connect("clicked",
+ self.__details_selected_download)
# Download treeview
- selection = self.downloads_treeview.get_selection()
- selection.connect("changed",
+ self.downloads_treeview_selection.connect("changed",
self.__downloads_treeview_selection_changed)
self.downloads_treeview.connect("row-activated",
self.__downloads_treeview_row_activated)
@@ -331,10 +352,12 @@
self.pause_imi.connect("activate", self.__pause_imi_activate)
self.resume_imi.connect("activate", self.__resume_imi_activate)
- self.cancel_imi.connect("activate", self.__cancel_download)
- self.remove_imi.connect("activate", self.__remove_selected_download)
+ self.cancel_imi.connect("activate", self.__cancel_selected_downloads)
+ self.remove_imi.connect("activate", self.__remove_selected_downloads)
self.open_imi.connect("activate", self.__open_imi_activate)
- self.open_folder_imi.connect("activate", self.__open_folder_imi_activate)
+ self.open_folder_imi.connect("activate",
+ self.__open_folder_imi_activate)
+ self.details_imi.connect("activate", self.__details_selected_download)
def __window_configure_event(self, widget, event):
"""Saves the window geometry and position"""
@@ -375,6 +398,13 @@
pd = PreferencesDialog(self.config)
pd.dialog.show()
+ def __select_all(self, menu_item, select_all):
+ """Select/Unselect all downloads"""
+ if select_all:
+ self.downloads_treeview_selection.select_all()
+ else:
+ self.downloads_treeview_selection.unselect_all()
+
def __show_toolbar_menu_item_toggled(self, menu_item):
"""Show/Hide toolbar"""
self.config.show_toolbar = menu_item.get_active()
@@ -414,28 +444,35 @@
def __downloads_treeview_selection_changed(self, selection):
"""When selection changes set sensitivity appropriately."""
- (downloads_model, downloads_iter) = selection.get_selected()
- if downloads_iter:
- # Enable tool buttons and menu items
- self.pause_tool_button.set_sensitive(True)
- self.pause_imi.set_sensitive(True)
- self.cancel_tool_button.set_sensitive(True)
- self.cancel_imi.set_sensitive(True)
- self.remove_tool_button.set_sensitive(True)
- self.remove_imi.set_sensitive(True)
+ num_selected = selection.count_selected_rows()
+ # Disable tool buttons and menu items if nothing is selected, else
+ # enable them
+ if num_selected < 1:
+ self.__download_widgets_set_sensitive(False)
+ elif num_selected == 1:
+ self.__download_widgets_set_sensitive(True)
+ elif num_selected > 1:
+ self.__download_widgets_set_sensitive(True)
+ # Details should only be possible if one row is selected
+ self.details_tool_button.set_sensitive(False)
+ self.details_imi.set_sensitive(False)
# Set informative window title
# download = downloads_model.get_value(downloads_iter, 0)
# if download:
# self.window.set_title("%s %s (%.2f%%)" % (NAME, download.file_name, download.percent_complete))
- else:
- # Disable tool buttons and menu items
- self.pause_tool_button.set_sensitive(False)
- self.pause_imi.set_sensitive(False)
- self.cancel_tool_button.set_sensitive(False)
- self.cancel_imi.set_sensitive(False)
- self.remove_tool_button.set_sensitive(False)
- self.remove_imi.set_sensitive(False)
+
+ def __download_widgets_set_sensitive(self, sensitive):
+        """Sets the sensitivity property for widgets associated with the
+ downloads treeview."""
+ self.pause_tool_button.set_sensitive(sensitive)
+ self.pause_imi.set_sensitive(sensitive)
+ self.cancel_tool_button.set_sensitive(sensitive)
+ self.cancel_imi.set_sensitive(sensitive)
+ self.remove_tool_button.set_sensitive(sensitive)
+ self.remove_imi.set_sensitive(sensitive)
+ self.details_tool_button.set_sensitive(sensitive)
+ self.details_imi.set_sensitive(sensitive)
def __downloads_treeview_row_activated(self, treeview, path, column):
"""Called when a download is double-clicked. Opens the file with the
@@ -447,8 +484,8 @@
def __downloads_treeview_button_press(self, treeview, event, menu):
"""Show context menu for downloads treeview"""
if event.type == gtk.gdk.BUTTON_PRESS and event.button == 3:
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
+ n_selected = self.downloads_treeview_selection.count_selected_rows()
+ if n_selected == 1:
menu.popup(None, None, None, event.button, event.time)
def __treeview_column_button_press(self, treeview, event, menu):
@@ -457,16 +494,19 @@
menu.popup(None, None, None, event.button, event.time)
def __pause_tool_button_clicked(self, tool_button):
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
- stock = tool_button.get_stock_id()
- if stock == gtk.STOCK_MEDIA_PAUSE:
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ if downloads:
+ pause = (tool_button.get_stock_id() == gtk.STOCK_MEDIA_PAUSE)
+ if pause:
tool_button.set_stock_id(gtk.STOCK_MEDIA_PLAY)
tool_button.set_label(_("Resume"))
else:
tool_button.set_stock_id(gtk.STOCK_MEDIA_PAUSE)
tool_button.set_label(None)
+ for download in downloads:
+ download.set_paused(pause)
+
def __clear_tool_button_clicked(self, tool_button):
self.download_list.remove_completed_downloads()
@@ -475,36 +515,48 @@
if download:
imagemenuitem.hide()
self.resume_imi.show()
+ download.set_paused(True)
def __resume_imi_activate(self, imagemenuitem):
download = GUI.get_selected_value(self.downloads_treeview)
if download:
imagemenuitem.hide()
self.pause_imi.show()
+ download.set_paused(False)
- def __cancel_download(self, widget):
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
- pass
-
- def __remove_selected_download(self, widget):
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
+ def __cancel_selected_downloads(self, widget):
+        """Cancels the selected downloads in DownloadList."""
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ for download in downloads:
+ download.set_canceled(True)
+
+ def __remove_selected_downloads(self, widget):
+        """Removes the selected downloads from DownloadList."""
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ for download in downloads:
self.download_list.remove_download(download)
def __open_imi_activate(self, imagemenuitem):
"""Opens the downloaded file with the associated program."""
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
- GUI.open_file_on_screen(download.file, imagemenuitem.get_screen())
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ if downloads:
+ GUI.open_file_on_screen(downloads[0].file,
+ imagemenuitem.get_screen())
def __open_folder_imi_activate(self, imagemenuitem):
"""Opens the folder containing the download."""
- download = GUI.get_selected_value(self.downloads_treeview)
- if download:
- uri = gnomevfs.make_uri_from_input(download.path)
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ if downloads:
+ uri = gnomevfs.make_uri_from_input(downloads[0].path)
gnome.ui.url_show_on_screen(uri, imagemenuitem.get_screen())
+ def __details_selected_download(self, widget):
+ """Shows details for the selected download. The details option will
+        only be available when only one download is selected."""
+ downloads = GUI.get_selected_values(self.downloads_treeview)
+ if downloads:
+ DetailsDialog(downloads[0])
+
def quit(self, widget):
"""Quits the application. Called from various places."""
# TODO: Shutdown gracefully
@@ -550,7 +602,8 @@
else:
self.__set_toolbar_style("both")
- def __set_toolbar_style(self, toolbar_style):
+ def __set_toolbar_style(self, toolbar_style="both"):
+ """Sets the toolbar to the specified style."""
if toolbar_style == "icons":
self.toolbar.set_style(gtk.TOOLBAR_ICONS)
elif toolbar_style == "both":
@@ -640,7 +693,7 @@
self.downloads_model.remove(downloads_iter)
GUI.queue_resize(self.downloads_treeview)
break
- downloads_iter = self.downloads_model.iter_.next(downloads_iter)
+ downloads_iter = self.downloads_model.iter_next(downloads_iter)
def __download_update(self, download, block_count, block_size, total_size):
"""Called on download updates. Finds the associated treeview row and
@@ -649,6 +702,8 @@
self.update_download_row(download)
def __download_status_changed(self, download, status):
+ """Called when the status of a download changes. Tells the treeview to
+ update the row with that download."""
self.update_download_row(download)
def update_download_row(self, download):
@@ -664,6 +719,8 @@
downloads_iter = self.downloads_model.iter_next(downloads_iter)
def on_unhandled_exception(self, type, value, tb):
+        """Called if an unhandled exception occurs. Shows the exception in
+ an error dialog and prints the stack trace to stderr."""
try:
list = traceback.format_tb(tb, None) + \
traceback.format_exception_only(type, value)
Modified: trunk/gget/metalink.py
==============================================================================
--- trunk/gget/metalink.py (original)
+++ trunk/gget/metalink.py Sat Jul 19 14:19:33 2008
@@ -23,9 +23,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-# Filename: $URL$
-# Last Updated: $Date$
-# Version: $Rev$
+# Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/metalink.py $
+# Last Updated: $Date: 2008-03-24 00:31:39 -0700 (Mon, 24 Mar 2008) $
+# Version: $Rev: 130 $
# Author(s): Neil McNab
#
# Description:
@@ -179,61 +179,44 @@
# - dump FTP data chunks directly to file instead of holding in memory
# - maybe HTTPS proxy support if people need it
########################################################################
+try: import win32api
+except: pass
+try: import win32process
+except ImportError: pass
+import hashlib
+import xml.parsers.expat
+import time
import optparse
-import urllib2
-import urlparse
import os.path
-import xml.dom.minidom
+import os
+import sha
import random
-import sys
-import re
+import threading
+import md5
+import logging
+import gettext
import socket
import base64
-import hashlib
-import httplib
+import re
+import sys
import ftplib
-import locale
+import os.path
+import os
import gettext
-import logging
+import locale
+import sys
+import httplib
import urllib2
-import urlparse
-import hashlib
-import os.path
-import xml.dom.minidom
-import locale
-import threading
-import time
import copy
-import socket
-import ftplib
-import httplib
-import base64
-import sys
-import gettext
+import subprocess
+import math
import StringIO
+import urlparse
+import StringIO
import gzip
-import os
-import StringIO
-import os.path
-import subprocess
-import gettext
-import sys
import locale
-
-try:
- import win32process
-except ImportError: pass
-
-import xml.dom.minidom
-import optparse
-import socket
-import sys
-import os
-import os.path
-import locale
-import gettext
-class Dummy:
- pass
+class Dummy:
+ pass
#!/usr/bin/env python
########################################################################
#
@@ -259,9 +242,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-# Filename: $URL$
-# Last Updated: $Date$
-# Version: $Rev$
+# Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/checker.py $
+# Last Updated: $Date: 2008-07-04 22:01:37 +0200 (fre, 04 jul 2008) $
+# Version: $Rev: 194 $
# Author(s): Neil McNab
#
# Description:
@@ -281,6 +264,7 @@
MAX_REDIRECTS = 20
+MAX_THREADS = 10
def translate():
'''
@@ -313,34 +297,41 @@
src = download.complete_url(src)
datasource = urllib2.urlopen(src)
try:
- dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+ #dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+ metalink = xmlutils.Metalink()
+ metalink.parsehandle(datasource)
except:
print _("ERROR parsing XML.")
raise
datasource.close()
- metalink_node = xmlutils.get_subnodes(dom2, ["metalink"])
- try:
- metalink_type = get_attr_from_item(metalink_node, "type")
- except:
- metalink_type = None
- if metalink_type == "dynamic":
- origin = get_attr_from_item(metalink_node, "origin")
+## metalink_node = xmlutils.get_subnodes(dom2, ["metalink"])
+## try:
+## metalink_type = get_attr_from_item(metalink_node, "type")
+## except:
+## metalink_type = None
+
+ if metalink.type == "dynamic":
+ #origin = get_attr_from_item(metalink_node, "origin")
+ origin = metalink.origin
if origin != src:
return check_metalink(origin)
- urllist = xmlutils.get_subnodes(dom2, ["metalink", "files", "file"])
+ #urllist = xmlutils.get_subnodes(dom2, ["metalink", "files", "file"])
+ urllist = metalink.files
if len(urllist) == 0:
print _("No urls to download file from.")
return False
results = {}
for filenode in urllist:
- try:
- size = xmlutils.get_xml_tag_strings(filenode, ["size"])[0]
- except:
- size = None
- name = xmlutils.get_attr_from_item(filenode, "name")
+ size = filenode.size
+## try:
+## size = xmlutils.get_xml_tag_strings(filenode, ["size"])[0]
+## except:
+## size = None
+ #name = xmlutils.get_attr_from_item(filenode, "name")
+ name = filenode.filename
print "=" * 79
print _("File") + ": %s " % name + _("Size") + ": %s" % size
results[name] = check_file_node(filenode)
@@ -369,6 +360,7 @@
textheaders = str(textheaders)
headers = textheaders.split("\n")
+ headers.reverse()
for line in headers:
line = line.strip()
result = line.split(": ")
@@ -386,31 +378,47 @@
Fouth parameter, optional, progress handler callback
Returns dictionary of file paths with headers
'''
- try:
- size = get_xml_tag_strings(item, ["size"])[0]
- except:
- size = None
- urllist = xmlutils.get_subnodes(item, ["resources", "url"])
+## try:
+## size = get_xml_tag_strings(item, ["size"])[0]
+## except:
+## size = None
+ size = item.size
+ #urllist = xmlutils.get_subnodes(item, ["resources", "url"])
+ urllist = item.resources
if len(urllist) == 0:
print _("No urls to download file from.")
return False
+
+ def thread(filename):
+ checker = URLCheck(filename)
+ headers = checker.info()
+ result[checker.geturl()] = check_process(headers, size)
+ redir = get_header(headers, "Redirected")
+ print "-" *79
+ print _("Checked") + ": %s" % filename
+ if redir != None:
+ print _("Redirected") + ": %s" % redir
+ print _("Response Code") + ": %s\t" % result[checker.geturl()][0] + _("Size Check") + ": %s" % result[checker.geturl()][1]
number = 0
filename = {}
-
+
count = 1
result = {}
while (count <= len(urllist)):
- filename = urllist[number].firstChild.nodeValue.strip()
- print "-" *79
- print _("Checking") + ": %s" % filename
- checker = URLCheck(filename)
- headers = checker.info()
- result[checker.geturl()] = check_process(headers, size)
- print _("Response Code") + ": %s\t" % result[checker.geturl()][0] + _("Size Check") + ": %s" % result[checker.geturl()][1]
+ filename = urllist[number].url
+ #don't start too many threads at once
+ while threading.activeCount() > MAX_THREADS:
+ pass
+ mythread = threading.Thread(target = thread, args = [filename])
+ mythread.start()
+ #thread(filename)
number = (number + 1) % len(urllist)
count += 1
-
+
+ # don't return until all threads are finished (except the one main thread)
+ while threading.activeCount() > 1:
+ pass
return result
class URLCheck:
@@ -436,14 +444,19 @@
except socket.error, error:
self.infostring += _("Response") + ": " + _("Connection Error") + "\r\n"
return
-
- resp = conn.getresponse()
+
+ try:
+ resp = conn.getresponse()
+ except socket.timeout:
+ self.infostring += _("Response") + ": " + _("Timeout") + "\r\n"
+ return
# handle redirects here and set self.url
count = 0
while (resp.status == httplib.MOVED_PERMANENTLY or resp.status == httplib.FOUND) and count < MAX_REDIRECTS:
url = resp.getheader("location")
- print _("Redirected") + ": %s" % url
+ #print _("Redirected from ") + self.url + " to %s." % url
+ self.infostring += _("Redirected") + ": %s\r\n" % url
conn.close()
urlparts = urlparse.urlparse(url)
# need to set default port here
@@ -492,7 +505,8 @@
count = 0
while (resp.status == httplib.MOVED_PERMANENTLY or resp.status == httplib.FOUND) and count < MAX_REDIRECTS:
url = resp.getheader("location")
- print _("Redirected") + ": %s" % url
+ #print _("Redirected") + ": %s" % url
+ self.infostring += _("Redirected") + ": %s\r\n" % url
conn.close()
urlparts = urlparse.urlparse(url)
# need to set default port here
@@ -573,15 +587,16 @@
def info(self):
# need response and content-length for HTTP
return self.infostring
-checker = Dummy()
-checker.MAX_REDIRECTS = MAX_REDIRECTS
-checker.URLCheck = URLCheck
-checker._ = _
-checker.check_file_node = check_file_node
-checker.check_metalink = check_metalink
-checker.check_process = check_process
-checker.get_header = get_header
-checker.translate = translate
+checker = Dummy()
+checker.MAX_REDIRECTS = MAX_REDIRECTS
+checker.MAX_THREADS = MAX_THREADS
+checker.URLCheck = URLCheck
+checker._ = _
+checker.check_file_node = check_file_node
+checker.check_metalink = check_metalink
+checker.check_process = check_process
+checker.get_header = get_header
+checker.translate = translate
#!/usr/bin/env python
########################################################################
#
@@ -607,8 +622,8 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-# Filename: $URL$
-# Last Updated: $Date$
+# Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/download.py $
+# Last Updated: $Date: 2008-07-16 08:19:38 +0200 (ons, 16 jul 2008) $
# Author(s): Neil McNab
#
# Description:
@@ -621,11 +636,26 @@
#
# files = download.get("file.metalink", os.getcwd())
#
+# Callback Definitions:
+# def cancel():
+# Returns True to cancel, False otherwise
+# def pause():
+# Returns True to pause, False to continue/resume
+# def status(block_count, block_size, total_size):
+# Same format as urllib.urlretrieve reporthook
+# block_count - a count of blocks transferred so far
+# block_size - a block size in bytes
+# total_size - the total size of the file in bytes
+# def bitrate(bitrate):
+# bitrate - kilobits per second (float)
+#
########################################################################
#import utils
+#import xml.dom.minidom
#import logging
+
USER_AGENT = "Metalink Checker/4.0 +http://www.nabber.org/projects/"
SEGMENTED = True
@@ -634,6 +664,9 @@
MAX_REDIRECTS = 20
CONNECT_RETRY_COUNT = 3
+MAX_CHUNKS = 256
+DEFAULT_CHUNK_SIZE = 262144
+
LANG = []
OS = None
COUNTRY = None
@@ -662,6 +695,101 @@
# See http://www.poeml.de/transmetalink-test/README
MIME_TYPE = "application/metalink+xml"
+##### PROXY SETUP #########
+
+def reg_query(keyname, value=None):
+ if os.name != "nt":
+ return []
+
+ blanklines = 1
+
+ if value == None:
+ tempresult = os.popen2("reg query \"%s\"" % keyname)
+ else:
+ tempresult = os.popen2("reg query \"%s\" /v \"%s\"" % (keyname, value))
+ stdout = tempresult[1]
+ stdout = stdout.readlines()
+
+ # For Windows XP, this was changed in Vista!
+ if stdout[1].startswith("! REG.EXE"):
+ blanklines += 2
+ if value == None:
+ blanklines += 2
+ stdout = stdout[blanklines:]
+
+ return stdout
+
+def get_key_value(key, value):
+ '''
+ Probes the Windows registry for a value
+ First parameter, key to look in
+ Second parameter, value name to extract
+ Returns the value data as a string
+ '''
+ # does not handle non-paths yet
+ result = u""
+
+ try:
+ keyid = win32api.RegOpenKeyEx(win32con.HKEY_CURRENT_USER, key)
+ tempvalue = win32api.RegQueryValueEx(keyid, value)
+ win32api.RegCloseKey(keyid)
+ result = unicode(tempvalue[0])
+ except NameError:
+ # alternate method if win32api is not available, probably only works on Windows NT variants
+ stdout = reg_query(u"HKCU\\" + key, value)
+
+ try:
+ # XP vs. Vista
+ if stdout[1].find(u"\t") != -1:
+ lines = stdout[1].split(u"\t")
+ index = 2
+ else:
+ lines = stdout[1].split(u" ")
+ index = 3
+ result = lines[index].strip()
+ except IndexError:
+ result = u""
+ except: pass
+
+ result = unicode(os.path.expandvars(result))
+ return result
+
+def get_proxy_info():
+ global HTTP_PROXY
+ global FTP_PROXY
+ global HTTPS_PROXY
+
+ # from environment variables
+ if os.environ.has_key('http_proxy') and HTTP_PROXY == "":
+ HTTP_PROXY=os.environ['http_proxy']
+ if os.environ.has_key('ftp_proxy') and FTP_PROXY == "":
+ FTP_PROXY=os.environ['ftp_proxy']
+ if os.environ.has_key('https_proxy') and HTTPS_PROXY == "":
+ HTTPS_PROXY=os.environ['https_proxy']
+
+ # from IE in registry
+ proxy_enable = get_key_value("Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings", "ProxyEnable")
+ try:
+ proxy_enable = int(proxy_enable[-1])
+ except IndexError:
+ proxy_enable = False
+
+ if proxy_enable:
+ proxy_string = get_key_value("Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings", "ProxyServer")
+ if proxy_string.find("=") == -1:
+ # if all use the same settings
+ for proxy in ("HTTP_PROXY", "FTP_PROXY", "HTTPS_PROXY"):
+ if getattr(sys.modules[__name__], proxy) == "":
+ setattr(sys.modules[__name__], proxy, "http://" + str(proxy_string))
+ else:
+ proxies = proxy_string.split(";")
+ for proxy in proxies:
+ name, value = proxy.split("=")
+ if getattr(sys.modules[__name__], name.upper() + "_PROXY") == "":
+ setattr(sys.modules[__name__], name.upper() + "_PROXY", "http://" + value)
+
+get_proxy_info()
+
def translate():
'''
Setup translation path
@@ -684,17 +812,17 @@
_ = translate()
-class URL:
- def __init__(self, url, location = "", preference = "", maxconnections = ""):
- if preference == "":
- preference = 1
- if maxconnections == "":
- maxconnections = 1
-
- self.url = url
- self.location = location
- self.preference = int(preference)
- self.maxconnections = int(maxconnections)
+##class URL:
+## def __init__(self, url, location = "", preference = "", maxconnections = ""):
+## if preference == "":
+## preference = 1
+## if maxconnections == "":
+## maxconnections = 1
+##
+## self.url = url
+## self.location = location
+## self.preference = int(preference)
+## self.maxconnections = int(maxconnections)
class DecompressFile(gzip.GzipFile):
@@ -772,22 +900,22 @@
# install this opener
urllib2.install_opener(opener)
-def get(src, path, checksums = {}, force = False, handler = None, segmented = SEGMENTED):
+def get(src, path, checksums = {}, force = False, handlers = {}, segmented = SEGMENTED):
'''
Download a file, decodes metalinks.
First parameter, file to download, URL or file path to download from
Second parameter, file path to save to
- Third parameter, optional, expected MD5SUM
- Fourth parameter, optional, expected SHA1SUM
- Fifth parameter, optional, force a new download even if a valid copy already exists
- Sixth parameter, optional, progress handler callback
+ Third parameter, optional, expected dictionary of checksums
+ Fourth parameter, optional, force a new download even if a valid copy already exists
+ Fifth parameter, optional, dictionary of progress handler callbacks
+ Sixth parameter, optional, boolean to try using segmented downloads
Returns list of file paths if download(s) is successful
Returns False otherwise (checksum fails)
raise socket.error e.g. "Operation timed out"
'''
# assume metalink if ends with .metalink
if src.endswith(".metalink"):
- return download_metalink(src, path, force, handler)
+ return download_metalink(src, path, force, handlers, segmented)
else:
# not all servers support HEAD where GET is also supported
# also a WindowsError is thrown if a local file does not exist
@@ -795,7 +923,7 @@
# add head check for metalink type, if MIME_TYPE or application/xml? treat as metalink
if urlhead(src, metalink=True)["content-type"].startswith(MIME_TYPE):
print _("Metalink content-type detected.")
- return download_metalink(src, path, force, handler)
+ return download_metalink(src, path, force, handlers, segmented)
except IOError, e:
pass
except WindowsError, e:
@@ -805,13 +933,13 @@
# parse out filename portion here
filename = os.path.basename(src)
result = download_file(src, os.path.join(path, filename),
- 0, checksums, force, handler, segmented = segmented)
+ 0, checksums, force, handlers, segmented = segmented)
if result:
return [result]
return False
def download_file(url, local_file, size=0, checksums={}, force = False,
- handler = None, segmented = SEGMENTED, chunksums = {}, chunk_size = None):
+ handlers = {}, segmented = SEGMENTED, chunksums = {}, chunk_size = 0):
'''
url {string->URL} locations of the file
local_file string local file name to save to
@@ -826,51 +954,67 @@
Returns False otherwise (checksum fails).
'''
# convert string filename into something we can use
- urllist = {}
- urllist[url] = URL(url)
- return download_file_urls(urllist, local_file, size, checksums, force, handler, segmented, chunksums, chunk_size)
+ #urllist = {}
+ #urllist[url] = URL(url)
+
+ #metalink = xmlutils.Metalink()
+ fileobj = xmlutils.MetalinkFile(local_file)
+ fileobj.set_size(size)
+ fileobj.hashlist = checksums
+ fileobj.pieces = chunksums
+ fileobj.piecelength = chunk_size
+ fileobj.add_url(url)
+ #metalink.files.append(fileobj)
+ return download_file_urls(fileobj, force, handlers, segmented)
+
-def download_file_urls(urllist, local_file, size=0, checksums={}, force = False,
- handler = None, segmented = SEGMENTED, chunksums = {}, chunk_size = None):
+#class Download:
+ #def __init__(self, urllist, local_file, size=0, checksums={}, force = False,
+ #handler = None, segmented = SEGMENTED, chunksums = {}, chunk_size = None):
+ #pass
+
+ #def set_cancel_callback(self, callback):
+ #self.cancel_callback(self,
+
+def download_file_urls(metalinkfile, force = False, handlers = {}, segmented = SEGMENTED):
'''
Download a file.
- urllist {string->URL} file to download, URL or file path to download from
- Second parameter, file path to save to
- Third parameter, optional, expected file size
- Fourth parameter, optional, expected checksum dictionary
- Fifth parameter, optional, force a new download even if a valid copy already exists
- Sixth parameter, optional, progress handler callback
+ First parameter, MetalinkFile object to download
+ Second parameter, optional, force a new download even if a valid copy already exists
+ Third parameter, optional, dictionary of progress handler callbacks
+ Fourth parameter, optional, try to use segmented downloading
Returns file path if download is successful
Returns False otherwise (checksum fails)
'''
- assert isinstance(urllist, dict)
+ #assert isinstance(urllist, dict)
print ""
- print _("Downloading to %s.") % local_file
+ print _("Downloading to %s.") % metalinkfile.filename
- if os.path.exists(local_file) and (not force) and len(checksums) > 0:
- checksum = verify_checksum(local_file, checksums)
+ if os.path.exists(metalinkfile.filename) and (not force) and len(metalinkfile.hashlist) > 0:
+ checksum = verify_checksum(metalinkfile.filename, metalinkfile.hashlist)
if checksum:
- actsize = size
+ actsize = metalinkfile.size
if actsize == 0:
actsize = os.stat(local_file).st_size
if actsize != 0:
- if handler != None:
- handler(1, actsize, actsize)
- return local_file
+ #if handler != None:
+ handlers["status"](1, actsize, actsize)
+ return metalinkfile.filename
else:
- print _("Checksum failed, retrying download of %s.") % os.path.basename(local_file)
+ print _("Checksum failed, retrying download of %s.") % os.path.basename(metalinkfile.filename)
- directory = os.path.dirname(local_file)
+ directory = os.path.dirname(metalinkfile.filename)
if not os.path.isdir(directory):
os.makedirs(directory)
+ if metalinkfile.piecelength == 0:
+ metalinkfile.piecelength = DEFAULT_CHUNK_SIZE
+
seg_result = False
if segmented:
- if chunk_size == None:
- chunk_size = 262144
- manager = Segment_Manager(urllist, local_file, size, reporthook = handler,
- chunksums = chunksums, chunk_size = int(chunk_size))
+ manager = Segment_Manager(metalinkfile)
+ manager.set_callbacks(handlers)
seg_result = manager.run()
if not seg_result:
@@ -878,50 +1022,179 @@
print "\n" + _("Could not download all segments of the file, trying one mirror at a time.")
if (not segmented) or (not seg_result):
+ manager = NormalManager(metalinkfile)
+ manager.set_callbacks(handlers)
+ manager.run()
+
+ if manager.get_status():
+ return metalinkfile.filename
+ return False
+
+class Manager:
+ def __init__(self):
+ self.cancel_handler = None
+ self.pause_handler = None
+ self.status_handler = None
+ self.bitrate_handler = None
+ self.status = True
+ self.end_bitrate()
+
+ def set_cancel_callback(self, handler):
+ self.cancel_handler = handler
+
+ def set_pause_callback(self, handler):
+ self.pause_handler = handler
+
+ def set_status_callback(self, handler):
+ self.status_handler = handler
+
+ def set_bitrate_callback(self, handler):
+ self.bitrate_handler = handler
+
+ def set_callbacks(self, callbackdict):
+ for key in callbackdict.keys():
+ setattr(self, key + "_handler", callbackdict[key])
+
+ def run(self, wait=None):
+ result = True
+ while result:
+ if self.pause_handler != None and self.pause_handler():
+ self.end_bitrate()
+ time.sleep(1)
+ else:
+ if wait != None:
+ time.sleep(wait)
+ result = self.cycle()
+
+ return self.get_status()
+
+ def get_status(self):
+ return self.status
+
+ def close_handler(self):
+ return
+
+ def start_bitrate(self, bytes):
+ '''
+ Pass in current byte count
+ '''
+ self.oldsize = bytes
+ self.oldtime = time.time()
+
+ def end_bitrate(self):
+ self.oldsize = 0
+ self.oldtime = None
+
+ def get_bitrate(self, bytes):
+ '''
+ Pass in current byte count
+ '''
+ if self.oldtime != None and (time.time() - self.oldtime) != 0:
+ return ((bytes - self.oldsize) * 8 / 1024)/(time.time() - self.oldtime)
+ return 0
+
+class NormalManager(Manager):
+ def __init__(self, metalinkfile):
+ Manager.__init__(self)
+ self.local_file = metalinkfile.filename
+ self.size = metalinkfile.size
+ self.chunksums = metalinkfile.get_piece_dict()
+ self.checksums = metalinkfile.hashlist
+ self.urllist = start_sort(metalinkfile.get_url_dict())
+ self.start_number = 0
+ self.number = 0
+ self.count = 1
+
+ def random_start(self):
# do it the old way
# choose a random url tag to start with
#urllist = list(urllist)
#number = int(random.random() * len(urllist))
- urllist = start_sort(urllist)
- number = 0
+ self.start_number = int(random.random() * len(self.urllist))
+ self.number = self.start_number
- count = 1
- while (count <= len(urllist)):
- error = False
- remote_file = complete_url(urllist[number])
- #print remote_file
- result = True
+ def cycle(self):
+ if self.cancel_handler != None and self.cancel_handler():
+ return False
+ try:
+ self.status = True
+ remote_file = complete_url(self.urllist[self.number])
+
+ manager = URLManager(remote_file, self.local_file, self.checksums)
+ manager.set_status_callback(self.status_handler)
+ manager.set_cancel_callback(self.cancel_handler)
+ manager.set_pause_callback(self.pause_handler)
+ manager.set_bitrate_callback(self.bitrate_handler)
+ self.get_bitrate = manager.get_bitrate
+ self.status = manager.run()
+
+ self.number = (self.number + 1) % len(self.urllist)
+ self.count += 1
+
+ return self.count <= len(self.urllist)
+ except KeyboardInterrupt:
+ print "Download Interrupted!"
try:
- urlretrieve(remote_file, local_file, handler)
- except:
- result = False
- error = not result
- number = (number + 1) % len(urllist)
- count += 1
+ manager.close_handler()
+ except: pass
+ return False
+
+class URLManager(Manager):
+ def __init__(self, remote_file, filename, checksums = {}):
+ '''
+ modernized replacement for urllib.urlretrieve() for use with proxy
+ '''
+ Manager.__init__(self)
+ self.filename = filename
+ self.checksums = checksums
+ self.block_size = 1024
+ self.counter = 0
+ self.temp = urlopen(remote_file)
+ headers = self.temp.info()
+
+ try:
+ self.size = int(headers['Content-Length'])
+ except KeyError:
+ self.size = 0
+
+ self.data = open(filename, 'wb')
- if filecheck(local_file, checksums, size, handler) and not error:
- return local_file
-## if verify_checksum(local_file, checksums):
-## actsize = 0
-## try:
-## actsize = os.stat(local_file).st_size
-## except: pass
-##
-## if handler != None:
-## tempsize = size
-## if size == 0:
-## tempsize = actsize
-## handler(1, actsize, tempsize)
-##
-## if (int(actsize) == int(size) or size == 0) and not error:
-## return local_file
-## else:
-## print "\n" + _("Checksum failed for %s.") % os.path.basename(local_file)
+ ### FIXME need to check contents from previous download here
+ self.resume = FileResume(filename + ".temp")
+ self.resume.add_block(0)
+
+ def close_handler(self):
+ self.resume.complete()
+ self.data.close()
+ self.temp.close()
+ if self.status:
+ self.status = filecheck(self.filename, self.checksums, self.size)
+
+ def cycle(self):
+ if self.oldtime == None:
+ self.start_bitrate(self.counter * self.block_size)
+ if self.cancel_handler != None and self.cancel_handler():
+ self.close_handler()
+ return False
+
+ block = self.temp.read(self.block_size)
+ self.data.write(block)
+ self.counter += 1
- if filecheck(local_file, checksums, size, handler):
- return local_file
- return False
+ self.resume.set_block_size(self.counter * self.block_size)
+
+ if self.status_handler != None:
+ self.status_handler(self.counter, self.block_size, self.size)
+
+ if self.bitrate_handler != None:
+ self.bitrate_handler(self.get_bitrate(self.counter * self.block_size))
+ if not block:
+ self.close_handler()
+
+ #print self.get_bitrate(self.counter * self.block_size)
+ return bool(block)
+
def filecheck(local_file, checksums, size, handler = None):
if verify_checksum(local_file, checksums):
actsize = 0
@@ -941,7 +1214,7 @@
print "\n" + _("Checksum failed for %s.") % os.path.basename(local_file)
return False
-def download_metalink(src, path, force = False, handler = None):
+def download_metalink(src, path, force = False, handlers = {}, segmented = SEGMENTED):
'''
Decode a metalink file, can be local or remote
First parameter, file to download, URL or file path to download from
@@ -956,34 +1229,40 @@
datasource = urlopen(src, metalink=True)
except:
return False
- dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+ #dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+ metalink = xmlutils.Metalink()
+ metalink.parsehandle(datasource)
datasource.close()
- metalink_node = xmlutils.get_subnodes(dom2, ["metalink"])
- try:
- metalink_type = xmlutils.get_attr_from_item(metalink_node[0], "type")
- except AttributeError:
- metalink_type = None
-
- if metalink_type == "dynamic":
- origin = xmlutils.get_attr_from_item(metalink_node[0], "origin")
+## metalink_node = xmlutils.get_subnodes(dom2, ["metalink"])
+## try:
+## metalink_type = xmlutils.get_attr_from_item(metalink_node[0], "type")
+## except AttributeError:
+## metalink_type = None
+
+ if metalink.type == "dynamic":
+ origin = metalink.origin
+ #origin = xmlutils.get_attr_from_item(metalink_node[0], "origin")
if origin != src and origin != "":
print _("Downloading update from %s") % origin
- return download_metalink(origin, path, force, handler)
+ return download_metalink(origin, path, force, handlers, segmented)
- urllist = xmlutils.get_subnodes(dom2, ["metalink", "files", "file"])
+ #urllist = xmlutils.get_subnodes(dom2, ["metalink", "files", "file"])
+ urllist = metalink.files
if len(urllist) == 0:
print _("No urls to download file from.")
return False
results = []
for filenode in urllist:
- ostag = xmlutils.get_xml_tag_strings(filenode, ["os"])
- langtag = xmlutils.get_xml_tag_strings(filenode, ["language"])
+ #ostag = xmlutils.get_xml_tag_strings(filenode, ["os"])
+ #langtag = xmlutils.get_xml_tag_strings(filenode, ["language"])
+ ostag = filenode.os
+ langtag = filenode.language
if OS == None or len(ostag) == 0 or ostag[0].lower() == OS.lower():
if "any" in LANG or len(langtag) == 0 or langtag[0].lower() in LANG:
- result = download_file_node(filenode, path, force, handler)
+ result = download_file_node(filenode, path, force, handlers, segmented)
if result:
results.append(result)
if len(results) == 0:
@@ -991,7 +1270,7 @@
return results
-def download_file_node(item, path, force = False, handler = None):
+def download_file_node(item, path, force = False, handler = None, segmented=SEGMENTED):
'''
Downloads a specific version of a program
First parameter, file XML node
@@ -1005,48 +1284,60 @@
# unused: urllist = xmlutils.get_xml_tag_strings(item, ["resources", "url"])
urllist = {}
- for node in xmlutils.get_subnodes(item, ["resources", "url"]):
- url = xmlutils.get_xml_item_strings([node])[0]
- location = xmlutils.get_attr_from_item(node, "location")
- preference = xmlutils.get_attr_from_item(node, "preference")
- maxconnections = xmlutils.get_attr_from_item(node, "maxconnections")
- urllist[url] = URL(url, location, preference, maxconnections)
+## for node in xmlutils.get_subnodes(item, ["resources", "url"]):
+## url = xmlutils.get_xml_item_strings([node])[0]
+## location = xmlutils.get_attr_from_item(node, "location")
+## preference = xmlutils.get_attr_from_item(node, "preference")
+## maxconnections = xmlutils.get_attr_from_item(node, "maxconnections")
+## urllist[url] = URL(url, location, preference, maxconnections)
+
+ for node in item.resources:
+ urllist[node.url] = node
if len(urllist) == 0:
print _("No urls to download file from.")
return False
- hashlist = xmlutils.get_subnodes(item, ["verification", "hash"])
- try:
- size = xmlutils.get_xml_tag_strings(item, ["size"])[0]
- except:
- size = 0
-
- hashes = {}
- for hashitem in hashlist:
- hashes[xmlutils.get_attr_from_item(hashitem, "type")] = hashitem.firstChild.nodeValue.strip()
-
- sigs = xmlutils.get_subnodes(item, ["verification", "signature"])
- for sig in sigs:
- hashes[xmlutils.get_attr_from_item(sig, "type")] = sig.firstChild.nodeValue.strip()
+## hashlist = xmlutils.get_subnodes(item, ["verification", "hash"])
+## try:
+## size = xmlutils.get_xml_tag_strings(item, ["size"])[0]
+## except:
+## size = 0
- local_file = xmlutils.get_attr_from_item(item, "name")
- localfile = path_join(path, local_file)
+ hashes = item.hashlist
+ size = item.size
+
+## hashes = {}
+## for hashitem in hashlist.keys():
+## hashes[hashitem] = hashitem.firstChild.nodeValue.strip()
+
+## sigs = xmlutils.get_subnodes(item, ["verification", "signature"])
+## for sig in sigs:
+## hashes[xmlutils.get_attr_from_item(sig, "type")] = sig.firstChild.nodeValue.strip()
+
+ #local_file = xmlutils.get_attr_from_item(item, "name")
+ local_file = item.filename
+ #localfile = path_join(path, local_file)
+ item.filename = path_join(path, local_file)
#extract chunk checksum information
- try:
- chunksize = int(xmlutils.get_attr_from_item(xmlutils.get_subnodes(item, ["verification", "pieces"])[0], "length"))
- except IndexError:
- chunksize = None
-
+## try:
+## chunksize = int(xmlutils.get_attr_from_item(xmlutils.get_subnodes(item, ["verification", "pieces"])[0], "length"))
+## except IndexError:
+## chunksize = None
+ chunksize = item.piecelength
+
chunksums = {}
- for piece in xmlutils.get_subnodes(item, ["verification", "pieces"]):
- hashtype = xmlutils.get_attr_from_item(piece, "type")
- chunksums[hashtype] = []
- for chunk in xmlutils.get_xml_tag_strings(piece, ["hash"]):
- chunksums[hashtype].append(chunk)
+## for piece in xmlutils.get_subnodes(item, ["verification", "pieces"]):
+## hashtype = xmlutils.get_attr_from_item(piece, "type")
+## chunksums[hashtype] = []
+## for chunk in xmlutils.get_xml_tag_strings(piece, ["hash"]):
+## chunksums[hashtype].append(chunk)
+
+ #for piece in item.pieces:
+ chunksums[item.piecetype] = item.pieces
- return download_file_urls(urllist, localfile, size, hashes, force, handler, SEGMENTED, chunksums, chunksize)
+ return download_file_urls(item, force, handler, segmented)
def complete_url(url):
'''
@@ -1066,7 +1357,7 @@
'''
modernized replacement for urllib.urlretrieve() for use with proxy
'''
- block_size = 4096
+ block_size = 1024
i = 0
counter = 0
temp = urlopen(url)
@@ -1461,35 +1752,46 @@
############# segmented download functions #############
-class Segment_Manager:
- def __init__(self, urls, localfile, size=0, chunk_size = 262144, chunksums = {}, reporthook = None):
- assert isinstance(urls, dict)
+class ThreadSafeFile(file):
+ def __init__(self, *args):
+ file.__init__(self, *args)
+ self.lock = threading.Lock()
+
+ def acquire(self):
+ return self.lock.acquire()
+
+ def release(self):
+ return self.lock.release()
+
+class Segment_Manager(Manager):
+ def __init__(self, metalinkfile):
+ Manager.__init__(self)
self.sockets = []
self.chunks = []
self.limit_per_host = LIMIT_PER_HOST
self.host_limit = HOST_LIMIT
- self.size = int(size)
- self.orig_urls = urls
- self.urls = urls
- self.chunk_size = int(chunk_size)
- self.chunksums = chunksums
- self.reporthook = reporthook
- self.localfile = localfile
+ self.size = int(metalinkfile.size)
+ self.orig_urls = metalinkfile.get_url_dict()
+ self.urls = self.orig_urls
+ self.chunk_size = int(metalinkfile.piecelength)
+ self.chunksums = metalinkfile.get_piece_dict()
+ self.checksums = metalinkfile.hashlist
+ self.localfile = metalinkfile.filename
self.filter_urls()
+ self.status = True
+
# Open the file.
try:
- self.f = open(localfile, "rb+")
+ self.f = ThreadSafeFile(self.localfile, "rb+")
except IOError:
- self.f = open(localfile, "wb+")
+ self.f = ThreadSafeFile(self.localfile, "wb+")
- self.resume = FileResume(localfile + ".temp")
- self.resume.update_block_size(self.chunk_size)
+ self.resume = FileResume(self.localfile + ".temp")
def get_chunksum(self, index):
mylist = {}
-
try:
for key in self.chunksums.keys():
mylist[key] = self.chunksums[key][index]
@@ -1566,28 +1868,54 @@
if self.size == None:
#crap out and do it the old way
self.close_handler()
+ self.status = False
return False
+
+ # can't adjust chunk size if it has chunk hashes tied to that size
+ if len(self.chunksums) == 0 and self.size/self.chunk_size > MAX_CHUNKS:
+ self.chunk_size = self.size/MAX_CHUNKS
+ #print "Set chunk size to %s." % self.chunk_size
+ self.resume.update_block_size(self.chunk_size)
+
+ return Manager.run(self, 0.1)
- while True:
- #print "\ntc:", self.active_count(), len(self.sockets), len(self.urls)
- #if self.active_count() == 0:
- #print self.byte_total(), self.size
- time.sleep(0.1)
+
+ def cycle(self):
+ '''
+ Runs one cycle
+ Returns True if still downloading, False otherwise
+ '''
+ try:
+ bytes = self.byte_total()
+ if self.oldtime == None:
+ self.start_bitrate(bytes)
+
+ # cancel was pressed here
+ if self.cancel_handler != None and self.cancel_handler():
+ self.status = False
+ self.close_handler()
+ return False
+
self.update()
self.resume.extend_blocks(self.chunk_list())
- if self.byte_total() >= self.size and self.active_count() == 0:
+ if bytes >= self.size and self.active_count() == 0:
self.resume.complete()
self.close_handler()
- return True
+ return False
+
#crap out and do it the old way
if len(self.urls) == 0:
+ self.status = False
self.close_handler()
return False
- return False
-## except BaseException, e:
-## logging.warning(unicode(e))
-## return False
+ return True
+
+ except KeyboardInterrupt:
+ print "Download Interrupted!"
+ self.close_handler()
+ return False
+
def update(self):
next = self.next_url()
@@ -1597,8 +1925,11 @@
index = self.get_chunk_index()
if index != None:
- if self.reporthook != None:
- self.reporthook(int(self.byte_total()/self.chunk_size), self.chunk_size, self.size)
+ if self.status_handler != None:
+ self.status_handler(int(self.byte_total()/self.chunk_size), self.chunk_size, self.size)
+
+ if self.bitrate_handler != None:
+ self.bitrate_handler(self.get_bitrate(self.byte_total()))
start = index * self.chunk_size
end = start + self.chunk_size
@@ -1607,11 +1938,13 @@
if next.protocol == "http" or next.protocol == "https":
segment = Http_Host_Segment(next, start, end, self.size, self.get_chunksum(index))
+ segment.set_cancel_callback(self.cancel_handler)
self.chunks[index] = segment
self.segment_init(index)
if next.protocol == "ftp":
#print "allocated to:", index, next.url
segment = Ftp_Host_Segment(next, start, end, self.size, self.get_chunksum(index))
+ segment.set_cancel_callback(self.cancel_handler)
self.chunks[index] = segment
self.segment_init(index)
@@ -1763,6 +2096,9 @@
if size == 0:
os.remove(self.localfile)
os.remove(self.localfile + ".temp")
+ self.status = False
+ elif self.status:
+ self.status = filecheck(self.localfile, self.checksums, size)
#except: pass
class Host_Base:
@@ -1905,19 +2241,31 @@
self.bytes = 0
self.buffer = ""
self.temp = ""
+ self.cancel_handler = None
+
+ def set_cancel_callback(self, handler):
+ self.cancel_handler = handler
+ def check_cancel(self):
+ if self.cancel_handler == None:
+ return False
+ return self.cancel_handler()
+
def avg_bitrate(self):
bits = self.bytes * 8
return bits/self.ttime
def checksum(self):
- lock = threading.Lock()
- lock.acquire()
-
- self.mem.seek(self.byte_start, 0)
- chunkstring = self.mem.read(self.byte_count)
+ if self.check_cancel():
+ return False
- lock.release()
+ try:
+ self.mem.acquire()
+ self.mem.seek(self.byte_start, 0)
+ chunkstring = self.mem.read(self.byte_count)
+ self.mem.release()
+ except ValueError:
+ return False
return verify_chunk_checksum(chunkstring, self.checksums)
@@ -2005,12 +2353,15 @@
while True:
if self.readable():
self.handle_read()
- else:
self.ttime += (time.time() - self.start_time)
+ else:
self.end()
return
def readable(self):
+ if self.check_cancel():
+ return False
+
if self.response == None:
return False
return True
@@ -2040,14 +2391,14 @@
self.bytes += len(tempbuffer)
- lock = threading.Lock()
- lock.acquire()
-
- self.mem.seek(self.byte_start, 0)
- self.mem.write(tempbuffer)
- self.mem.flush()
-
- lock.release()
+ try:
+ self.mem.acquire()
+ self.mem.seek(self.byte_start, 0)
+ self.mem.write(tempbuffer)
+ self.mem.flush()
+ self.mem.release()
+ except ValueError:
+ self.error = _("bad file handle")
self.response = None
@@ -2063,14 +2414,12 @@
##
## writedata = data[:index]
##
-## lock = threading.Lock()
-## lock.acquire()
-##
+## self.mem.acquire()
## self.mem.seek(self.byte_start + self.bytes, 0)
## self.mem.write(writedata)
## self.mem.flush()
##
-## lock.release()
+## self.mem.release()
##
## self.response = None
## else:
@@ -2116,14 +2465,17 @@
while True:
if self.readable():
self.handle_read()
- else:
self.ttime += (time.time() - self.start_time)
+ else:
self.end()
return
#except BaseException, e:
# self.error = utils.get_exception_message(e)
def readable(self):
+ if self.check_cancel():
+ return False
+
if self.response == None:
try:
self.response = self.host.conn.getresponse()
@@ -2178,19 +2530,23 @@
body = data
size = len(body)
- # write out body to file
-
- lock = threading.Lock()
- lock.acquire()
- self.mem.seek(self.byte_start, 0)
- self.mem.write(body)
- self.mem.flush()
-
- lock.release()
+ # write out body to file
+ try:
+ self.mem.acquire()
+ self.mem.seek(self.byte_start + self.bytes, 0)
+ self.mem.write(body)
+ self.mem.flush()
+ self.mem.release()
+ except ValueError:
+ self.error = _("bad file handle")
+ self.response = None
+ return
self.bytes += size
- self.response = None
+ #print self.bytes, self.byte_count
+ if self.bytes >= self.byte_count:
+ self.response = None
########### PROXYING OBJECTS ########################
@@ -2356,489 +2712,497 @@
def close(self):
return self.conn.close()
-
-download = Dummy()
-download.CONNECT_RETRY_COUNT = CONNECT_RETRY_COUNT
-download.COUNTRY = COUNTRY
-download.DecompressFile = DecompressFile
-download.FTP = FTP
-download.FTP_PROXY = FTP_PROXY
-download.FileResume = FileResume
-download.Ftp_Host = Ftp_Host
-download.Ftp_Host_Segment = Ftp_Host_Segment
-download.HOST_LIMIT = HOST_LIMIT
-download.HTTPConnection = HTTPConnection
-download.HTTPSConnection = HTTPSConnection
-download.HTTPS_PROXY = HTTPS_PROXY
-download.HTTP_PROXY = HTTP_PROXY
-download.Host_Base = Host_Base
-download.Host_Segment = Host_Segment
-download.Http_Host = Http_Host
-download.Http_Host_Segment = Http_Host_Segment
-download.LANG = LANG
-download.LIMIT_PER_HOST = LIMIT_PER_HOST
-download.MAX_REDIRECTS = MAX_REDIRECTS
-download.MIME_TYPE = MIME_TYPE
-download.OS = OS
-download.PGP_KEY_DIR = PGP_KEY_DIR
-download.PGP_KEY_EXTS = PGP_KEY_EXTS
-download.PGP_KEY_STORE = PGP_KEY_STORE
-download.PROTOCOLS = PROTOCOLS
-download.SEGMENTED = SEGMENTED
-download.Segment_Manager = Segment_Manager
-download.URL = URL
-download.USER_AGENT = USER_AGENT
-download._ = _
-download.complete_url = complete_url
-download.download_file = download_file
-download.download_file_node = download_file_node
-download.download_file_urls = download_file_urls
-download.download_metalink = download_metalink
-download.filecheck = filecheck
-download.filehash = filehash
-download.get = get
-download.get_transport = get_transport
-download.is_local = is_local
-download.is_remote = is_remote
-download.lang = lang
-download.path_join = path_join
-download.pgp_verify_sig = pgp_verify_sig
-download.set_proxies = set_proxies
-download.sort_prefs = sort_prefs
-download.start_sort = start_sort
-download.translate = translate
-download.urlhead = urlhead
-download.urlopen = urlopen
-download.urlretrieve = urlretrieve
-download.verify_checksum = verify_checksum
-download.verify_chunk_checksum = verify_chunk_checksum
-'''
-From sourceforge pycrypto project:
-http://sourceforge.net/projects/pycrypto/
-
-Code for running GnuPG from Python and dealing with the results.
-
-Detailed info about the format of data to/from gpg may be obtained from the
-file DETAILS in the gnupg source.
-
-Dependencies
- - GPG must be installed
- - http://www.gnupg.org
- - http://www.gpg4win.org
-'''
-
-__rcsid__ = '$Id: GPG.py,v 1.3 2003/11/23 15:03:15 akuchling Exp $'
-
-
-def translate():
- '''
- Setup translation path
- '''
- if __name__=="__main__":
- try:
- base = os.path.basename(__file__)[:-3]
- localedir = os.path.join(os.path.dirname(__file__), "locale")
- except NameError:
- base = os.path.basename(sys.executable)[:-4]
- localedir = os.path.join(os.path.dirname(sys.executable), "locale")
- else:
- temp = __name__.split(".")
- base = temp[-1]
- localedir = os.path.join("/".join(["%s" % k for k in temp[:-1]]), "locale")
-
- #print base, localedir
- t = gettext.translation(base, localedir, [locale.getdefaultlocale()[0]], None, 'en')
- return t.ugettext
-
-_ = translate()
-
-# Default path used for searching for the GPG binary
-DEFAULT_PATH = ['/bin', '/usr/bin', '/usr/local/bin', \
- '${PROGRAMFILES}\\GNU\\GnuPG', '${PROGRAMFILES(X86)}\\GNU\\GnuPG',\
- '${SYSTEMDRIVE}\\cygwin\\bin', '${SYSTEMDRIVE}\\cygwin\\usr\\bin', '${SYSTEMDRIVE}\\cygwin\\usr\\local\\bin']
-
-class Signature:
- "Used to hold information about a signature result"
-
- def __init__(self):
- self.valid = 0
- self.fingerprint = self.creation_date = self.timestamp = None
- self.signature_id = self.key_id = None
- self.username = None
- self.error = None
- self.nopubkey = False
-
- def BADSIG(self, value):
- self.error = "BADSIG"
- self.valid = 0
- self.key_id, self.username = value.split(None, 1)
- def GOODSIG(self, value):
- self.valid = 1
- #self.error = "GOODSIG"
- self.key_id, self.username = value.split(None, 1)
- def VALIDSIG(self, value):
- #print value
- #self.valid = 1
- #self.error = "VALID_SIG"
- self.fingerprint, self.creation_date, self.timestamp, other = value.split(" ", 3)
- def SIG_ID(self, value):
- #self.error = "SIG_ID"
- self.signature_id, self.creation_date, self.timestamp = value.split(" ", 2)
- def NODATA(self, value):
- self.error = _("File not properly loaded for signature.")
- def ERRSIG(self, value):
- #print value
- self.error = _("Signature error.")
- def NO_PUBKEY(self, value):
- self.key_id = value
- self.nopubkey = True
- self.error = _("Signature error, missing public key with id 0x%s.") % value[-8:]
-
- def TRUST_ULTIMATE(self, value):
- '''
- see http://cvs.gnupg.org/cgi-bin/viewcvs.cgi/trunk/doc/DETAILS?rev=289
- Trust settings do NOT determine if a signature is good or not! That is reserved for GOOD_SIG!
- '''
- return
-
- def TRUST_UNDEFINED(self, value):
- self.error = _("Trust undefined")
- #print value.split()
- #raise AssertionError, "File not properly loaded for signature."
-
- def is_valid(self):
- return self.valid
-
-class ImportResult:
- "Used to hold information about a key import result"
-
- counts = '''count no_user_id imported imported_rsa unchanged
- n_uids n_subk n_sigs n_revoc sec_read sec_imported
- sec_dups not_imported'''.split()
- def __init__(self):
- self.imported = []
- self.results = []
- for result in self.counts:
- setattr(self, result, None)
-
- def NODATA(self, value):
- self.results.append({'fingerprint': None,
- 'problem': '0', 'text': 'No valid data found'})
- def IMPORTED(self, value):
- # this duplicates info we already see in import_ok and import_problem
- pass
- ok_reason = {
- '0': 'Not actually changed',
- '1': 'Entirely new key',
- '2': 'New user IDs',
- '4': 'New signatures',
- '8': 'New subkeys',
- '16': 'Contains private key',
- }
- def IMPORT_OK(self, value):
- reason, fingerprint = value.split()
- self.results.append({'fingerprint': fingerprint,
- 'ok': reason, 'text': self.ok_reason[reason]})
- problem_reason = {
- '0': 'No specific reason given',
- '1': 'Invalid Certificate',
- '2': 'Issuer Certificate missing',
- '3': 'Certificate Chain too long',
- '4': 'Error storing certificate',
- }
- def IMPORT_PROBLEM(self, value):
- try:
- reason, fingerprint = value.split()
- except:
- reason = value
- fingerprint = '<unknown>'
- self.results.append({'fingerprint': fingerprint,
- 'problem': reason, 'text': self.problem_reason[reason]})
- def IMPORT_RES(self, value):
- import_res = value.split()
- for i in range(len(self.counts)):
- setattr(self, self.counts[i], int(import_res[i]))
-
- def summary(self):
- l = []
- l.append('%d imported'%self.imported)
- if self.not_imported:
- l.append('%d not imported'%self.not_imported)
- return ', '.join(l)
-
-class ListResult:
- ''' Parse a --list-keys output
-
- Handle pub and uid (relating the latter to the former).
-
- Don't care about (info from src/DETAILS):
-
- crt = X.509 certificate
- crs = X.509 certificate and private key available
- sub = subkey (secondary key)
- sec = secret key
- ssb = secret subkey (secondary key)
- uat = user attribute (same as user id except for field 10).
- sig = signature
- rev = revocation signature
- fpr = fingerprint: (fingerprint is in field 10)
- pkd = public key data (special field format, see below)
- grp = reserved for gpgsm
- rvk = revocation key
- '''
- def __init__(self):
- self.pub_keys = []
- self.pk = None
-
- def pub(self, args):
- keyid = args[4]
- date = args[5]
- uid = args[9]
- self.pk = {'keyid': keyid, 'date': date, 'uids': [uid]}
- self.pub_keys.append(self.pk)
-
- def uid(self, args):
- self.pk['uids'].append(args[9])
-
-class EncryptedMessage:
- ''' Handle a --encrypt command
- '''
- def __init__(self):
- self.data = ''
-
- def BEGIN_ENCRYPTION(self, value):
- pass
- def END_ENCRYPTION(self, value):
- pass
-
-class GPGSubprocess:
- def __init__(self, gpg_binary=None, keyring=None):
- """Initialize an object instance. Options are:
-
- gpg_binary -- full pathname for GPG binary. If not supplied,
- the current value of PATH will be searched, falling back to the
- DEFAULT_PATH class variable if PATH isn't available.
-
- keyring -- full pathname to the public keyring to use in place of
- the default "~/.gnupg/pubring.gpg".
- """
- # If needed, look for the gpg binary along the path
- if gpg_binary is None:
- path = DEFAULT_PATH
- if os.environ.has_key('PATH'):
- temppath = os.environ['PATH']
- path.extend(temppath.split(os.pathsep))
- #else:
- # path = self.DEFAULT_PATH
-
- for pathdir in path:
- pathdir = os.path.expandvars(pathdir)
- fullname = os.path.join(pathdir, 'gpg')
- if os.path.exists(fullname):
- gpg_binary = fullname
- break
-
- if os.path.exists(fullname + ".exe"):
- gpg_binary = fullname + ".exe"
- break
- else:
- raise ValueError, (_("Couldn't find 'gpg' binary on path %s.")
- % repr(path) )
-
- self.gpg_binary = "\"" + gpg_binary + "\""
- self.keyring = keyring
-
- def _open_subprocess(self, *args):
- # Internal method: open a pipe to a GPG subprocess and return
- # the file objects for communicating with it.
- cmd = [self.gpg_binary, '--status-fd 2']
- if self.keyring:
- cmd.append('--keyring "%s" --no-default-keyring'% self.keyring)
-
- cmd.extend(args)
- cmd = ' '.join(cmd)
-
- #print cmd
- shell = True
- if os.name == 'nt':
- shell = False
-
- # From: http://www.py2exe.org/index.cgi/Py2ExeSubprocessInteractions
- creationflags = 0
- try:
- creationflags = win32process.CREATE_NO_WINDOW
- except NameError: pass
-
- process = subprocess.Popen(cmd, shell=shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags = creationflags)
- #child_stdout, child_stdin, child_stderr = #popen2.popen3(cmd)
- #return child_stdout, child_stdin, child_stderr
- #print process.stderr
- return process.stdout, process.stdin, process.stderr
-
- def _read_response(self, child_stdout, response):
- # Internal method: reads all the output from GPG, taking notice
- # only of lines that begin with the magic [GNUPG:] prefix.
- #
- # Calls methods on the response object for each valid token found,
- # with the arg being the remainder of the status line.
- while 1:
- line = child_stdout.readline()
- #print line
- if line == "": break
- line = line.rstrip()
- if line[0:9] == '[GNUPG:] ':
- # Chop off the prefix
- line = line[9:]
- L = line.split(None, 1)
- keyword = L[0]
- if len(L) > 1:
- value = L[1]
- else:
- value = ""
- getattr(response, keyword)(value)
-
- def _handle_gigo(self, args, file, result):
- # Handle a basic data call - pass data to GPG, handle the output
- # including status information. Garbage In, Garbage Out :)
- child_stdout, child_stdin, child_stderr = self._open_subprocess(*args)
-
- # Copy the file to the GPG subprocess
- while 1:
- data = file.read(1024)
- if data == "": break
- child_stdin.write(data)
- child_stdin.close()
-
- # Get the response information
- resp = self._read_response(child_stderr, result)
-
- # Read the contents of the file from GPG's stdout
- result.data = ""
- while 1:
- data = child_stdout.read(1024)
- if data == "": break
- result.data = result.data + data
-
- return result
-
-
- #
- # SIGNATURE VERIFICATION METHODS
- #
- def verify(self, data):
- "Verify the signature on the contents of the string 'data'"
- file = StringIO.StringIO(data)
- return self.verify_file(file)
-
- def verify_file(self, file):
- "Verify the signature on the contents of the file-like object 'file'"
- sig = Signature()
- self._handle_gigo(['--verify -'], file, sig)
- return sig
-
- def verify_file_detached(self, filename, sigtext):
- sig = Signature()
- sigfile = StringIO.StringIO(sigtext)
- self._handle_gigo(["--verify - \"%s\"" % filename], sigfile, sig)
- return sig
-
- #
- # KEY MANAGEMENT
- #
- def import_key(self, key_data):
- ''' import the key_data into our keyring '''
- child_stdout, child_stdin, child_stderr = \
- self._open_subprocess('--import')
-
- child_stdin.write(key_data)
- child_stdin.close()
-
- # Get the response information
- result = ImportResult()
- resp = self._read_response(child_stderr, result)
-
- return result
-
- def list_keys(self):
- ''' list the keys currently in the keyring '''
- child_stdout, child_stdin, child_stderr = \
- self._open_subprocess('--list-keys --with-colons')
- child_stdin.close()
-
- # TODO: there might be some status thingumy here I should handle...
-
- # Get the response information
- result = ListResult()
- valid_keywords = 'pub uid'.split()
- while 1:
- line = child_stdout.readline()
- if not line:
- break
- L = line.strip().split(':')
- if not L:
- continue
- keyword = L[0]
- if keyword in valid_keywords:
- getattr(result, keyword)(L)
-
- return result
-
- #
- # ENCRYPTING DATA
- #
- def encrypt_file(self, file, recipients):
- "Encrypt the message read from the file-like object 'file'"
- args = ['--encrypt --armor']
- for recipient in recipients:
- args.append('--recipient %s'%recipient)
- result = EncryptedMessage()
- self._handle_gigo(args, file, result)
- return result
-
- def encrypt(self, data, recipients):
- "Encrypt the message contained in the string 'data'"
- file = StringIO.StringIO(data)
- return self.encrypt_file(file, recipients)
-
-
- # Not yet implemented, because I don't need these methods
- # The methods certainly don't have all the parameters they'd need.
- def sign(self, data):
- "Sign the contents of the string 'data'"
- pass
-
- def sign_file(self, file):
- "Sign the contents of the file-like object 'file'"
- pass
-
- def decrypt_file(self, file):
- "Decrypt the message read from the file-like object 'file'"
- pass
-
- def decrypt(self, data):
- "Decrypt the message contained in the string 'data'"
- pass
-
-##
-##if __name__ == '__main__':
-## import sys
-## if len(sys.argv) == 1:
-## print 'Usage: GPG.py <signed file>'
-## sys.exit()
-##
-## obj = GPGSubprocess()
-## file = open(sys.argv[1], 'rb')
-## sig = obj.verify_file( file )
-## print sig.__dict__
-GPG = Dummy()
-GPG.DEFAULT_PATH = DEFAULT_PATH
-GPG.EncryptedMessage = EncryptedMessage
-GPG.GPGSubprocess = GPGSubprocess
-GPG.ImportResult = ImportResult
-GPG.ListResult = ListResult
-GPG.Signature = Signature
-GPG._ = _
-GPG.translate = translate
+download = Dummy()
+download.CONNECT_RETRY_COUNT = CONNECT_RETRY_COUNT
+download.COUNTRY = COUNTRY
+download.DEFAULT_CHUNK_SIZE = DEFAULT_CHUNK_SIZE
+download.DecompressFile = DecompressFile
+download.FTP = FTP
+download.FTP_PROXY = FTP_PROXY
+download.FileResume = FileResume
+download.Ftp_Host = Ftp_Host
+download.Ftp_Host_Segment = Ftp_Host_Segment
+download.HOST_LIMIT = HOST_LIMIT
+download.HTTPConnection = HTTPConnection
+download.HTTPSConnection = HTTPSConnection
+download.HTTPS_PROXY = HTTPS_PROXY
+download.HTTP_PROXY = HTTP_PROXY
+download.Host_Base = Host_Base
+download.Host_Segment = Host_Segment
+download.Http_Host = Http_Host
+download.Http_Host_Segment = Http_Host_Segment
+download.LANG = LANG
+download.LIMIT_PER_HOST = LIMIT_PER_HOST
+download.MAX_CHUNKS = MAX_CHUNKS
+download.MAX_REDIRECTS = MAX_REDIRECTS
+download.MIME_TYPE = MIME_TYPE
+download.Manager = Manager
+download.NormalManager = NormalManager
+download.OS = OS
+download.PGP_KEY_DIR = PGP_KEY_DIR
+download.PGP_KEY_EXTS = PGP_KEY_EXTS
+download.PGP_KEY_STORE = PGP_KEY_STORE
+download.PROTOCOLS = PROTOCOLS
+download.SEGMENTED = SEGMENTED
+download.Segment_Manager = Segment_Manager
+download.ThreadSafeFile = ThreadSafeFile
+download.URLManager = URLManager
+download.USER_AGENT = USER_AGENT
+download._ = _
+download.complete_url = complete_url
+download.download_file = download_file
+download.download_file_node = download_file_node
+download.download_file_urls = download_file_urls
+download.download_metalink = download_metalink
+download.filecheck = filecheck
+download.filehash = filehash
+download.get = get
+download.get_key_value = get_key_value
+download.get_proxy_info = get_proxy_info
+download.get_transport = get_transport
+download.is_local = is_local
+download.is_remote = is_remote
+download.lang = lang
+download.path_join = path_join
+download.pgp_verify_sig = pgp_verify_sig
+download.reg_query = reg_query
+download.set_proxies = set_proxies
+download.sort_prefs = sort_prefs
+download.start_sort = start_sort
+download.translate = translate
+download.urlhead = urlhead
+download.urlopen = urlopen
+download.urlretrieve = urlretrieve
+download.verify_checksum = verify_checksum
+download.verify_chunk_checksum = verify_chunk_checksum
+'''
+From sourceforge pycrypto project:
+http://sourceforge.net/projects/pycrypto/
+
+Code for running GnuPG from Python and dealing with the results.
+
+Detailed info about the format of data to/from gpg may be obtained from the
+file DETAILS in the gnupg source.
+
+Dependencies
+ - GPG must be installed
+ - http://www.gnupg.org
+ - http://www.gpg4win.org
+'''
+
+__rcsid__ = '$Id: GPG.py,v 1.3 2003/11/23 15:03:15 akuchling Exp $'
+
+
+
+def translate():
+ '''
+ Setup translation path
+ '''
+ if __name__=="__main__":
+ try:
+ base = os.path.basename(__file__)[:-3]
+ localedir = os.path.join(os.path.dirname(__file__), "locale")
+ except NameError:
+ base = os.path.basename(sys.executable)[:-4]
+ localedir = os.path.join(os.path.dirname(sys.executable), "locale")
+ else:
+ temp = __name__.split(".")
+ base = temp[-1]
+ localedir = os.path.join("/".join(["%s" % k for k in temp[:-1]]), "locale")
+
+ #print base, localedir
+ t = gettext.translation(base, localedir, [locale.getdefaultlocale()[0]], None, 'en')
+ return t.ugettext
+
+_ = translate()
+
+# Default path used for searching for the GPG binary
+DEFAULT_PATH = ['/bin', '/usr/bin', '/usr/local/bin', \
+ '${PROGRAMFILES}\\GNU\\GnuPG', '${PROGRAMFILES(X86)}\\GNU\\GnuPG',\
+ '${SYSTEMDRIVE}\\cygwin\\bin', '${SYSTEMDRIVE}\\cygwin\\usr\\bin', '${SYSTEMDRIVE}\\cygwin\\usr\\local\\bin']
+
+class Signature:
+ "Used to hold information about a signature result"
+
+ def __init__(self):
+ self.valid = 0
+ self.fingerprint = self.creation_date = self.timestamp = None
+ self.signature_id = self.key_id = None
+ self.username = None
+ self.error = None
+ self.nopubkey = False
+
+ def BADSIG(self, value):
+ self.error = "BADSIG"
+ self.valid = 0
+ self.key_id, self.username = value.split(None, 1)
+ def GOODSIG(self, value):
+ self.valid = 1
+ #self.error = "GOODSIG"
+ self.key_id, self.username = value.split(None, 1)
+ def VALIDSIG(self, value):
+ #print value
+ #self.valid = 1
+ #self.error = "VALID_SIG"
+ self.fingerprint, self.creation_date, self.timestamp, other = value.split(" ", 3)
+ def SIG_ID(self, value):
+ #self.error = "SIG_ID"
+ self.signature_id, self.creation_date, self.timestamp = value.split(" ", 2)
+ def NODATA(self, value):
+ self.error = _("File not properly loaded for signature.")
+ def ERRSIG(self, value):
+ #print value
+ self.error = _("Signature error.")
+ def NO_PUBKEY(self, value):
+ self.key_id = value
+ self.nopubkey = True
+ self.error = _("Signature error, missing public key with id 0x%s.") % value[-8:]
+
+ def TRUST_ULTIMATE(self, value):
+ '''
+ see http://cvs.gnupg.org/cgi-bin/viewcvs.cgi/trunk/doc/DETAILS?rev=289
+ Trust settings do NOT determine if a signature is good or not! That is reserved for GOOD_SIG!
+ '''
+ return
+
+ def TRUST_UNDEFINED(self, value):
+ self.error = _("Trust undefined")
+ #print value.split()
+ #raise AssertionError, "File not properly loaded for signature."
+
+ def is_valid(self):
+ return self.valid
+
+class ImportResult:
+ "Used to hold information about a key import result"
+
+ counts = '''count no_user_id imported imported_rsa unchanged
+ n_uids n_subk n_sigs n_revoc sec_read sec_imported
+ sec_dups not_imported'''.split()
+ def __init__(self):
+ self.imported = []
+ self.results = []
+ for result in self.counts:
+ setattr(self, result, None)
+
+ def NODATA(self, value):
+ self.results.append({'fingerprint': None,
+ 'problem': '0', 'text': 'No valid data found'})
+ def IMPORTED(self, value):
+ # this duplicates info we already see in import_ok and import_problem
+ pass
+ ok_reason = {
+ '0': 'Not actually changed',
+ '1': 'Entirely new key',
+ '2': 'New user IDs',
+ '4': 'New signatures',
+ '8': 'New subkeys',
+ '16': 'Contains private key',
+ }
+ def IMPORT_OK(self, value):
+ reason, fingerprint = value.split()
+ self.results.append({'fingerprint': fingerprint,
+ 'ok': reason, 'text': self.ok_reason[reason]})
+ problem_reason = {
+ '0': 'No specific reason given',
+ '1': 'Invalid Certificate',
+ '2': 'Issuer Certificate missing',
+ '3': 'Certificate Chain too long',
+ '4': 'Error storing certificate',
+ }
+ def IMPORT_PROBLEM(self, value):
+ try:
+ reason, fingerprint = value.split()
+ except:
+ reason = value
+ fingerprint = '<unknown>'
+ self.results.append({'fingerprint': fingerprint,
+ 'problem': reason, 'text': self.problem_reason[reason]})
+ def IMPORT_RES(self, value):
+ import_res = value.split()
+ for i in range(len(self.counts)):
+ setattr(self, self.counts[i], int(import_res[i]))
+
+ def summary(self):
+ l = []
+ l.append('%d imported'%self.imported)
+ if self.not_imported:
+ l.append('%d not imported'%self.not_imported)
+ return ', '.join(l)
+
+class ListResult:
+ ''' Parse a --list-keys output
+
+ Handle pub and uid (relating the latter to the former).
+
+ Don't care about (info from src/DETAILS):
+
+ crt = X.509 certificate
+ crs = X.509 certificate and private key available
+ sub = subkey (secondary key)
+ sec = secret key
+ ssb = secret subkey (secondary key)
+ uat = user attribute (same as user id except for field 10).
+ sig = signature
+ rev = revocation signature
+ fpr = fingerprint: (fingerprint is in field 10)
+ pkd = public key data (special field format, see below)
+ grp = reserved for gpgsm
+ rvk = revocation key
+ '''
+ def __init__(self):
+ self.pub_keys = []
+ self.pk = None
+
+ def pub(self, args):
+ keyid = args[4]
+ date = args[5]
+ uid = args[9]
+ self.pk = {'keyid': keyid, 'date': date, 'uids': [uid]}
+ self.pub_keys.append(self.pk)
+
+ def uid(self, args):
+ self.pk['uids'].append(args[9])
+
+class EncryptedMessage:
+ ''' Handle a --encrypt command
+ '''
+ def __init__(self):
+ self.data = ''
+
+ def BEGIN_ENCRYPTION(self, value):
+ pass
+ def END_ENCRYPTION(self, value):
+ pass
+
+class GPGSubprocess:
+ def __init__(self, gpg_binary=None, keyring=None):
+ """Initialize an object instance. Options are:
+
+ gpg_binary -- full pathname for GPG binary. If not supplied,
+ the current value of PATH will be searched, falling back to the
+ DEFAULT_PATH class variable if PATH isn't available.
+
+ keyring -- full pathname to the public keyring to use in place of
+ the default "~/.gnupg/pubring.gpg".
+ """
+ # If needed, look for the gpg binary along the path
+ if gpg_binary is None:
+ path = DEFAULT_PATH
+ if os.environ.has_key('PATH'):
+ temppath = os.environ['PATH']
+ path.extend(temppath.split(os.pathsep))
+ #else:
+ # path = self.DEFAULT_PATH
+
+ for pathdir in path:
+ pathdir = os.path.expandvars(pathdir)
+ fullname = os.path.join(pathdir, 'gpg')
+ if os.path.exists(fullname):
+ gpg_binary = fullname
+ break
+
+ if os.path.exists(fullname + ".exe"):
+ gpg_binary = fullname + ".exe"
+ break
+ else:
+ raise ValueError, (_("Couldn't find 'gpg' binary on path %s.")
+ % repr(path) )
+
+ self.gpg_binary = "\"" + gpg_binary + "\""
+ self.keyring = keyring
+
+ def _open_subprocess(self, *args):
+ # Internal method: open a pipe to a GPG subprocess and return
+ # the file objects for communicating with it.
+ cmd = [self.gpg_binary, '--status-fd 2']
+ if self.keyring:
+ cmd.append('--keyring "%s" --no-default-keyring'% self.keyring)
+
+ cmd.extend(args)
+ cmd = ' '.join(cmd)
+
+ #print cmd
+ shell = True
+ if os.name == 'nt':
+ shell = False
+
+ # From: http://www.py2exe.org/index.cgi/Py2ExeSubprocessInteractions
+ creationflags = 0
+ try:
+ creationflags = win32process.CREATE_NO_WINDOW
+ except NameError: pass
+
+ process = subprocess.Popen(cmd, shell=shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, creationflags = creationflags)
+ #child_stdout, child_stdin, child_stderr = #popen2.popen3(cmd)
+ #return child_stdout, child_stdin, child_stderr
+ #print process.stderr
+ return process.stdout, process.stdin, process.stderr
+
+ def _read_response(self, child_stdout, response):
+ # Internal method: reads all the output from GPG, taking notice
+ # only of lines that begin with the magic [GNUPG:] prefix.
+ #
+ # Calls methods on the response object for each valid token found,
+ # with the arg being the remainder of the status line.
+ while 1:
+ line = child_stdout.readline()
+ #print line
+ if line == "": break
+ line = line.rstrip()
+ if line[0:9] == '[GNUPG:] ':
+ # Chop off the prefix
+ line = line[9:]
+ L = line.split(None, 1)
+ keyword = L[0]
+ if len(L) > 1:
+ value = L[1]
+ else:
+ value = ""
+ getattr(response, keyword)(value)
+
+ def _handle_gigo(self, args, file, result):
+ # Handle a basic data call - pass data to GPG, handle the output
+ # including status information. Garbage In, Garbage Out :)
+ child_stdout, child_stdin, child_stderr = self._open_subprocess(*args)
+
+ # Copy the file to the GPG subprocess
+ while 1:
+ data = file.read(1024)
+ if data == "": break
+ child_stdin.write(data)
+ child_stdin.close()
+
+ # Get the response information
+ resp = self._read_response(child_stderr, result)
+
+ # Read the contents of the file from GPG's stdout
+ result.data = ""
+ while 1:
+ data = child_stdout.read(1024)
+ if data == "": break
+ result.data = result.data + data
+
+ return result
+
+
+ #
+ # SIGNATURE VERIFICATION METHODS
+ #
+ def verify(self, data):
+ "Verify the signature on the contents of the string 'data'"
+ file = StringIO.StringIO(data)
+ return self.verify_file(file)
+
+ def verify_file(self, file):
+ "Verify the signature on the contents of the file-like object 'file'"
+ sig = Signature()
+ self._handle_gigo(['--verify -'], file, sig)
+ return sig
+
+ def verify_file_detached(self, filename, sigtext):
+ sig = Signature()
+ sigfile = StringIO.StringIO(sigtext)
+ self._handle_gigo(["--verify - \"%s\"" % filename], sigfile, sig)
+ return sig
+
+ #
+ # KEY MANAGEMENT
+ #
+ def import_key(self, key_data):
+ ''' import the key_data into our keyring '''
+ child_stdout, child_stdin, child_stderr = \
+ self._open_subprocess('--import')
+
+ child_stdin.write(key_data)
+ child_stdin.close()
+
+ # Get the response information
+ result = ImportResult()
+ resp = self._read_response(child_stderr, result)
+
+ return result
+
+ def list_keys(self):
+ ''' list the keys currently in the keyring '''
+ child_stdout, child_stdin, child_stderr = \
+ self._open_subprocess('--list-keys --with-colons')
+ child_stdin.close()
+
+ # TODO: there might be some status thingumy here I should handle...
+
+ # Get the response information
+ result = ListResult()
+ valid_keywords = 'pub uid'.split()
+ while 1:
+ line = child_stdout.readline()
+ if not line:
+ break
+ L = line.strip().split(':')
+ if not L:
+ continue
+ keyword = L[0]
+ if keyword in valid_keywords:
+ getattr(result, keyword)(L)
+
+ return result
+
+ #
+ # ENCRYPTING DATA
+ #
+ def encrypt_file(self, file, recipients):
+ "Encrypt the message read from the file-like object 'file'"
+ args = ['--encrypt --armor']
+ for recipient in recipients:
+ args.append('--recipient %s'%recipient)
+ result = EncryptedMessage()
+ self._handle_gigo(args, file, result)
+ return result
+
+ def encrypt(self, data, recipients):
+ "Encrypt the message contained in the string 'data'"
+ file = StringIO.StringIO(data)
+ return self.encrypt_file(file, recipients)
+
+
+ # Not yet implemented, because I don't need these methods
+ # The methods certainly don't have all the parameters they'd need.
+ def sign(self, data):
+ "Sign the contents of the string 'data'"
+ pass
+
+ def sign_file(self, file):
+ "Sign the contents of the file-like object 'file'"
+ pass
+
+ def decrypt_file(self, file):
+ "Decrypt the message read from the file-like object 'file'"
+ pass
+
+ def decrypt(self, data):
+ "Decrypt the message contained in the string 'data'"
+ pass
+
+##
+##if __name__ == '__main__':
+## import sys
+## if len(sys.argv) == 1:
+## print 'Usage: GPG.py <signed file>'
+## sys.exit()
+##
+## obj = GPGSubprocess()
+## file = open(sys.argv[1], 'rb')
+## sig = obj.verify_file( file )
+## print sig.__dict__
+GPG = Dummy()
+GPG.DEFAULT_PATH = DEFAULT_PATH
+GPG.EncryptedMessage = EncryptedMessage
+GPG.GPGSubprocess = GPGSubprocess
+GPG.ImportResult = ImportResult
+GPG.ListResult = ListResult
+GPG.Signature = Signature
+GPG._ = _
+GPG.translate = translate
#!/usr/bin/env python
########################################################################
#
@@ -2846,7 +3210,7 @@
# URL: http://www.nabber.org/projects/
# E-mail: webmaster nabber org
#
-# Copyright: (C) 2007-2008, Neil McNab
+# Copyright: (C) 2007-2008, Hampus Wessman, Neil McNab
# License: GNU General Public License Version 2
# (http://www.gnu.org/copyleft/gpl.html)
#
@@ -2864,121 +3228,640 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-# Filename: $URL$
-# Last Updated: $Date$
-# Author(s): Neil McNab
+# Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/xmlutils.py $
+# Last Updated: $Date: 2008-07-15 06:54:02 +0200 (tis, 15 jul 2008) $
+# Author(s): Hampus Wessman, Neil McNab
#
# Description:
# Functions for accessing XML formatted data.
#
########################################################################
+##import xml.dom.minidom
+##
+##def get_child_nodes(rootnode, subtag):
+## '''
+## Extract specific child tag names.
+## First parameter, XML node
+## Second parameter, name (string) of child node(s) to look for
+## Returns a list of child nodes
+## '''
+## children = []
+##
+## try:
+## rootnode.childNodes
+## except AttributeError: return children
+##
+## for childnode in rootnode.childNodes:
+## if childnode.nodeName == subtag:
+## children.append(childnode)
+##
+## return children
+##
+##def get_subnodes(rootnode, subtags):
+## '''
+## First parameter, XML node
+## Second parameter, tree in array form for names (string) of child node(s) to look for
+## Returns a list of child nodes (searched recursively)
+## '''
+## children = []
+## child_nodes = get_child_nodes(rootnode, subtags[0])
+## if (len(subtags) == 1):
+## return child_nodes
+##
+## for child in child_nodes:
+## child_nodes = get_subnodes(child, subtags[1:])
+## children.extend(child_nodes)
+##
+## return children
+##
+##def get_texttag_values(xmlfile, tag):
+## '''
+## Get values for selected tags in an XML file
+## First parameter, XML file to parse
+## Second parameter, tag to search for in XML file
+## Returns a list of text values found
+##
+## raise ExpatError if the file cannot be parsed
+## '''
+## looking_for = []
+## try:
+## datasource = open(xmlfile)
+## except IOError:
+## return looking_for
+##
+## dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+## datasource.close()
+## return get_xml_tag_strings(dom2, tag)
+##
+##def get_tags(xmlfile, tag):
+## '''
+## raise ExpatError if the file cannot be parsed
+## '''
+## looking_for = []
+## try:
+## datasource = open(xmlfile)
+## except IOError:
+## return looking_for
+##
+## dom2 = xml.dom.minidom.parse(datasource) # parse an open file
+## datasource.close()
+## return get_subnodes(dom2, tag)
+##
+##def get_xml_tag_strings(item, tag):
+## '''
+## Converts an XML node to a list of text for specified tag
+## First parameter, XML node object
+## Second parameter, tag tree names to search for
+## Returns a list of text value for this tag
+## '''
+## return get_xml_item_strings(get_subnodes(item, tag))
+##
+##def get_xml_item_strings(items):
+## '''
+## Converts XML nodes to text
+## First parameter, list of XML Node objects
+## Returns, list of strings as extracted from text nodes in items
+## '''
+## stringlist = []
+## for myitem in items:
+## stringlist.append(myitem.firstChild.nodeValue.strip())
+## return stringlist
+##
+##def get_attr_from_item(item, name):
+## '''
+## Extract the attribute from the XML node
+## First parameter, item XML node
+## Returns value of the attribute
+## '''
+## local_file = ""
+##
+## for i in range(item.attributes.length):
+## if item.attributes.item(i).name == name:
+## local_file = item.attributes.item(i).value
+##
+## return local_file
-def get_child_nodes(rootnode, subtag):
- '''
- Extract specific child tag names.
- First parameter, XML node
- Second parameter, name (string) of child node(s) to look for
- Returns a list of child nodes
- '''
- children = []
- for childnode in rootnode.childNodes:
- if childnode.nodeName == subtag:
- children.append(childnode)
-
- return children
+# Copyright (c) 2007-2008 Hampus Wessman, Neil McNab.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-def get_subnodes(rootnode, subtags):
- '''
- First parameter, XML node
- Second parameter, tree in array form for names (string) of child node(s) to look for
- Returns a list of child nodes (searched recursively)
- '''
- children = []
- child_nodes = get_child_nodes(rootnode, subtags[0])
- if (len(subtags) == 1):
- return child_nodes
+
# Version of the Metalink Editor code this section was derived from;
# embedded in the generator="..." attribute by Metalink.generate().
current_version = "1.1.0"
+
def get_first(x):
    """Return x[0] if 'x' is indexable and has that item, else 'x' itself.

    Normalizes values that may arrive either as a scalar or wrapped in a
    sequence (e.g. results returned from progress-listener callbacks).
    """
    try:
        return x[0]
    except (TypeError, IndexError, KeyError):
        # Fixed: was a bare 'except' that also swallowed KeyboardInterrupt
        # and SystemExit. x[0] can only raise TypeError (not indexable),
        # IndexError (empty sequence) or KeyError (mapping without key 0).
        return x
+
class Resource:
    """One download source (mirror/peer URL) of a metalink <file> entry.

    Holds the URL together with its metalink attributes (type, location,
    preference, maxconnections) and can validate them, collecting any
    problems found into self.errors.
    """

    def __init__(self, url, type="default", location="", preference="", maxconnections="", attrs=None):
        """Create a resource for 'url'.

        type           -- URL type; "default"/empty means derive it from the
                          URL (".torrent" -> "bittorrent", else the URI scheme)
        location       -- two-letter ISO 3166 country code (may be empty)
        preference     -- priority 0-100, string or int (may be empty)
        maxconnections -- max simultaneous connections; ""/"-" means no limit
        attrs          -- optional dict of extra attributes set verbatim on
                          the instance
        """
        self.errors = []  # validation messages collected by validate()
        self.url = url
        self.location = location
        if type == "default" or type.strip() == "":
            if url.endswith(".torrent"):
                self.type = "bittorrent"
            else:
                # Use the URI scheme (text before the first ':') as the type.
                chars = url.find(":")
                self.type = url[:chars]
        else:
            self.type = type
        self.preference = str(preference)
        if maxconnections.strip() == "-" or maxconnections.strip() == "":
            self.maxconnections = "-"
        else:
            self.maxconnections = maxconnections
        # Fixed: 'attrs' previously defaulted to a shared mutable dict ({}).
        # Behavior is unchanged for every caller that passed a dict.
        if attrs is not None:
            for attr in attrs:
                setattr(self, attr, attrs[attr])

    def validate(self):
        """Check url/location/preference/maxconnections.

        Appends human-readable messages to self.errors and returns True
        only when everything is valid.
        """
        valid = True
        if self.url.strip() == "":
            self.errors.append("Empty URLs are not allowed!")
            valid = False
        allowed_types = ["ftp", "ftps", "http", "https", "rsync", "bittorrent", "magnet", "ed2k"]
        if not self.type in allowed_types:
            self.errors.append("Invalid URL: " + self.url + '.')
            valid = False
        elif self.type in ['http', 'https', 'ftp', 'ftps', 'bittorrent']:
            # Download URLs must look like scheme://host.domain/path
            m = re.search(r'\w+://.+\..+/.*', self.url)
            if m == None:
                self.errors.append("Invalid URL: " + self.url + '.')
                valid = False
        if self.location.strip() != "":
            # ISO 3166-1 alpha-2 country codes (plus legacy "UK").
            iso_locations = ["AF", "AX", "AL", "DZ", "AS", "AD", "AO", "AI", "AQ", "AG", "AR", "AM", "AW", "AU", "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ", "BM", "BT", "BO", "BA", "BW", "BV", "BR", "IO", "BN", "BG", "BF", "BI", "KH", "CM", "CA", "CV", "KY", "CF", "TD", "CL", "CN", "CX", "CC", "CO", "KM", "CG", "CD", "CK", "CR", "CI", "HR", "CU", "CY", "CZ", "DK", "DJ", "DM", "DO", "EC", "EG", "SV", "GQ", "ER", "EE", "ET", "FK", "FO", "FJ", "FI", "FR", "GF", "PF", "TF", "GA", "GM", "GE", "DE", "GH", "GI", "GR", "GL", "GD", "GP", "GU", "GT", "GG", "GN", "GW", "GY", "HT", "HM", "VA", "HN", "HK", "HU", "IS", "IN", "ID", "IR", "IQ", "IE", "IM", "IL", "IT", "JM", "JP", "JE", "JO", "KZ", "KE", "KI", "KP", "KR", "KW", "KG", "LA", "LV", "LB", "LS", "LR", "LY", "LI", "LT", "LU", "MO", "MK", "MG", "MW", "MY", "MV", "ML", "MT", "MH", "MQ", "MR", "MU", "YT", "MX", "FM", "MD", "MC", "MN", "ME", "MS", "MA", "MZ", "MM", "NA", "NR", "NP", "NL", "AN", "NC", "NZ", "NI", "NE", "NG", "NU", "NF", "MP", "NO", "OM", "PK", "PW", "PS", "PA", "PG", "PY", "PE", "PH", "PN", "PL", "PT", "PR", "QA", "RE", "RO", "RU", "RW", "SH", "KN", "LC", "PM", "VC", "WS", "SM", "ST", "SA", "SN", "RS", "SC", "SL", "SG", "SK", "SI", "SB", "SO", "ZA", "GS", "ES", "LK", "SD", "SR", "SJ", "SZ", "SE", "CH", "SY", "TW", "TJ", "TZ", "TH", "TL", "TG", "TK", "TO", "TT", "TN", "TR", "TM", "TC", "TV", "UG", "UA", "AE", "GB", "US", "UM", "UY", "UZ", "VU", "VE", "VN", "VG", "VI", "WF", "EH", "YE", "ZM", "ZW", "UK"]
            if not self.location.upper() in iso_locations:
                self.errors.append(self.location + " is not a valid country code.")
                valid = False
        if self.preference != "":
            try:
                pref = int(self.preference)
                if pref < 0 or pref > 100:
                    self.errors.append("Preference must be between 0 and 100, not " + self.preference + '.')
                    valid = False
            except ValueError:
                # Fixed: was a bare 'except'; self.preference is always a
                # str (set via str() in __init__), so int() raises ValueError.
                self.errors.append("Preference must be a number, between 0 and 100.")
                valid = False
        if self.maxconnections.strip() != "" and self.maxconnections.strip() != "-":
            try:
                conns = int(self.maxconnections)
                if conns < 1:
                    self.errors.append("Max connections must be at least 1, not " + self.maxconnections + '.')
                    valid = False
                elif conns > 20:
                    self.errors.append("You probably don't want max connections to be as high as " + self.maxconnections + '!')
                    valid = False
            except ValueError:
                # Fixed: was a bare 'except' (see preference above).
                self.errors.append("Max connections must be a positive integer, not " + self.maxconnections + ".")
                valid = False
        return valid
-def get_texttag_values(xmlfile, tag):
- '''
- Get values for selected tags in an XML file
- First parameter, XML file to parse
- Second parameter, tag to search for in XML file
- Returns a list of text values found
class MetalinkFile:
    """One <file> entry of a metalink.

    Collects the file's metadata (size, language, os), its whole-file and
    piecewise checksums, and its list of mirror Resources; can scan a local
    file to compute the hashes, validate the collected data, and serialize
    itself as metalink 3.0 XML.
    """

    def __init__(self, filename, attrs = {}):
        # NOTE(review): 'attrs' is a mutable default dict; it is only read
        # here, never mutated, so sharing is harmless today but fragile.
        self.filename = filename
        self.errors = []     # validation messages collected by validate()
#        self.hash_md5 = ""
#        self.hash_sha1 = ""
#        self.hash_sha256 = ""
        self.hashlist = {}   # hash name ("md5", "sha1", ...) -> hex digest
        self.pieces = []     # hex digests of the individual pieces
        self.piecelength = 0
        self.piecetype = ""
        self.resources = []  # Resource objects (mirrors/peers)
        self.language = ""
        self.os = ""
        self.size = 0        # int here; scan_file and the XML parser store a str
        self.maxconnections = ""
        # Extra attributes (e.g. XML attributes of the <file> tag) are set
        # verbatim on the instance.
        for attr in attrs:
            setattr(self, attr, attrs[attr])

    def get_filename(self):
        "Return the (base) file name of this entry."
        return self.filename

    def get_checksums(self):
        "Return the dict of whole-file checksums (hash name -> hex digest)."
        return self.hashlist

    def add_checksum(self, name, value):
        "Store a whole-file checksum under hash name 'name'."
        self.hashlist[name] = value

    def set_checksums(self, hashlist):
        "Replace the whole checksum dict."
        self.hashlist = hashlist

    def compare_checksums(self, checksums):
        """Return True if any hash both sides share matches (checked
        strongest-first), False otherwise."""
        for key in ("sha512","sha384","sha256","sha1","md5"):
            try:
                if self.hashlist[key].lower() == checksums[key].lower():
                    return True
            except KeyError: pass
        return False

    def get_piece_dict(self):
        "Return {piece hash type: list of piece digests}."
        temp = {}
        temp[self.piecetype] = self.pieces
        return temp

    def get_url_dict(self):
        "Return {url string: Resource} for all resources."
        temp = {}
        for url in self.resources:
            temp[url.url] = url
        return temp

    def set_size(self, size):
        "Set the file size (coerced to int)."
        self.size = int(size)

    def get_size(self):
        "Return the file size as an int."
        return int(self.size)

    def clear_res(self):
        "Remove all resources."
        self.resources = []

    def add_url(self, url, type="default", location="", preference="", conns="", attrs={}):
        "Append a new Resource built from the given parameters."
        self.resources.append(Resource(url, type, location, preference, conns, attrs))

    def add_res(self, res):
        "Append an existing Resource object."
        self.resources.append(res)

    def scan_file(self, filename, use_chunks=True, max_chunks=255, chunk_size=256, progresslistener=None):
        """Hash the local file 'filename' and store the results on self.

        filename         -- path of the file to scan (basename is stored)
        use_chunks       -- also compute piecewise SHA-1 hashes
        max_chunks       -- upper bound on the number of pieces
        chunk_size       -- minimum piece size in KiB
        progresslistener -- optional object with Update(percent); a false
                            (first) return value cancels the scan

        Returns True on success, False if the listener canceled the scan.
        Python 2 code: relies on print statements and integer division.
        """
        print "\nScanning file..."
        # Filename and size
        self.filename = os.path.basename(filename)
        self.size = os.stat(filename).st_size
        # Calculate piece length: keep doubling it until the piece count is
        # at most max_chunks and each piece is at least chunk_size KiB.
        if use_chunks:
            minlength = chunk_size*1024
            self.piecelength = 1024
            while self.size / self.piecelength > max_chunks or self.piecelength < minlength:
                self.piecelength *= 2
            print "Using piecelength", self.piecelength, "(" + str(self.piecelength / 1024) + " KiB)"
            numpieces = self.size / self.piecelength
            # Piecewise hashing is pointless for fewer than 2 pieces.
            if numpieces < 2: use_chunks = False
        # Hashes
        fp = open(filename, "rb")
        md5hash = md5.new()
        sha1hash = sha.new()
        sha256hash = None
        # Prefer hashlib (replaces the deprecated md5/sha modules and adds
        # SHA-256); fall back to the md5/sha objects created above.
        try:
            md5hash = hashlib.md5()
            sha1hash = hashlib.sha1()
            sha256hash = hashlib.sha256()
        except:
            print "Hashlib not available. No support for SHA-256."
        piecehash = sha.new()   # running SHA-1 of the current piece
        piecenum = 0
        length = 0              # bytes hashed into the current piece so far
        self.pieces = []
        self.piecetype = "sha1"
        # Progress bookkeeping: report roughly once per percent of the
        # expected number of 4 KiB reads.
        num_reads = math.ceil(self.size / 4096.0)
        reads_per_progress = int(math.ceil(num_reads / 100.0))
        reads_left = reads_per_progress
        progress = 0
        while True:
            data = fp.read(4096)
            if data == "": break
            # Progress updating
            if progresslistener:
                reads_left -= 1
                if reads_left <= 0:
                    reads_left = reads_per_progress
                    progress += 1
                    result = progresslistener.Update(progress)
                    # A listener may return a bool or a (bool, ...) tuple;
                    # get_first() normalizes that.
                    if get_first(result) == False:
                        print "Canceling scan!"
                        return False
            # Process the data
            if md5hash != None: md5hash.update(data)
            if sha1hash != None: sha1hash.update(data)
            if sha256hash != None: sha256hash.update(data)
            if use_chunks:
                left = len(data)
                while left > 0:
                    if length + left <= self.piecelength:
                        # The whole remaining read fits in the current piece.
                        piecehash.update(data)
                        length += left
                        left = 0
                    else:
                        # Fill up the current piece; the remainder of this
                        # read starts the next piece.
                        numbytes = self.piecelength - length
                        piecehash.update(data[:numbytes])
                        length += numbytes
                        data = data[numbytes:]
                        left -= numbytes
                    if length == self.piecelength:
                        print "Done with piece hash", len(self.pieces)
                        self.pieces.append(piecehash.hexdigest())
                        piecehash = sha.new()
                        length = 0
        if use_chunks:
            # Flush the final, possibly short, piece.
            if length > 0:
                print "Done with piece hash", len(self.pieces)
                self.pieces.append(piecehash.hexdigest())
                piecehash = sha.new()
            print "Total number of pieces:", len(self.pieces)
        fp.close()
        self.hashlist["md5"] = md5hash.hexdigest()
        self.hashlist["sha1"] = sha1hash.hexdigest()
        if sha256hash != None:
            self.hashlist["sha256"] = sha256hash.hexdigest()
        if len(self.pieces) < 2: self.pieces = []
        # Convert to strings (validate()/generate_file() expect str values)
        self.size = str(self.size)
        self.piecelength = str(self.piecelength)
        print "done"
        if progresslistener: progresslistener.Update(100)
        return True

    def validate(self):
        """Validate checksums, size and maxconnections plus every resource.

        Appends messages to self.errors and returns True only if valid.
        NOTE(review): raises KeyError if "md5"/"sha1" were never stored in
        self.hashlist, and AttributeError if self.size is still the int 0
        (strip() is called on it) — confirm callers always populate these
        as strings (e.g. via scan_file or the XML parser) before validating.
        """
        valid = True
        if len(self.resources) == 0:
            self.errors.append("You need to add at least one URL!")
            valid = False
        if self.hashlist["md5"].strip() != "":
            # An MD5 digest is exactly 32 hex characters.
            m = re.search(r'[^0-9a-fA-F]', self.hashlist["md5"])
            if len(self.hashlist["md5"]) != 32 or m != None:
                self.errors.append("Invalid md5 hash.")
                valid = False
        if self.hashlist["sha1"].strip() != "":
            # A SHA-1 digest is exactly 40 hex characters.
            m = re.search(r'[^0-9a-fA-F]', self.hashlist["sha1"])
            if len(self.hashlist["sha1"]) != 40 or m != None:
                self.errors.append("Invalid sha-1 hash.")
                valid = False
        if self.size.strip() != "":
            try:
                size = int(self.size)
                if size < 0:
                    self.errors.append("File size must be at least 0, not " + self.size + '.')
                    valid = False
            except:
                self.errors.append("File size must be an integer, not " + self.size + ".")
                valid = False
        if self.maxconnections.strip() != "" and self.maxconnections.strip() != "-":
            try:
                conns = int(self.maxconnections)
                if conns < 1:
                    self.errors.append("Max connections must be at least 1, not " + self.maxconnections + '.')
                    valid = False
                elif conns > 20:
                    self.errors.append("You probably don't want max connections to be as high as " + self.maxconnections + '!')
                    valid = False
            except:
                self.errors.append("Max connections must be a positive integer, not " + self.maxconnections + ".")
                valid = False
        return valid

    def validate_url(self, url):
        """Return True if 'url' has an allowed type and a plausible
        scheme://host.domain/path shape (for downloadable types)."""
        if url.endswith(".torrent"):
            type = "bittorrent"
        else:
            chars = url.find(":")
            type = url[:chars]
        allowed_types = ["ftp", "ftps", "http", "https", "rsync", "bittorrent", "magnet", "ed2k"]
        if not type in allowed_types:
            return False
        elif type in ['http', 'https', 'ftp', 'ftps', 'bittorrent']:
            m = re.search(r'\w+://.+\..+/.*', url)
            if m == None:
                return False
        return True

    def generate_file(self):
        "Serialize this entry as a metalink 3.0 <file> XML fragment (str)."
        if self.filename.strip() != "":
            text = '    <file name="' + self.filename + '">\n'
        else:
            text = '    <file>\n'
        # File info
        if self.size != 0:
            text += '      <size>'+str(self.size)+'</size>\n'
        if self.language.strip() != "":
            text += '      <language>'+self.language+'</language>\n'
        if self.os.strip() != "":
            text += '      <os>'+self.os+'</os>\n'
        # Verification: whole-file hashes first, then the piecewise hashes.
#        if self.hashlist["md5"].strip() != "" or self.hashlist["sha1"].strip() != "":
        if len(self.hashlist) > 0 or len(self.pieces) > 0:
            text += '      <verification>\n'
            for key in self.hashlist.keys():
                text += '        <hash type="%s">' % key + self.hashlist[key].lower() + '</hash>\n'
            #if self.hashlist["md5"].strip() != "":
            #    text += '        <hash type="md5">'+self.hashlist["md5"].lower()+'</hash>\n'
            #if self.hashlist["sha1"].strip() != "":
            #    text += '        <hash type="sha1">'+self.hashlist["sha1"].lower()+'</hash>\n'
            #if self.self.hashlist["sha256"].strip() != "":
            #    text += '        <hash type="sha256">'+self.hashlist["sha256"].lower()+'</hash>\n'
            if len(self.pieces) > 1:
                text += '        <pieces type="'+self.piecetype+'" length="'+self.piecelength+'">\n'
                for id in range(len(self.pieces)):
                    text += '          <hash piece="'+str(id)+'">'+self.pieces[id]+'</hash>\n'
                text += '        </pieces>\n'
            text += '      </verification>\n'
        # File list
        if self.maxconnections.strip() != "" and self.maxconnections.strip() != "-":
            maxconns = ' maxconnections="'+self.maxconnections+'"'
        else:
            maxconns = ""
        text += '      <resources'+maxconns+'>\n'
        for res in self.resources:
            details = ''
            if res.location.strip() != "":
                details += ' location="'+res.location.lower()+'"'
            if res.preference.strip() != "": details += ' preference="'+res.preference+'"'
            if res.maxconnections.strip() != ""and res.maxconnections.strip() != "-" : details += ' maxconnections="'+res.maxconnections+'"'
            text += '        <url type="'+res.type+'"'+details+'>'+res.url+'</url>\n'
        text += '      </resources>\n'
        text += '    </file>\n'
        return text
+
class XMLTag:
    """Minimal record of an open XML element: its tag name and attributes.

    Used by Metalink's expat handlers as entries on the element stack.
    """

    def __init__(self, name, attrs={}):
        # NOTE(review): mutable default 'attrs' — only stored and read,
        # never mutated here, but omitting it shares one dict across calls.
        self.name = name
        self.attrs = attrs

    def get_attr(self, name):
        "Return the value of attribute 'name' (raises KeyError if absent)."
        return self.attrs[name]
+
class Metalink:
    """A whole metalink document: publisher/license/release metadata plus a
    list of MetalinkFile entries.

    Parses metalink 3.0 XML with an expat push parser (the three handler
    methods below build the object tree) and can serialize itself back to
    XML via generate().
    """

    def __init__(self):
        self.errors = []   # validation messages collected by validate()
        self.files = []    # MetalinkFile entries, in document order
        self.identity = ""
        self.publisher_name = ""
        self.publisher_url = ""
        self.copyright = ""
        self.description = ""
        self.license_name = ""
        self.license_url = ""
        self.version = ""
        self.origin = ""
        self.type = ""
        self.upgrade = ""
        self.tags = ""

        self.p = xml.parsers.expat.ParserCreate()
        self.parent = []   # stack of currently-open XMLTag elements

        self.p.StartElementHandler = self.start_element
        self.p.EndElementHandler = self.end_element
        self.p.CharacterDataHandler = self.char_data

    def generate(self):
        """Serialize the whole document as UTF-8 encoded metalink 3.0 XML."""
        text = '<?xml version="1.0" encoding="utf-8"?>\n'
        origin = ""
        if self.origin.strip() != "":
            origin = 'origin="'+self.origin+'" '
        typetext = ""
        if self.type.strip() != "":
            typetext = 'type="'+self.type+'" '
        text += '<metalink version="3.0" '+origin + typetext +'generator="Metalink Editor version '+current_version+'" xmlns="http://www.metalinker.org/">\n'
        text += self.generate_info()
        text += '  <files>\n'
        for fileobj in self.files:
            text += fileobj.generate_file()
        text += '  </files>\n'
        text += '</metalink>'
        # Field values may already be unicode or latin-1 bytes; try the
        # direct encode first, then re-decode from latin-1.
        try:
            return text.encode('utf-8')
        except:
            return text.decode('latin1').encode('utf-8')

    def generate_info(self):
        """Return the XML fragment for publisher, license and release info."""
        text = ""
        # Publisher info
        if self.publisher_name.strip() != "" or self.publisher_url.strip() != "":
            text += '  <publisher>\n'
            if self.publisher_name.strip() != "":
                text += '    <name>' + self.publisher_name + '</name>\n'
            if self.publisher_url.strip() != "":
                text += '    <url>' + self.publisher_url + '</url>\n'
            text += '  </publisher>\n'
        # License info
        if self.license_name.strip() != "" or self.license_url.strip() != "":
            text += '  <license>\n'
            if self.license_name.strip() != "":
                text += '    <name>' + self.license_name + '</name>\n'
            if self.license_url.strip() != "":
                text += '    <url>' + self.license_url + '</url>\n'
            text += '  </license>\n'
        # Release info
        if self.identity.strip() != "":
            text += '  <identity>'+self.identity+'</identity>\n'
        if self.version.strip() != "":
            text += '  <version>'+self.version+'</version>\n'
        if self.copyright.strip() != "":
            text += '  <copyright>'+self.copyright+'</copyright>\n'
        if self.description.strip() != "":
            text += '  <description>'+self.description+'</description>\n'
        if self.upgrade.strip() != "":
            text += '  <upgrade>'+self.upgrade+'</upgrade>\n'
        return text

    # 3 expat handler functions
    def start_element(self, name, attrs):
        """Push the element on the stack; open a new file entry on <file>.

        NOTE(review): assumes every <file> tag carries a name="" attribute
        (raises KeyError otherwise) — confirm against the inputs parsed.
        """
        self.data = ""
        self.parent.append(XMLTag(name, attrs))
        if name == "file":
            fileobj = MetalinkFile(attrs["name"], attrs)
            self.files.append(fileobj)

        if name == "metalink":
            try:
                self.origin = attrs["origin"]
            except KeyError: pass
            try:
                self.type = attrs["type"]
            except KeyError: pass

    def end_element(self, name):
        """Pop the element and store the accumulated text/attributes on the
        document or on the current (last) file entry."""
        tag = self.parent.pop()

        try:
            if name == "url" and self.parent[-1].name == "resources":
                fileobj = self.files[-1]
                fileobj.add_url(self.data, attrs=tag.attrs)
            elif name == "tags" and self.parent[-1].name != "file":
                setattr(self, "tags", self.data)
            elif name in ("name", "url"):
                # publisher/license sub-elements -> publisher_name etc.
                setattr(self, self.parent[-1].name + "_" + name, self.data)
            elif name in ("identity", "copyright", "description", "version", "upgrade"):
                setattr(self, name, self.data)
            elif name == "hash" and self.parent[-1].name == "verification":
                hashtype = tag.attrs["type"]
                fileobj = self.files[-1]
                #setattr(fileobj, "hash_" + hashtype, self.data)
                fileobj.hashlist[hashtype] = self.data
            elif name == "signature" and self.parent[-1].name == "verification":
                hashtype = tag.attrs["type"]
                fileobj = self.files[-1]
                #setattr(fileobj, "hash_" + hashtype, self.data)
                fileobj.hashlist[hashtype] = self.data
            elif name == "pieces":
                fileobj = self.files[-1]
                fileobj.piecetype = tag.attrs["type"]
                fileobj.piecelength = tag.attrs["length"]
            elif name == "hash" and self.parent[-1].name == "pieces":
                fileobj = self.files[-1]
                fileobj.pieces.append(self.data)
            elif name in ("os", "size", "language", "tags"):
                fileobj = self.files[-1]
                setattr(fileobj, name, self.data)
        except IndexError: pass

    def char_data(self, data):
        """Accumulate (stripped) character data for the current element."""
        self.data += data.strip()

    def parsefile(self, filename):
        """Parse the metalink XML file at 'filename'."""
        handle = open(filename, "rb")
        self.parsehandle(handle)
        handle.close()

    def parsehandle(self, handle):
        """Parse metalink XML from the open file object 'handle'."""
        return self.p.ParseFile(handle)

    def parse(self, text):
        """Parse metalink XML from the string 'text'."""
        self.p.Parse(text)

    def validate(self, *args):
        """Validate the document URLs and every file entry.

        Appends messages to self.errors and returns True only if valid.
        """
        valid = True
        if self.publisher_url.strip() != "":
            if not self.validate_url(self.publisher_url):
                self.errors.append("Invalid URL: " + self.publisher_url + '.')
                valid = False
        if self.license_url.strip() != "":
            if not self.validate_url(self.license_url):
                self.errors.append("Invalid URL: " + self.license_url + '.')
                valid = False

        for fileobj in self.files:
            result = fileobj.validate()
            valid = valid and result
            self.errors.extend(fileobj.errors)
        return valid

    def validate_url(self, url):
        """Return True if 'url' has an allowed type and a plausible shape.

        Fix: this method was missing from Metalink (it only existed on
        MetalinkFile), so validate() raised AttributeError whenever
        publisher_url or license_url was non-empty. Same rules as
        MetalinkFile.validate_url.
        """
        if url.endswith(".torrent"):
            type = "bittorrent"
        else:
            chars = url.find(":")
            type = url[:chars]
        allowed_types = ["ftp", "ftps", "http", "https", "rsync", "bittorrent", "magnet", "ed2k"]
        if not type in allowed_types:
            return False
        elif type in ['http', 'https', 'ftp', 'ftps', 'bittorrent']:
            m = re.search(r'\w+://.+\..+/.*', url)
            if m == None:
                return False
        return True

    def download_size(self):
        """Return the total size in bytes of all file entries."""
        total = 0
        for fileobj in self.files:
            total += fileobj.get_size()
        return total
# Emulate a separate 'xmlutils' module: the metalink classes above were
# inlined from the metalinks checker's xmlutils.py, and this Dummy namespace
# object re-exports their names so existing 'xmlutils.X' references keep
# working.
xmlutils = Dummy()
xmlutils.Metalink = Metalink
xmlutils.MetalinkFile = MetalinkFile
xmlutils.Resource = Resource
xmlutils.XMLTag = XMLTag
xmlutils.current_version = current_version
xmlutils.get_first = get_first
#!/usr/bin/env python
########################################################################
#
@@ -3004,9 +3887,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
-# Filename: $URL$
-# Last Updated: $Date$
-# Version: $Rev$
+# Filename: $URL: https://metalinks.svn.sourceforge.net/svnroot/metalinks/checker/console.py $
+# Last Updated: $Date: 2008-07-04 22:01:37 +0200 (fre, 04 jul 2008) $
+# Version: $Rev: 194 $
# Author(s): Neil McNab
#
# Description:
@@ -3092,9 +3975,8 @@
return
if options.download:
-
- progress = ProgressBar(55)
- result = download.get(options.filevar, os.getcwd(), handler=progress.download_update, segmented = not options.nosegmented)
+ progress = ProgressBar()
+ result = download.get(options.filevar, os.getcwd(), handlers={"status": progress.download_update, "bitrate": progress.set_bitrate}, segmented = not options.nosegmented)
progress.download_end()
if not result:
sys.exit(-1)
@@ -3162,8 +4044,12 @@
class ProgressBar:
- def __init__(self, length = 68):
+ def __init__(self, length = 79):
self.length = length
+ self.bitrate = None
+ self.show_bitrate = True
+ self.show_bytes = True
+ self.show_percent = True
#print ""
#self.update(0, 0)
self.total_size = 0
@@ -3185,13 +4071,32 @@
if total_bytes < 0:
return
- size = int(percent * self.length / 100)
- bar = ("#" * size) + ("-" * (self.length - size))
- output = "[%s] %.0f%% %.2f/%.2f MB" % (bar, percent, current_bytes, total_bytes)
+
+ percenttxt = ""
+ if self.show_percent:
+ percenttxt = " %.0f%%" % percent
+
+ bytes = ""
+ if self.show_bytes:
+ bytes = " %.2f/%.2f MB" % (current_bytes, total_bytes)
+
+ bitinfo = ""
+ if self.bitrate != None and self.show_bitrate:
+ bitinfo = " %.0f kbps" % self.bitrate
+
+ length = self.length - 2 - len(percenttxt) - len(bytes) - len(bitinfo)
+
+ size = int(percent * length / 100)
+ bar = ("#" * size) + ("-" * (length - size))
+ output = "[%s]" % bar
+ output += percenttxt + bytes + bitinfo
self.line_reset()
sys.stdout.write(output)
+ def set_bitrate(self, bitrate):
+ self.bitrate = bitrate
+
def update(self, count, total):
if count > total:
count = total
@@ -3204,14 +4109,22 @@
if total < 0:
return
- size = int(percent * self.length / 100)
- bar = ("#" * size) + ("-" * (self.length - size))
- output = "[%s] %.0f%%" % (bar, percent)
+ percenttxt = ""
+ if self.show_percent:
+ percenttxt = " %.0f%%" % percent
+
+ length = self.length - 2 - len(percenttxt)
+
+ size = int(percent * length / 100)
+ bar = ("#" * size) + ("-" * (length - size))
+ output = "[%s]" % bar
+ output += percenttxt
self.line_reset()
sys.stdout.write(output)
def line_reset(self):
+
sys.stdout.write("\b" * 80)
if os.name != 'nt':
sys.stdout.write("\n")
@@ -3226,10 +4139,10 @@
if __name__ == "__main__":
run()
-console = Dummy()
-console.ProgressBar = ProgressBar
-console.VERSION = VERSION
-console._ = _
-console.print_totals = print_totals
-console.run = run
-console.translate = translate
# Emulate a separate 'console' module: the command-line front end above was
# inlined from the metalinks checker's console.py, and this Dummy namespace
# object re-exports its names so existing 'console.X' references keep working.
console = Dummy()
console.ProgressBar = ProgressBar
console.VERSION = VERSION
console._ = _
console.print_totals = print_totals
console.run = run
console.translate = translate
[
Date Prev][
Date Next] [
Thread Prev][
Thread Next]
[
Thread Index]
[
Date Index]
[
Author Index]