[kupfer: 5/27] archiveinside: Clean up unarchived files at exit
- From: Ulrik Sverdrup <usverdrup@src.gnome.org>
- To: commits-list@gnome.org
- Subject: [kupfer: 5/27] archiveinside: Clean up unarchived files at exit
- Date: Tue, 16 Feb 2010 20:47:32 +0000 (UTC)
commit c19e438cd1d091cc821e32a2cb426f56e30b6cf8
Author: Ulrik Sverdrup <ulrik.sverdrup@gmail.com>
Date: Fri Jan 22 17:56:01 2010 +0100
archiveinside: Clean up unarchived files at exit
For now, there is no preference for whether to do this or not; we
simply clean up.
kupfer/plugin/archiveinside.py | 34 ++++++++++++++++++++++++++++++----
1 file changed, 30 insertions(+), 4 deletions(-)
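
The cleanup mechanism below registers each extraction directory and removes
them all in one pass at shutdown. Here is a minimal standalone sketch of the
same idea using only the standard library; atexit stands in for kupfer's
scheduler.Timer, which (given the Timer(True) call and the very long timeout
in the diff) is assumed to fire its callback at program exit, and all names
are illustrative:

    import atexit
    import shutil

    # Extraction directories waiting to be removed at exit (illustrative).
    unarchived_files = []

    def _clean_up_error_handler(func, path, exc_info):
        # Matches shutil.rmtree's onerror signature: report and continue.
        print("Error in %s deleting %s: %r" % (func, path, exc_info))

    def clean_up_unarchived_files():
        # Remove each extracted tree once, even if it was registered twice.
        for filetree in set(unarchived_files):
            shutil.rmtree(filetree, onerror=_clean_up_error_handler)
        del unarchived_files[:]

    atexit.register(clean_up_unarchived_files)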
---
diff --git a/kupfer/plugin/archiveinside.py b/kupfer/plugin/archiveinside.py
index c40f3ed..944e550 100644
--- a/kupfer/plugin/archiveinside.py
+++ b/kupfer/plugin/archiveinside.py
@@ -7,26 +7,35 @@ Issues to resolve:
* Add option to clean up at Kupfer's exit
* Handle zip, tar.gz and anything we can
"""
-__kupfer_name__ = _("Look inside Archives")
+__kupfer_name__ = _("Deep Archives")
__kupfer_contents__ = ("ArchiveContent", )
-__description__ = _("Recently used documents and bookmarked folders")
+__description__ = _("Allow browsing inside compressed archive files")
__version__ = ""
__author__ = "Ulrik Sverdrup <ulrik.sverdrup@gmail.com>"
import hashlib
import os
+import shutil
import tarfile
from kupfer.objects import Source, FileLeaf
from kupfer.obj.sources import DirectorySource
+from kupfer import pretty
+from kupfer import scheduler
from kupfer import utils
# Limit this to archives of a couple of megabytes
MAX_ARCHIVE_BYTE_SIZE = 15 * 1024**2
+# Wait a year, or until program shutdown, before cleaning up
+# extracted archive files
+VERY_LONG_TIME_S = 3600*24*365
+
class ArchiveContent (Source):
- _unarchived_files = []
+ unarchived_files = []
+ end_timer = scheduler.Timer(True)
+
def __init__(self, fileleaf):
Source.__init__(self, _("Content of %s") % fileleaf)
self.path = fileleaf.object
@@ -44,7 +53,8 @@ class ArchiveContent (Source):
if not os.path.exists(pth):
zf = tarfile.TarFile.gzopen(self.path)
zf.extractall(path=pth)
- self._unarchived_files.append(zf)
+ self.unarchived_files.append(pth)
+ self.end_timer.set(VERY_LONG_TIME_S, self.clean_up_unarchived_files)
files = list(DirectorySource(pth, show_hidden=True).get_leaves())
if len(files) == 1 and files[0].has_content():
return files[0].content_source().get_leaves()
@@ -66,3 +76,19 @@ class ArchiveContent (Source):
return cls(leaf)
return None
+ @classmethod
+ def clean_up_unarchived_files(cls):
+ if not cls.unarchived_files:
+ return
+ pretty.print_info(__name__, "Removing extracted archives..")
+ for filetree in set(cls.unarchived_files):
+ pretty.print_info(__name__, "Removing", os.path.basename(filetree))
+ shutil.rmtree(filetree, onerror=cls._clean_up_error_handler)
+ cls.unarchived_files = []
+
+
+ @classmethod
+ def _clean_up_error_handler(cls, func, path, exc_info):
+ pretty.print_error(__name__, "Error in %s deleting %s:" % (func, path))
+ pretty.print_error(__name__, exc_info)
+
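
For context, a hypothetical caller tying the pieces together might look like
the following; extract_archive and its arguments are assumptions for
illustration, not the plugin's exact code, and tarfile.open stands in for the
older TarFile.gzopen used above:

    import os
    import tarfile

    MAX_ARCHIVE_BYTE_SIZE = 15 * 1024**2  # same cap as the plugin

    def extract_archive(archive_path, dest_path, unarchived_files):
        # Refuse oversized archives, mirroring the plugin's size limit.
        if os.path.getsize(archive_path) > MAX_ARCHIVE_BYTE_SIZE:
            raise ValueError("archive exceeds %d bytes" % MAX_ARCHIVE_BYTE_SIZE)
        if not os.path.exists(dest_path):
            tf = tarfile.open(archive_path, "r:gz")
            try:
                tf.extractall(path=dest_path)
            finally:
                tf.close()
            # Record the tree so the exit handler can remove it later.
            unarchived_files.append(dest_path)

With this, browsing an archive extracts it once into dest_path, and every
extracted tree is reclaimed when the program exits.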