conduit r1355 - in trunk: . conduit conduit/dataproviders conduit/datatypes



Author: jstowers
Date: Fri Mar  7 22:36:44 2008
New Revision: 1355
URL: http://svn.gnome.org/viewvc/conduit?rev=1355&view=rev

Log:
2008-03-07  John Stowers  <john stowers gmail com>

	* conduit/Synchronization.py: Refactor error handling to be more
	robust. Synchronizations should now always run to completion, even if
	some data is missed. This should address the reports of Conduit
	synchronizations stopping midway.
	
	* conduit/dataproviders/File.py:
	* conduit/datatypes/File.py: Throw a FileTransferError if the user
	cancels the sync.
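
A minimal sketch of the new error handling pattern (hypothetical helper
names, not the actual Conduit API -- the real change is in the
Synchronization.py diff below): a fatal error while syncing one
(source, sink) pair now marks that pair as failed and continues with the
remaining sinks, instead of raising StopSync and aborting the whole run.

    class SyncronizeFatalError(Exception):
        pass

    def sync_all(source, sinks, sync_pair, mark_error):
        for sink in sinks:
            try:
                # one or two way sync of this pair
                sync_pair(source, sink)
            except SyncronizeFatalError:
                # cannot continue with this source,sink pair,
                # but the remaining sinks still get synchronized
                mark_error(source, sink)
                continue
            except Exception:
                # unknown errors are handled the same way instead of
                # stopping the entire synchronization
                mark_error(source, sink)
                continue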



Modified:
   trunk/ChangeLog
   trunk/conduit/Synchronization.py
   trunk/conduit/dataproviders/File.py
   trunk/conduit/datatypes/File.py

Modified: trunk/conduit/Synchronization.py
==============================================================================
--- trunk/conduit/Synchronization.py	(original)
+++ trunk/conduit/Synchronization.py	Fri Mar  7 22:36:44 2008
@@ -277,20 +277,6 @@
         except Exceptions.SyncronizeError, err:
             log.warn("%s\n%s" % (err, traceback.format_exc()))                     
             self.sinkErrors[sink] = DataProvider.STATUS_DONE_SYNC_ERROR
-            log.debug("ERROR GETTING DATA")
-        except Exceptions.SyncronizeFatalError, err:
-            log.warn("%s\n%s" % (err, traceback.format_exc()))
-            sink.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)                                  
-            source.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)                             
-            #Cannot continue
-            raise Exceptions.StopSync(self.state)                  
-        except Exception:       
-            #Cannot continue
-            log.critical("UNKNOWN SYNCHRONIZATION ERROR\n%s" % traceback.format_exc())
-            sink.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
-            source.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
-            raise Exceptions.StopSync(self.state)
-
         return data
 
     def _put_data(self, source, sink, sourceData, sourceDataRid):
@@ -301,6 +287,9 @@
         if sourceData != None:
             try:
                 put_data(source, sink, sourceData, sourceDataRid, False)
+            except Exceptions.SyncronizeError, err:
+                log.warn("%s\n%s" % (err, traceback.format_exc()))                     
+                self.sinkErrors[sink] = DataProvider.STATUS_DONE_SYNC_ERROR
             except Exceptions.SynchronizeConflictError, err:
                 comp = err.comparison
                 if comp == COMPARISON_OLDER:
@@ -328,9 +317,7 @@
             self.sinkErrors[sink] = DataProvider.STATUS_DONE_SYNC_ERROR
         except Exception:       
             log.critical("UNKNOWN CONVERSION ERROR\n%s" % traceback.format_exc())
-            sink.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
-            source.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
-            raise Exceptions.StopSync(self.state)
+            self.sinkErrors[sink] = DataProvider.STATUS_DONE_SYNC_ERROR
         return newdata
 
     def _apply_deleted_policy(self, sourceWrapper, sourceDataLUID, sinkWrapper, sinkDataLUID):
@@ -696,15 +683,28 @@
                         self.check_thread_not_cancelled([self.source, sink])
                         #only sync with those sinks that refresh'd OK
                         if sink not in sinkDidntRefreshOK:
-                            #now perform a one or two way sync depending on the user prefs
-                            #and the capabilities of the dataprovider
-                            if  self.cond.is_two_way():
-                                #two way
-                                self.two_way_sync(self.source, sink)
-                            else:
-                                #one way
-                                self.one_way_sync(self.source, sink)
-     
+                            try:
+                                #now perform a one or two way sync depending on the user prefs
+                                #and the capabilities of the dataprovider
+                                if  self.cond.is_two_way():
+                                    #two way
+                                    self.two_way_sync(self.source, sink)
+                                else:
+                                    #one way
+                                    self.one_way_sync(self.source, sink)
+                            except Exceptions.SyncronizeFatalError, err:
+                                log.warn("%s\n%s" % (err, traceback.format_exc()))
+                                sink.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)                                  
+                                self.source.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)                             
+                                #cannot continue with this source, sink pair
+                                continue
+                            except Exception:       
+                                log.critical("UNKNOWN SYNCHRONIZATION ERROR\n%s" % traceback.format_exc())
+                                sink.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
+                                self.source.module.set_status(DataProvider.STATUS_DONE_SYNC_ERROR)
+                                #cannot continue with this source, sink pair
+                                continue
+
                     #Done go clean up
                     self.state = self.DONE_STATE
 

Modified: trunk/conduit/dataproviders/File.py
==============================================================================
--- trunk/conduit/dataproviders/File.py	(original)
+++ trunk/conduit/dataproviders/File.py	Fri Mar  7 22:36:44 2008
@@ -8,6 +8,7 @@
 import conduit.datatypes.File as File
 import conduit.Vfs as Vfs
 import conduit.Database as DB
+import conduit.Exceptions as Exceptions
 
 TYPE_FILE = "0"
 TYPE_FOLDER = "1"
@@ -229,7 +230,10 @@
                         sizeOnly=self.compareIgnoreMtime
                         )
         if overwrite or comp == DataType.COMPARISON_NEWER:
-            vfsFile.transfer(newURI, True)
+            try:
+                vfsFile.transfer(newURI, True)
+            except File.FileTransferError:
+                raise Exceptions.SyncronizeFatalError("Transfer Cancelled")
 
         return self.get(newURI).get_rid()
 

Modified: trunk/conduit/datatypes/File.py
==============================================================================
--- trunk/conduit/datatypes/File.py	(original)
+++ trunk/conduit/datatypes/File.py	Fri Mar  7 22:36:44 2008
@@ -14,6 +14,9 @@
 import conduit.datatypes.DataType as DataType
 import conduit.Vfs as Vfs
 
+class FileTransferError(Exception):
+    pass
+
 class File(DataType.DataType):
     
     _name_ = "file"
@@ -300,15 +303,18 @@
             Vfs.uri_make_directory_and_parents(parent)
 
         #Copy the file
-        result = gnomevfs.xfer_uri(
-                    source_uri=self.URI,
-                    target_uri=newURI,
-                    xfer_options=gnomevfs.XFER_NEW_UNIQUE_DIRECTORY,
-                    error_mode=gnomevfs.XFER_ERROR_MODE_ABORT,
-                    overwrite_mode=mode,
-                    progress_callback=self._xfer_progress_callback,
-                    data=cancel_function
-                    )
+        try:        
+            result = gnomevfs.xfer_uri(
+                        source_uri=self.URI,
+                        target_uri=newURI,
+                        xfer_options=gnomevfs.XFER_NEW_UNIQUE_DIRECTORY,
+                        error_mode=gnomevfs.XFER_ERROR_MODE_ABORT,
+                        overwrite_mode=mode,
+                        progress_callback=self._xfer_progress_callback,
+                        data=cancel_function
+                        )
+        except gnomevfs.InterruptedError:
+            raise FileTransferError
 
         #close the file and the handle so that the file info is refreshed
         self.URI = newURI
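
For reference, the resulting propagation of a cancelled transfer, as a
simplified, self-contained sketch (stand-in classes and functions, not the
real gnomevfs/Conduit objects shown in the diffs above):

    class _Interrupted(Exception):
        # stand-in for gnomevfs.InterruptedError, raised on user cancel
        pass

    class FileTransferError(Exception):
        # datatypes/File.py: raised when the underlying copy is interrupted
        pass

    class SyncronizeFatalError(Exception):
        # Exceptions.SyncronizeFatalError: makes the engine skip this pair
        pass

    def transfer(copy_file):
        # datatypes/File.py: translate the low level interruption into a
        # datatype level error
        try:
            copy_file()
        except _Interrupted:
            raise FileTransferError()

    def put_file(copy_file):
        # dataproviders/File.py: translate the datatype error into a fatal
        # synchronization error, which Synchronization.py catches per pair
        try:
            transfer(copy_file)
        except FileTransferError:
            raise SyncronizeFatalError("Transfer Cancelled")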


