desktop-data-model r7280 - trunk/ddm



Author: otaylor
Date: Wed Apr 23 19:36:31 2008
New Revision: 7280
URL: http://svn.gnome.org/viewvc/desktop-data-model?rev=7280&view=rev

Log:
Improve scheduling of flushes: never schedule a flush when re-adding
  an item whose _ddm_work_item_process() returned FALSE; this should
  prevent infinite loops spinning at 100% CPU.
Clean up logging of flush scheduling to only log when a flush is newly
  scheduled, and to log more information.
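
To summarize the new policy in isolation: a flush is scheduled only when a
genuinely new work item is added, no flush is already pending, and the item's
minimum serial has already been answered by the server; an item re-queued
because its process() call returned FALSE never schedules a flush. The
following self-contained C sketch illustrates that rule only; the Model and
WorkItem types and field names below are simplified stand-ins for
illustration, not the real DDM types.

    /* Minimal stand-alone sketch of the new flush-scheduling rule.
     * Model, WorkItem and the names here are simplified stand-ins. */
    #include <stdio.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
        bool    flush_scheduled;           /* stand-in for model->flush_idle != 0 */
        int64_t max_answered_query_serial; /* highest serial answered by the server */
    } Model;

    typedef struct {
        const char *id;
        int64_t     min_serial;            /* ready once this serial is answered */
    } WorkItem;

    static void
    schedule_flush (Model *model)
    {
        model->flush_scheduled = true;
        printf ("flush scheduled\n");
    }

    /* schedule_flush_allowed is FALSE when an item is being re-queued after
     * its process() call returned FALSE: it is still waiting on the server,
     * so flushing again immediately would just spin. */
    static void
    add_work_item (Model *model, const WorkItem *item, bool schedule_flush_allowed)
    {
        /* ... insert the item into the queue, ordered by min_serial ... */

        if (schedule_flush_allowed &&
            !model->flush_scheduled &&
            item->min_serial <= model->max_answered_query_serial)
        {
            printf ("scheduling flush for item %s (min_serial=%lld, max_answered=%lld)\n",
                    item->id,
                    (long long) item->min_serial,
                    (long long) model->max_answered_query_serial);
            schedule_flush (model);
        }
    }

    int
    main (void)
    {
        Model model = { false, 10 };
        WorkItem ready   = { "ready-item", 5 };    /* serial already answered */
        WorkItem waiting = { "waiting-item", 20 }; /* still waiting on the server */

        add_work_item (&model, &ready, true);    /* schedules a flush */
        model.flush_scheduled = false;
        add_work_item (&model, &waiting, true);  /* no flush: serial not answered yet */
        add_work_item (&model, &ready, false);   /* re-queued item: no flush */
        return 0;
    }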


Modified:
   trunk/ddm/ddm-data-model.c
   trunk/ddm/ddm-data-query.c
   trunk/ddm/ddm-work-item.c
   trunk/ddm/ddm-work-item.h

Modified: trunk/ddm/ddm-data-model.c
==============================================================================
--- trunk/ddm/ddm-data-model.c	(original)
+++ trunk/ddm/ddm-data-model.c	Wed Apr 23 19:36:31 2008
@@ -661,11 +661,13 @@
     return (serial_a < serial_b) ? -1 : (serial_a == serial_b ? 0 : 1);
 }
 
-void
-_ddm_data_model_add_work_item (DDMDataModel    *model,
-                               DDMWorkItem     *item)
+static void
+_ddm_data_model_add_work_item_internal (DDMDataModel    *model,
+                                        DDMWorkItem     *item,
+                                        gboolean         schedule_flush)
 {
     GList *l;
+    gint64 serial = _ddm_work_item_get_min_serial(item);
     
     _ddm_work_item_ref(item);
 
@@ -682,7 +684,7 @@
      * because we insert after on the tie-break to avoid reordering
      * when we pull things off the front.
      */
-    if (_ddm_work_item_get_min_serial(item) == -1) {
+    if (serial == -1) {
         for (l = model->work_items->head; l; l = l->next) {
             if (compare_work_items(l->data, item, NULL) > 0)
                 break;
@@ -704,10 +706,22 @@
         else
             g_queue_insert_after(model->work_items, l, item);
     }
-    
 
-    g_debug("Scheduling flush due to work item");
-    ddm_data_model_schedule_flush(model);
+    if (schedule_flush && model->flush_idle == 0 && serial <= model->max_answered_query_serial) {
+        g_debug("Scheduling flush due to work item %s with min_serial "
+                "%" G_GINT64_MODIFIER "d"
+                "; max_answered=%" G_GINT64_MODIFIER "d",
+                _ddm_work_item_get_id_string(item),
+                serial, model->max_answered_query_serial);
+        ddm_data_model_schedule_flush(model);
+    }
+}
+
+void
+_ddm_data_model_add_work_item (DDMDataModel    *model,
+                               DDMWorkItem     *item)
+{
+    _ddm_data_model_add_work_item_internal(model, item, TRUE);
 }
 
 gboolean
@@ -829,8 +843,14 @@
 
         if (shutting_down)
             _ddm_work_item_cancel(item);
-        else if (!_ddm_work_item_process(item))
-            _ddm_data_model_add_work_item(model, item);
+        else if (!_ddm_work_item_process(item)) {
+            /* Re-adding the same item back to the queue should not schedule
+             * a flush, because being re-added means a) what you are waiting for
+             * has not happened or b) you've requested something new from the
+             * server and you are now waiting for that.
+             */
+            _ddm_data_model_add_work_item_internal(model, item, FALSE);
+        }
 
         _ddm_work_item_unref(item);
     }
@@ -842,7 +862,10 @@
 ddm_data_model_flush_internal(DDMDataModel *model,
                               gboolean      shutting_down)
 {
-    g_debug("Flushing Data Model, shutting_down = %d", shutting_down);
+    if (shutting_down)
+        g_debug("Flushing Data Model on shut down");
+    else
+        g_debug("Flushing Data Model");
 
     if (model->backend->flush)
         model->backend->flush(model, model->backend_data);
@@ -1019,10 +1042,13 @@
 
     /* There may be waiting work items that are satisfied because this query is now answered. 
      */
-    if (model->work_items->length > 0)  {
+    if (model->flush_idle == 0 && model->work_items->length > 0)  {
         gint64 waiting_serial = _ddm_work_item_get_min_serial(model->work_items->head->data);
-        if (waiting_serial > model->max_answered_query_serial && waiting_serial <= serial) {
-            g_debug("Scheduling flush because waiting work item satisfied by %s",
+        if (waiting_serial <= serial) {
+            g_debug("Scheduling flush because waiting work item %s with min_serial %" G_GINT64_MODIFIER "d "
+                    "potentially satisfied by %s",
+                    _ddm_work_item_get_id_string(model->work_items->head->data),
+                    waiting_serial,
                     ddm_data_query_get_id_string(query));
             ddm_data_model_schedule_flush(model);
         }

Modified: trunk/ddm/ddm-data-query.c
==============================================================================
--- trunk/ddm/ddm-data-query.c	(original)
+++ trunk/ddm/ddm-data-query.c	Wed Apr 23 19:36:31 2008
@@ -268,7 +268,11 @@
         ddm_data_resource_mark_received_fetches(l->data, query->fetch, !local);
     }
 
-    _ddm_data_model_query_answered(query->model, query);
+    /* Local responses shouldn't count towards the "max_answered_serial"
+     * that the model maintains to track communication with the server.
+     */
+    if (!local)
+        _ddm_data_model_query_answered(query->model, query);
     
     query->results = g_slist_copy(results);
 

Modified: trunk/ddm/ddm-work-item.c
==============================================================================
--- trunk/ddm/ddm-work-item.c	(original)
+++ trunk/ddm/ddm-work-item.c	Wed Apr 23 19:36:31 2008
@@ -419,3 +419,9 @@
 {
     return item->min_serial;
 }
+
+const char *
+_ddm_work_item_get_id_string (DDMWorkItem *item)
+{
+    return item->id_string;
+}

Modified: trunk/ddm/ddm-work-item.h
==============================================================================
--- trunk/ddm/ddm-work-item.h	(original)
+++ trunk/ddm/ddm-work-item.h	Wed Apr 23 19:36:31 2008
@@ -55,6 +55,8 @@
  * this query serial */
 gint64       _ddm_work_item_get_min_serial   (const DDMWorkItem *item);
 
+const char *_ddm_work_item_get_id_string (DDMWorkItem *item);
+
 G_END_DECLS
 
 #endif /* __DDM_WORK_ITEM_H__ */


