[polari/wip/fmuellner/tracker: 15/18] logManager: Support running queries in chunks
- From: Florian Müllner <fmuellner@src.gnome.org>
- To: commits-list@gnome.org
- Cc:
- Subject: [polari/wip/fmuellner/tracker: 15/18] logManager: Support running queries in chunks
- Date: Mon, 13 Mar 2017 03:21:54 +0000 (UTC)
commit 8d3c9e11fcd784e0ac3c57fa697c9d480f9d6802
Author: Carlos Garnacho <carlosg@gnome.org>
Date: Tue Dec 29 18:14:52 2015 +0100
logManager: Support running queries in chunks
It is not always desirable to retrieve the full result set at once;
in particular for the chat-log history, which currently uses
TplLogWalker, we want to keep the behavior of initially loading only
the most recent history and fetching additional logs on demand.
To support this, extend the previously added GenericQuery class to
accept an optional limit.
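For illustration, a caller would construct the query with a limit and run it
as before; this is only a sketch, and the INITIAL_CHUNK constant and the
sparql/connection variables are assumptions, not part of this commit:

    // Sketch only: INITIAL_CHUNK, connection and sparql are assumed values
    const INITIAL_CHUNK = 50;

    let query = new GenericQuery(connection, INITIAL_CHUNK);
    query.run(sparql, null, function(events) {
        // events holds at most INITIAL_CHUNK rows; older history can be
        // requested later with query.next()
    });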
src/logManager.js | 42 +++++++++++++++++++++++++++++++++++++-----
1 files changed, 37 insertions(+), 5 deletions(-)
---
diff --git a/src/logManager.js b/src/logManager.js
index 396255c..ef2472f 100644
--- a/src/logManager.js
+++ b/src/logManager.js
@@ -13,14 +13,18 @@ function getDefault() {
const GenericQuery = new Lang.Class({
Name: 'GenericQuery',
- _init: function(connection) {
+ _init: function(connection, limit = -1) {
this._connection = connection;
this._results = [];
+ this._limit = limit;
+ this._count = 0;
+ this._closed = false;
+ this._cursor = null;
this._task = null;
},
- run: function(sparql, cancellable, callback) {
- this._task = Gio.Task.new(this._connection, cancellable, Lang.bind(this,
+ _createTask: function(cancellable, callback) {
+ return Gio.Task.new(this._connection, cancellable, Lang.bind(this,
function(o, res) {
let success = false;
try {
@@ -32,6 +36,10 @@ const GenericQuery = new Lang.Class({
callback(success ? this._results : []);
this._task = null;
}));
+ },
+
+ run: function(sparql, cancellable, callback) {
+ this._task = this._createTask(cancellable, callback);
this._connection.query_async(sparql, cancellable, Lang.bind(this,
function(c, res) {
@@ -43,11 +51,28 @@ const GenericQuery = new Lang.Class({
return;
}
+ this._cursor = cursor;
cursor.next_async(cancellable,
Lang.bind(this, this._onCursorNext));
}));
},
+ next: function (limit, cancellable, callback) {
+ if (this._task)
+ return false;
+
+ this._results = [];
+ this._count = 0;
+ this._limit = limit;
+ this._task = this._createTask(cancellable, callback);
+ this._cursor.next_async(cancellable, Lang.bind(this, this._onCursorNext));
+ return true;
+ },
+
+ isClosed: function () {
+ return this._closed;
+ },
+
_onCursorNext: function(cursor, res) {
let valid = false;
try {
@@ -58,12 +83,19 @@ const GenericQuery = new Lang.Class({
if (valid) {
this._pushResult(cursor);
- cursor.next_async(this._task.get_cancellable(),
- Lang.bind(this, this._onCursorNext));
+ this._count++;
+
+ if (this._limit <= 0 || this._count < this._limit) {
+ cursor.next_async(this._task.get_cancellable(),
+ Lang.bind(this, this._onCursorNext));
+ } else {
+ this._task.return_boolean(true);
+ }
} else {
cursor.close();
if (!this._task.had_error())
this._task.return_boolean(true);
+ this._closed = true;
}
},
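With the new next() and isClosed() methods, further chunks can be pulled from
the same cursor until it is exhausted. A minimal sketch, assuming a fetchMore()
helper and CHUNK_SIZE constant that are not part of this commit:

    // Sketch only: fetchMore() and CHUNK_SIZE are illustrative assumptions
    const CHUNK_SIZE = 50;

    function fetchMore(query, callback) {
        if (query.isClosed())
            return false; // cursor exhausted, nothing more to fetch

        // next() returns false if a previous request is still in flight
        return query.next(CHUNK_SIZE, null, function(results) {
            callback(results); // up to CHUNK_SIZE additional rows
        });
    }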