nemo r75 - in trunk: common metadata
- From: arj svn gnome org
- To: svn-commits-list gnome org
- Subject: nemo r75 - in trunk: common metadata
- Date: Tue, 5 Feb 2008 16:38:04 +0000 (GMT)
Author: arj
Date: Tue Feb 5 16:38:03 2008
New Revision: 75
URL: http://svn.gnome.org/viewvc/nemo?rev=75&view=rev
Log:
Optimizations to indexing code
Should make code easier to tweak and generally lowers cpu usage
Modified:
trunk/common/Common.cs
trunk/metadata/MetadataStore.cs
Modified: trunk/common/Common.cs
==============================================================================
--- trunk/common/Common.cs (original)
+++ trunk/common/Common.cs Tue Feb 5 16:38:03 2008
@@ -6,7 +6,7 @@
namespace Nemo
{
- public struct Tuple<TFirst,TSecond>
+ public struct Tuple<TFirst,TSecond> : IEquatable<Tuple<TFirst,TSecond>>
{
public TFirst first;
public TSecond second;
@@ -16,6 +16,21 @@
this.first = first;
this.second = second;
}
+
+ public bool Equals(Tuple<TFirst,TSecond> other)
+ {
+ return first.Equals(other.first)&& second.Equals(other.second);
+ }
+
+ public static bool operator==(Tuple<TFirst,TSecond> lhs, Tuple<TFirst,TSecond> rhs)
+ {
+ return lhs.Equals(rhs);
+ }
+
+ public static bool operator!=(Tuple<TFirst,TSecond> lhs, Tuple<TFirst,TSecond> rhs)
+ {
+ return !(lhs == rhs);
+ }
}
class Helpers
@@ -219,14 +234,15 @@
return zip<int, T>(ints(), ie);
}
- public static bool sorted_lists_equal<T>(List<T> l1, List<T> l2) where T:IComparable
+ public static bool sorted_lists_equal<T>(List<T> l1, List<T> l2) where T:IEquatable<T>
{
if (l1.Count != l2.Count)
return false;
foreach (Tuple<T, T> t in zip<T,T>(l1, l2)) {
- if (t.first.CompareTo(t.second) != 0)
+ if (!t.first.Equals(t.second))
return false;
}
+
return true;
}
}
Modified: trunk/metadata/MetadataStore.cs
==============================================================================
--- trunk/metadata/MetadataStore.cs (original)
+++ trunk/metadata/MetadataStore.cs Tue Feb 5 16:38:03 2008
@@ -119,9 +119,35 @@
private Thread watcher_thread;
private Thread db_runner_thread;
+
+ static readonly int min_number_of_wait_cycles = 10;
+ static readonly int wait_cycles_scaling_factor = 4;
+
+ // helper function to turn an enumeration into a enumerator that will fit into the work queues
+ private IEnumerator<bool> TurnEnumerableIntoEnumerator(IEnumerable<bool> enumerable)
+ {
+ IEnumerator<bool> t = enumerable.GetEnumerator();
+
+ while (t.MoveNext()) {
+ yield return t.Current;
+ }
+
+ // make compiler happy
+ yield break;
+ }
+
+ // helper function to turn a function into a Enumerator that will fit into the work queues
+ private IEnumerator<bool> TurnFunctionIntoEnumerator(VoidFunction func)
+ {
+ func();
- private LinkedList<VoidFunction> background_db_work;
- private LinkedList<VoidFunction> db_work;
+ // make compiler happy
+ yield break;
+ }
+
+ // IEnumerable<bool> to support continuing functions (break functions into pieces to lower cpu load)
+ private LinkedList<IEnumerator<bool>> background_db_work;
+ private LinkedList<IEnumerator<bool>> db_work;
private volatile bool stop_db_runner;
private AutoResetEvent work_ready;
private volatile string watch_path;
@@ -149,6 +175,9 @@
// workaround braindead filter interface on FileSystemWatcher
private bool filename_is_good(string filename)
{
+ if (database.database_filename == filename)
+ return false;
+
foreach (string v in exclude_dirs)
if (filename.Contains(v))
return false;
@@ -214,8 +243,8 @@
public void start(string database_dir)
{
this.database_dir = database_dir;
- db_work = new LinkedList<VoidFunction>();
- background_db_work = new LinkedList<VoidFunction>();
+ db_work = new LinkedList<IEnumerator<bool>>();
+ background_db_work = new LinkedList<IEnumerator<bool>>();
work_ready = new AutoResetEvent(false);
watcher_thread = new Thread(watcher_runner);
@@ -239,9 +268,7 @@
Singleton<Indexing>.Instance.change_status(true);
})();
- add_low_priority_db_work(delegate {
- db_sync_with_filesystem(watch_path);
- } );
+ add_low_priority_db_work(TurnEnumerableIntoEnumerator(db_sync_with_filesystem(watch_path)));
}
// set query to use for query_changed event, note DON'T touch the query
@@ -342,17 +369,24 @@
add_db_work(delegate { callback(database.get_files_from_paths(paths)); });
}
- private void add_low_priority_db_work(VoidFunction item)
+ private void add_low_priority_db_work(IEnumerator<bool> item)
{
lock (background_db_work)
background_db_work.AddLast(item);
work_ready.Set();
}
+ private void add_low_priority_db_work(VoidFunction item)
+ {
+ lock (background_db_work)
+ background_db_work.AddLast(TurnFunctionIntoEnumerator(item));
+ work_ready.Set();
+ }
+
private void add_db_work(VoidFunction item)
{
lock (db_work)
- db_work.AddLast(item);
+ db_work.AddLast(TurnFunctionIntoEnumerator(item));
work_ready.Set();
}
@@ -416,12 +450,15 @@
database = new SqliteDatabase(database_dir);
while (true) {
- VoidFunction item = null;
+ IEnumerator<bool> item = null;
+ bool item_from_db_work = false;
+
lock (db_work) {
if (db_work.Count > 0) {
item = db_work.First.Value;
db_work.RemoveFirst();
+ item_from_db_work = true;
}
}
@@ -443,9 +480,15 @@
if (stop_db_runner)
break;
- else if (item != null)
- item();
-
+ else if (item != null) {
+ if (item.MoveNext()) {
+ if (item_from_db_work)
+ db_work.AddFirst(item);
+ else
+ background_db_work.AddFirst(item);
+ }
+ }
+
bool work_surely_queued = false;
lock (db_work) {
if (db_work.Count > 0)
@@ -474,7 +517,7 @@
private void db_handle_change(string path)
{
try {
- FileInfo f = new FileInfo(path);
+ FileInfo f = new FileInfo(path);
// first filter out strange stuff
FileAttributes a = f.Attributes;
@@ -486,13 +529,13 @@
return;
bool was_in_query = database.file_path_is_in_query(path, current_query);
-
+
long filetype_id = db_determine_filetype_id(path);
bool id_found = false;
long id = database.get_file_id(path, out id_found);
- if (id_found)
+ if (id_found)
{
Helpers.RunInMainThread(delegate {
Singleton<Indexing>.Instance.add_text(String.Format("file updated {0}\n", path));
@@ -506,12 +549,12 @@
Helpers.RunInMainThread(delegate {
Singleton<Indexing>.Instance.add_text(String.Format("file added {0}\n", path));
})();
- database.add_file(path, f.LastAccessTimeUtc,
+ database.add_file(path, f.LastAccessTimeUtc,
f.LastWriteTimeUtc, f.Length, filetype_id);
- db_trigger_type_labels_changed();
+ db_trigger_type_labels_changed();
}
- bool is_in_query = database.file_path_is_in_query(path, current_query);
+ bool is_in_query = database.file_path_is_in_query(path, current_query);
if (is_in_query || was_in_query != is_in_query)
db_schedule_query_result_update();
}
@@ -715,11 +758,13 @@
private void db_trigger_type_labels_changed()
{
- if (type_labels_changed_callback != null)
+ if (type_labels_changed_callback != null) {
type_labels_changed_callback(database.get_used_file_type_categories());
+ }
}
- private bool db_sync_directory_with_filesystem(string directory_path)
+ // returns how many was inserted
+ private IEnumerable<int> db_sync_directory_with_filesystem(string directory_path)
{
// get filesystem files and database files
string[] files = new string[0];
@@ -737,19 +782,18 @@
fsfiles.Sort();
- List<string> dbfiles = database.get_file_paths_in_directory(directory_path);
+ List<string> dbfiles = database.get_file_paths_in_directory(directory_path);
- // compare them
+ // compare them
List<string> update_files = new List<string>();
List<string> delete_files = new List<string>();
List<string> add_files = new List<string>();
List<string>.Enumerator ifs = fsfiles.GetEnumerator();
List<string>.Enumerator idb = dbfiles.GetEnumerator();
- bool morefs, moredb;
- morefs = ifs.MoveNext();
- moredb = idb.MoveNext();
- while (morefs && moredb) {
+ bool morefs = ifs.MoveNext(), moredb = idb.MoveNext();
+
+ while (morefs && moredb) {
int val = ifs.Current.CompareTo(idb.Current);
if (val < 0) {
add_files.Add(ifs.Current);
@@ -775,40 +819,71 @@
delete_files.Add(idb.Current);
moredb = idb.MoveNext();
}
-
+
bool inserted = delete_files.Count > 0 || add_files.Count > 0 || update_files.Count > 0;
+ int nr_changes = 0;
+
// performance
if (inserted)
- database.start_transaction();
+ database.start_transaction();
// and action!
- foreach (string file in delete_files)
+ foreach (string file in delete_files) {
+ if (++nr_changes == 100) {
+ if (inserted)
+ database.end_transaction();
+ yield return nr_changes;
+ if (inserted)
+ database.start_transaction();
+ nr_changes = 0;
+ }
db_handle_delete(file);
+ }
- foreach (string file in add_files)
- db_handle_create(file);
-
- foreach (string file in update_files)
- db_handle_change(file);
+ foreach (string file in add_files) {
+ if (++nr_changes == 100) {
+ if (inserted)
+ database.end_transaction();
+ yield return nr_changes;
+ if (inserted)
+ database.start_transaction();
+ nr_changes = 0;
+ }
+ db_handle_create(file);
+ }
+
+ foreach (string file in update_files) {
+ if (++nr_changes == 100) {
+ if (inserted)
+ database.end_transaction();
+ yield return nr_changes;
+ if (inserted)
+ database.start_transaction();
+ nr_changes = 0;
+ }
+ db_handle_change(file);
+ }
dir_sync_count -= 1;
if (inserted)
database.end_transaction();
-
- return inserted;
+
+ yield return nr_changes;
}
static int dir_sync_count = 0;
int database_work_count;
- private void db_sync_with_filesystem(string directory_path)
+ private IEnumerable<bool> db_sync_with_filesystem(string directory_path)
{
// fix files in this dir
- if (db_sync_directory_with_filesystem(directory_path)) // inserted
- database_work_count = 20; // * 25 ms = 500 ms
-
+ foreach (int nr_changes in db_sync_directory_with_filesystem(directory_path)) {
+ database_work_count = Math.Max(nr_changes/wait_cycles_scaling_factor, min_number_of_wait_cycles);
+ yield return true;
+ }
+
// then recurse
string[] subdirs = Directory.GetDirectories(directory_path);
foreach (string subdir in subdirs) {
@@ -819,7 +894,7 @@
string tmp_subdir = subdir;
if ((System.IO.File.GetAttributes(subdir) & FileAttributes.ReparsePoint) != FileAttributes.ReparsePoint) {
- add_low_priority_db_work(delegate() { db_sync_with_filesystem(tmp_subdir); } );
+ add_low_priority_db_work(TurnEnumerableIntoEnumerator(db_sync_with_filesystem(tmp_subdir)));
dir_sync_count += 1;
}
}
@@ -830,6 +905,8 @@
Singleton<Indexing>.Instance.change_status(false);
})();
}
+
+ yield break;
}
private long db_determine_filetype_id(string path)
@@ -849,9 +926,13 @@
{
IDbConnection db_connection;
+ public string database_filename;
+
public SqliteDatabase(string database_dir)
{
- string str = "Data Source=" + database_dir + "nemo.db";
+ database_filename = database_dir + "nemo.db";
+
+ string str = "Data Source=" + database_filename;
db_connection = new SqliteConnection(str);
@@ -879,6 +960,9 @@
create_db_tables(cmd);
fill_in_default_file_types(cmd);
}
+ else if (version == 1) {
+ update_v1_to_v2(cmd);
+ }
}
}
@@ -910,11 +994,19 @@
return db_connection.CreateCommand();
}
+ private void update_v1_to_v2(IDbCommand cmd)
+ {
+ System.Console.WriteLine("Updating database from version 1 to version 2");
+ cmd.CommandText += "update version set version = 2;";
+ cmd.CommandText += "create index file_types_category on file_types(category);";
+ cmd.ExecuteNonQuery();
+ }
+
private void create_db_tables(IDbCommand cmd)
{
// nothing is in the DB so create all the tables
cmd.CommandText = "create table version(version integer);";
- cmd.CommandText += "insert into version values (1);";
+ cmd.CommandText += "insert into version values (2);";
cmd.CommandText += "create table files ("
+ "id integer primary key, "
@@ -967,6 +1059,7 @@
+ "type text, "
+ "category integer"
+ ");";
+ cmd.CommandText += "create index file_types_category on file_types(category);";
cmd.CommandText += "create index file_types_type_index on file_types(type);";
cmd.CommandText += String.Format("insert into file_types (id, type, category) values (1, \"\", {0});", (int) FileTypeCategory.None);
cmd.CommandText += "create table file_labels ("
@@ -1163,14 +1256,13 @@
public List<string> get_file_paths_in_directory(string directory_path)
{
using (IDbCommand cmd = get_command()) {
- cmd.CommandText = String.Format("select path from files where path like \"{0}{1}%\" and path not like \"{0}{1}%{1}%\"", directory_path, Path.DirectorySeparatorChar);
+ cmd.CommandText = String.Format("select path from files where path like \"{0}{1}%\" and path not like \"{0}{1}%{1}%\" order by path", directory_path, Path.DirectorySeparatorChar);
List<string> res = new List<string>();
using (IDataReader reader = cmd.ExecuteReader()) {
while (reader.Read())
res.Add(reader.GetString(0));
}
- res.Sort();
#if false
Console.WriteLine("=====> {0}", cmd.CommandText);
[Date Prev][Date Next] [Thread Prev][Thread Next]
[Thread Index]
[Date Index]
[Author Index]