beagle r4508 - branches/beagle-lucene2_1/beagled
- From: dbera@svn.gnome.org
- To: svn-commits-list@gnome.org
- Subject: beagle r4508 - branches/beagle-lucene2_1/beagled
- Date: Thu, 21 Feb 2008 00:57:30 +0000 (GMT)
Author: dbera
Date: Thu Feb 21 00:57:30 2008
New Revision: 4508
URL: http://svn.gnome.org/viewvc/beagle?rev=4508&view=rev
Log:
Make Lucene-2.1 specific changes. Now builds.
Modified:
branches/beagle-lucene2_1/beagled/LuceneCommon.cs
branches/beagle-lucene2_1/beagled/LuceneIndexingDriver.cs
branches/beagle-lucene2_1/beagled/LuceneQueryingDriver.cs
Modified: branches/beagle-lucene2_1/beagled/LuceneCommon.cs
==============================================================================
--- branches/beagle-lucene2_1/beagled/LuceneCommon.cs (original)
+++ branches/beagle-lucene2_1/beagled/LuceneCommon.cs Thu Feb 21 00:57:30 2008
@@ -379,7 +379,7 @@
// Create a new store.
Lucene.Net.Store.Directory store;
- store = Lucene.Net.Store.FSDirectory.GetDirectory (path, LockDirectory, true);
+ store = Lucene.Net.Store.FSDirectory.GetDirectory (path, new Lucene.Net.Store.SimpleFSLockFactory (LockDirectory));
// Create an empty index in that store.
IndexWriter writer;
@@ -436,9 +436,15 @@
fingerprint = reader.ReadLine ();
reader.Close ();
+ Lucene.Net.Store.LockFactory lock_factory;
+ if (read_only_mode)
+ lock_factory = Lucene.Net.Store.NoLockFactory.GetNoLockFactory ();
+ else
+ lock_factory = new Lucene.Net.Store.SimpleFSLockFactory (LockDirectory);
+
// Create stores for our indexes.
- primary_store = Lucene.Net.Store.FSDirectory.GetDirectory (PrimaryIndexDirectory, LockDirectory, false, read_only_mode);
- secondary_store = Lucene.Net.Store.FSDirectory.GetDirectory (SecondaryIndexDirectory, LockDirectory, false, read_only_mode);
+ primary_store = Lucene.Net.Store.FSDirectory.GetDirectory (PrimaryIndexDirectory, lock_factory);
+ secondary_store = Lucene.Net.Store.FSDirectory.GetDirectory (SecondaryIndexDirectory, lock_factory);
}
////////////////////////////////////////////////////////////////
@@ -1332,11 +1338,11 @@
if (d1 != 1 || d2 != DateTime.DaysInMonth (y2, m2)) {
LNS.BooleanQuery sub_query;
sub_query = new LNS.BooleanQuery ();
- sub_query.Add (ym_query, true, false);
- sub_query.Add (NewDayQuery (field_name, d1, d2), true, false);
- top_level_query.Add (sub_query, false, false);
+ sub_query.Add (ym_query, LNS.BooleanClause.Occur.MUST);
+ sub_query.Add (NewDayQuery (field_name, d1, d2), LNS.BooleanClause.Occur.MUST);
+ top_level_query.Add (sub_query, LNS.BooleanClause.Occur.SHOULD);
} else {
- top_level_query.Add (ym_query, false, false);
+ top_level_query.Add (ym_query, LNS.BooleanClause.Occur.SHOULD);
}
} else {
@@ -1345,9 +1351,9 @@
if (d1 > 1) {
LNS.BooleanQuery sub_query;
sub_query = new LNS.BooleanQuery ();
- sub_query.Add (NewYearMonthQuery (field_name, y1, m1), true, false);
- sub_query.Add (NewDayQuery (field_name, d1, DateTime.DaysInMonth (y1, m1)), true, false);
- top_level_query.Add (sub_query, false, false);
+ sub_query.Add (NewYearMonthQuery (field_name, y1, m1), LNS.BooleanClause.Occur.MUST);
+ sub_query.Add (NewDayQuery (field_name, d1, DateTime.DaysInMonth (y1, m1)), LNS.BooleanClause.Occur.MUST);
+ top_level_query.Add (sub_query, LNS.BooleanClause.Occur.SHOULD);
++m1;
if (m1 == 13) {
@@ -1360,9 +1366,9 @@
if (d2 < DateTime.DaysInMonth (y2, m2)) {
LNS.BooleanQuery sub_query;
sub_query = new LNS.BooleanQuery ();
- sub_query.Add (NewYearMonthQuery (field_name, y2, m2), true, false);
- sub_query.Add (NewDayQuery (field_name, 1, d2), true, false);
- top_level_query.Add (sub_query, false, false);
+ sub_query.Add (NewYearMonthQuery (field_name, y2, m2), LNS.BooleanClause.Occur.MUST);
+ sub_query.Add (NewDayQuery (field_name, 1, d2), LNS.BooleanClause.Occur.MUST);
+ top_level_query.Add (sub_query, LNS.BooleanClause.Occur.SHOULD);
--m2;
if (m2 == 0) {
@@ -1374,7 +1380,7 @@
// Generate the query for the "middle" of our period, if it is non-empty
if (y1 < y2 || ((y1 == y2) && m1 <= m2))
top_level_query.Add (NewYearMonthQuery (field_name, y1, m1, y2, m2),
- false, false);
+ LNS.BooleanClause.Occur.SHOULD);
}
return top_level_query;
@@ -1432,14 +1438,14 @@
LNS.Query subquery;
subquery = StringToQuery ("Text", part.Text, term_list);
if (subquery != null) {
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
added_subquery = true;
}
// FIXME: HotText is ignored for now!
// subquery = StringToQuery ("HotText", part.Text);
// if (subquery != null) {
- // p_query.Add (subquery, false, false);
+ // p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
// added_subquery = true;
// }
}
@@ -1448,10 +1454,10 @@
LNS.Query subquery;
subquery = StringToQuery ("PropertyText", part.Text, term_list);
if (subquery != null) {
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
// Properties can live in either index
if (! only_build_primary_query)
- s_query.Add (subquery.Clone () as LNS.Query, false, false);
+ s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
added_subquery = true;
}
@@ -1482,10 +1488,10 @@
if (term_list != null)
term_list.Add (term);
subquery = new LNS.TermQuery (term);
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
// Properties can live in either index
if (! only_build_primary_query)
- s_query.Add (subquery.Clone () as LNS.Query, false, false);
+ s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
} else {
// Reset these so we return a null query
p_query = null;
@@ -1515,26 +1521,26 @@
// Search text content
term = new Term ("Text", query_string_lower);
subquery = new LNS.WildcardQuery (term);
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
term_list.Add (term);
// Search text properties
term = new Term ("PropertyText", query_string_lower);
subquery = new LNS.WildcardQuery (term);
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
// Properties can live in either index
if (! only_build_primary_query)
- s_query.Add (subquery.Clone () as LNS.Query, false, false);
+ s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
term_list.Add (term);
// Search property keywords
term = new Term ("PropertyKeyword", query_string_lower);
term_list.Add (term);
subquery = new LNS.WildcardQuery (term);
- p_query.Add (subquery, false, false);
+ p_query.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
// Properties can live in either index
if (! only_build_primary_query)
- s_query.Add (subquery.Clone () as LNS.Query, false, false);
+ s_query.Add (subquery.Clone () as LNS.Query, LNS.BooleanClause.Occur.SHOULD);
primary_query = p_query;
if (! only_build_primary_query)
@@ -1587,9 +1593,9 @@
term_list, query_part_hook,
out p_subq, out s_subq, out sub_hit_filter);
if (p_subq != null)
- p_query.Add (p_subq, false, false);
+ p_query.Add (p_subq, LNS.BooleanClause.Occur.SHOULD);
if (s_subq != null)
- s_query.Add (s_subq, false, false);
+ s_query.Add (s_subq, LNS.BooleanClause.Occur.SHOULD);
if (sub_hit_filter != null) {
if (or_hit_filter == null)
or_hit_filter = new OrHitFilter ();
@@ -1678,7 +1684,7 @@
int cursor = 0;
if (extra_requirement != null) {
- top_query.Add (extra_requirement, true, false);
+ top_query.Add (extra_requirement, LNS.BooleanClause.Occur.MUST);
++cursor;
}
@@ -1690,7 +1696,7 @@
LNS.BooleanQuery bq;
bq = new LNS.BooleanQuery ();
bottom_queries.Add (bq);
- top_query.Add (bq, false, false);
+ top_query.Add (bq, LNS.BooleanClause.Occur.SHOULD);
}
}
@@ -1708,7 +1714,7 @@
cursor = 0;
}
- target.Add (subquery, false, false);
+ target.Add (subquery, LNS.BooleanClause.Occur.SHOULD);
}
return top_query;
Modified: branches/beagle-lucene2_1/beagled/LuceneIndexingDriver.cs
==============================================================================
--- branches/beagle-lucene2_1/beagled/LuceneIndexingDriver.cs (original)
+++ branches/beagle-lucene2_1/beagled/LuceneIndexingDriver.cs Thu Feb 21 00:57:30 2008
@@ -216,15 +216,15 @@
// For property changes, only secondary index is modified
if (indexable.Type != IndexableType.PropertyChange) {
term = new Term ("Uri", uri_str);
- num_delete = primary_reader.Delete (term);
- secondary_reader.Delete (term);
+ num_delete = primary_reader.DeleteDocuments (term);
+ secondary_reader.DeleteDocuments (term);
}
// When we delete an indexable, also delete any children.
// FIXME: Shouldn't we also delete any children of children, etc.?
term = new Term ("ParentUri", uri_str);
- num_delete += primary_reader.Delete (term);
- secondary_reader.Delete (term);
+ num_delete += primary_reader.DeleteDocuments (term);
+ secondary_reader.DeleteDocuments (term);
// If this is a strict removal (and not a deletion that
// we are doing in anticipation of adding something back),
Modified: branches/beagle-lucene2_1/beagled/LuceneQueryingDriver.cs
==============================================================================
--- branches/beagle-lucene2_1/beagled/LuceneQueryingDriver.cs (original)
+++ branches/beagle-lucene2_1/beagled/LuceneQueryingDriver.cs Thu Feb 21 00:57:30 2008
@@ -209,12 +209,12 @@
case QueryPartLogic.Prohibited:
if (primary_prohibited_part_query == null)
primary_prohibited_part_query = new LNS.BooleanQuery ();
- primary_prohibited_part_query.Add (primary_part_query, false, false);
+ primary_prohibited_part_query.Add (primary_part_query, LNS.BooleanClause.Occur.SHOULD);
if (secondary_part_query != null) {
if (secondary_prohibited_part_query == null)
secondary_prohibited_part_query = new LNS.BooleanQuery ();
- secondary_prohibited_part_query.Add (secondary_part_query, false, false);
+ secondary_prohibited_part_query.Add (secondary_part_query, LNS.BooleanClause.Occur.SHOULD);
}
if (part_hit_filter != null) {
@@ -361,8 +361,7 @@
// Only generate results if we got some matches
if (primary_matches != null && primary_matches.ContainsTrue ()) {
GenerateQueryResults (primary_reader,
- primary_searcher,
- secondary_searcher,
+ secondary_reader,
primary_matches,
result,
term_list,
@@ -426,7 +425,7 @@
LNS.BooleanQuery combined_query;
combined_query = new LNS.BooleanQuery ();
foreach (LNS.Query query in primary_queries)
- combined_query.Add (query, true, false);
+ combined_query.Add (query, LNS.BooleanClause.Occur.MUST);
LuceneBitArray matches;
matches = new LuceneBitArray (primary_searcher, combined_query);
@@ -562,7 +561,7 @@
foreach (Term term in term_list) {
double idf;
- idf = similarity.Ldf (reader.DocFreq (term), reader.MaxDoc ());
+ idf = similarity.Idf (reader.DocFreq (term), reader.MaxDoc ());
int hit_count;
hit_count = hits_by_id.Count;
@@ -598,12 +597,11 @@
//
// Two arrays we need for quickly creating lucene documents and check if they are valid
- static string[] fields_timestamp_uri = { "Timestamp", "Uri" };
- static string[] fields_uri = {"Uri"};
+ static FieldSelector fields_timestamp_uri = new MapFieldSelector (new string[] {"Uri", "Timestamp"});
+ static FieldSelector fields_uri = new MapFieldSelector (new string[] {"Uri"});
private static void GenerateQueryResults (IndexReader primary_reader,
- LNS.IndexSearcher primary_searcher,
- LNS.IndexSearcher secondary_searcher,
+ IndexReader secondary_reader,
BetterBitArray primary_matches,
IQueryResult result,
ICollection query_term_list,
@@ -641,8 +639,7 @@
if (primary_matches.TrueCount > max_results)
final_list_of_hits = ScanRecentDocs (primary_reader,
- primary_searcher,
- secondary_searcher,
+ secondary_reader,
primary_matches,
hits_by_id,
max_results,
@@ -651,8 +648,7 @@
if (final_list_of_hits == null)
final_list_of_hits = FindRecentResults (primary_reader,
- primary_searcher,
- secondary_searcher,
+ secondary_reader,
primary_matches,
hits_by_id,
max_results,
@@ -743,8 +739,7 @@
// for all of them.
private static ArrayList ScanRecentDocs (IndexReader primary_reader,
- LNS.IndexSearcher primary_searcher,
- LNS.IndexSearcher secondary_searcher,
+ IndexReader secondary_reader,
BetterBitArray primary_matches,
Dictionary<int, Hit> hits_by_id,
int max_results,
@@ -763,8 +758,8 @@
Term term;
TermDocs secondary_term_docs = null;
- if (secondary_searcher != null)
- secondary_term_docs = secondary_searcher.Reader.TermDocs ();
+ if (secondary_reader != null)
+ secondary_term_docs = secondary_reader.TermDocs ();
do {
term = enumerator.Term ();
@@ -780,13 +775,13 @@
int doc_id = docs.Doc ();
if (primary_matches.Get (doc_id)) {
- Document doc = primary_searcher.Doc (doc_id);
+ Document doc = primary_reader.Document (doc_id);
// If we have a UriFilter, apply it.
if (uri_filter != null) {
Uri uri;
uri = GetUriFromDocument (doc);
if (uri_filter (uri)) {
- Hit hit = CreateHit (doc, secondary_searcher, secondary_term_docs);
+ Hit hit = CreateHit (doc, secondary_reader, secondary_term_docs);
hits_by_id [doc_id] = hit;
// Add the result, last modified first
results.Add (hit);
@@ -825,8 +820,7 @@
}
private static ArrayList FindRecentResults (IndexReader primary_reader,
- LNS.IndexSearcher primary_searcher,
- LNS.IndexSearcher secondary_searcher,
+ IndexReader secondary_reader,
BetterBitArray primary_matches,
Dictionary<int, Hit> hits_by_id,
int max_results,
@@ -848,8 +842,8 @@
else
all_docs = new ArrayList (primary_matches.TrueCount);
- if (secondary_searcher != null)
- term_docs = secondary_searcher.Reader.TermDocs ();
+ if (secondary_reader != null)
+ term_docs = secondary_reader.TermDocs ();
for (int match_index = primary_matches.Count; ; match_index --) {
// Walk across the matches backwards, since newer
@@ -861,7 +855,7 @@
count++;
- doc = primary_searcher.Doc (match_index, fields_timestamp_uri);
+ doc = primary_reader.Document (match_index, fields_timestamp_uri);
// Check the timestamp --- if we have already reached our
// limit, we might be able to reject it immediately.
@@ -887,7 +881,7 @@
// Get the actual hit now
// doc was created with only 2 fields, so first get the complete lucene document for primary document
- Hit hit = CreateHit (primary_searcher.Doc (match_index), secondary_searcher, term_docs);
+ Hit hit = CreateHit (primary_reader.Document (match_index), secondary_reader, term_docs);
hits_by_id [match_index] = hit;
// Add the document to the appropriate data structure.
@@ -917,12 +911,12 @@
}
private static Hit CreateHit ( Document primary_doc,
- LNS.IndexSearcher secondary_searcher,
+ IndexReader secondary_reader,
TermDocs term_docs)
{
Hit hit = DocumentToHit (primary_doc);
- if (secondary_searcher == null)
+ if (secondary_reader == null)
return hit;
// Get the stringified version of the URI
@@ -932,7 +926,7 @@
// Move to the first (and only) matching term doc
term_docs.Next ();
- Document secondary_doc = secondary_searcher.Doc (term_docs.Doc ());
+ Document secondary_doc = secondary_reader.Document (term_docs.Doc ());
// If we are using the secondary index, now we need to
// merge the properties from the secondary index
[Date Prev][Date Next] [Thread Prev][Thread Next] [Thread Index] [Date Index] [Author Index]