[tracker/rss-enclosures] New tests
- From: Roberto Guido <rguido src gnome org>
- To: commits-list gnome org
- Cc:
- Subject: [tracker/rss-enclosures] New tests
- Date: Wed, 24 Nov 2010 01:23:43 +0000 (UTC)
commit 0e1bffddd9f1e5292d790910c6d87f7ea469e211
Author: Ivan Frade <ivan frade nokia com>
Date: Wed Aug 18 18:29:24 2010 +0300
New tests
tests/functional-tests/01-insertion.py | 1108 +++++++++++--------------
tests/functional-tests/02-metacontacts.py | 165 ----
tests/functional-tests/03-fts-functions.py | 36 +-
tests/functional-tests/13-threaded-store.py | 145 ++++
tests/functional-tests/14-signals.py | 171 ++++
tests/functional-tests/200-backup-restore.py | 401 +++++++++
tests/functional-tests/300-miner-basic-ops.py | 275 ++++++
tests/functional-tests/310-fts-indexing.py | 308 +++++++
tests/functional-tests/400-extractor.py | 203 +++++
tests/functional-tests/500-writeback.py | 251 ++++++
10 files changed, 2247 insertions(+), 816 deletions(-)
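
The new tests drop the per-file D-Bus plumbing in favour of a shared CommonTrackerStoreTest base class (from common.utils.storetest, not included in this patch), which exposes the store through a self.tracker helper (a StoreHelper, imported from common.utils.helpers in 200-backup-restore.py). Below is a minimal sketch of what such a helper wraps, based on the D-Bus constants and methods visible in the removed code and the calls the tests make (update, query, batch_update, count_instances); the exact class layout is an assumption, not part of this commit:

    import dbus

    TRACKER = 'org.freedesktop.Tracker1'
    TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
    RESOURCES_IFACE = 'org.freedesktop.Tracker1.Resources'

    class StoreHelper:
        # Hypothetical sketch of the helper the tests use as self.tracker;
        # the real implementation lives in common/utils/helpers.py.
        def __init__ (self):
            bus = dbus.SessionBus ()
            obj = bus.get_object (TRACKER, TRACKER_OBJ)
            self.resources = dbus.Interface (obj, dbus_interface=RESOURCES_IFACE)

        def update (self, sparql):
            return self.resources.SparqlUpdate (sparql)

        def batch_update (self, sparql):
            return self.resources.BatchSparqlUpdate (sparql)

        def query (self, sparql):
            return self.resources.SparqlQuery (sparql)

        def count_instances (self, rdf_class):
            # e.g. count_instances ("nco:PersonContact")
            result = self.query ("SELECT COUNT(?u) WHERE { ?u a %s . }" % rdf_class)
            return int (result[0][0])

Each new file is a stand-alone unittest2 suite, so, assuming the common.utils modules are on PYTHONPATH, it can be run directly, e.g. python tests/functional-tests/01-insertion.py.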
---
diff --git a/tests/functional-tests/01-insertion.py b/tests/functional-tests/01-insertion.py
index c5ba491..ede03b7 100755
--- a/tests/functional-tests/01-insertion.py
+++ b/tests/functional-tests/01-insertion.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python
#
# Copyright (C) 2010, Nokia <ivan frade nokia com>
#
@@ -18,6 +18,10 @@
# 02110-1301, USA.
#
+"""
+Stand-alone test cases for the store: inserting and removing information
+in pure SPARQL and checking that the data is really there
+"""
import sys,os,dbus
import unittest
import time
@@ -25,34 +29,21 @@ import random
import string
import datetime
-TRACKER = 'org.freedesktop.Tracker1'
-TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
-RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
+from common.utils import configuration as cfg
+import unittest2 as ut
+#import unittest as ut
+from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
+class TrackerStoreInsertionTests (CommonTrackerStoreTest):
+ """
+ Insert single- and multiple-valued properties and dates (valid and broken),
+ and check the results
+ """
-class TestUpdate (unittest.TestCase):
-
- def setUp(self):
- bus = dbus.SessionBus()
- tracker = bus.get_object(TRACKER, TRACKER_OBJ)
- self.resources = dbus.Interface (tracker,
- dbus_interface=RESOURCES_IFACE)
-
-
- def sparql_update(self,query):
- return self.resources.SparqlUpdate(query)
-
- def query(self,query):
- return self.resources.SparqlQuery(query)
-
-
-""" Insertion test cases """
-class s_insert(TestUpdate):
-
-
- def test_insert_01(self):
-
+ def test_insert_01 (self):
"""
+ Simple insert of two triples.
+
1. Insert a InformationElement with title.
2. TEST: Query the title of that information element
3. Remove the InformationElement to keep everything as it was before
@@ -63,7 +54,7 @@ class s_insert(TestUpdate):
INSERT { <%s> a nie:InformationElement;
nie:title \"test_insert_01\". }
""" % (uri)
- self.sparql_update (insert)
+ self.tracker.update (insert)
""" verify the inserted item """
query = """
@@ -72,629 +63,486 @@ class s_insert(TestUpdate):
nie:title ?t .
}
""" % (uri)
- results = self.query (query)
+ results = self.tracker.query (query)
self.assertEquals (str(results[0][0]), "test_insert_01")
""" delete the inserted item """
delete = """
- DELETE { <%s> a nie:InformationElement. }
+ DELETE { <%s> a rdfs:Resource. }
""" % (uri)
- self.sparql_update (delete)
+ self.tracker.update (delete)
def test_insert_02(self):
- ''' SparqlUpdate: Insert triples and check using SparqlQuery'''
-
- self.sparql_update('INSERT {<urn:uuid:7646007> a nco:Contact; \
- nco:fullname "Artist_1_update". \
- <file:///media/PIKKUTIKKU/5000_songs_with_metadata_and_album_arts/Artist_1/1_Album/10_song3.mp3> a nmm:MusicPiece,nfo:FileDataObject,nmm:MusicAlbum;\
- nfo:fileName "10_song.mp3"; \
- nfo:fileLastModified "2008-10-23T13:47:02" ; \
- nfo:fileCreated "2008-12-16T12:41:20"; \
- nfo:fileSize 17630; \
- nmm:length 219252; \
- nmm:albumTitle "anything".}')
-
- self.verify_test_insert_02()
-
-
- def verify_test_insert_02(self):
-
- result = self.query('SELECT ?artist ?date WHERE{ \
- ?contact nco:fullname ?artist . \
- ?time nfo:fileCreated ?date . \
- FILTER (?artist = "Artist_1_update") \
- }')
- print result
-
- for i in range(len(result)):
- if result[i][0] == 'Artist_1_update':
- if result[i][1] == 'anything' and result[i][2] == '219252':
- self.assert_(True,'Pass')
- return
- else:
- if i < range(len(result) - 1):
- continue
- else:
- self.fail('Fail %s' %result)
-
+ """
+ Insert a bigger set of triples (linking two objects)
+ """
- def test_insert_03(self):
- ''' SparqlUpdate: Insert triples and check using SparqlQuery'''
-
- self.sparql_update('INSERT {<urn:uuid:7646004> a nco:Contact; \
- nco:fullname "Artist_4_update". \
- <file:///media/PIKKUTIKKU/5000_songs_with_metadata_and_album_arts/Artist_4/4_Album/4_song_1.mp3> a nmm:MusicPiece,nfo:FileDataObject;\
- nfo:fileName "4_song_1.mp3"; \
- nfo:fileCreated "2008-12-16T12:41:20"; \
- nfo:fileLastModified "2008-12-23T13:47:02" ; \
- nfo:fileSize 17630; \
- nmm:musicAlbum "4_Album_update"; \
- nmm:trackNumber "11"; \
- nmm:length 219252; \
- nmm:performer <urn:uuid:7646004>.}')
-
- self.verify_test_insert_03()
-
-
- def verify_test_insert_03(self):
-
- result = self.query('SELECT ?artist ?album ?len ?trkNo ?fname ?fSz WHERE{ \
- ?contact nco:fullname ?artist . \
- ?song nmm:musicAlbum ?album ; \
- nmm:length ?len ;\
- nmm:trackNumber ?trkNo.\
- ?songfile nfo:fileName ?fname ;\
- nfo:fileSize ?fSz .\
- FILTER (?fname = "4_song_1.mp3") \
- }')
- print len(result)
-
- for i in range(len(result)):
- if result[i][0] == 'Artist_4_update':
- if result[i][1] == '4_Album_update' and result[i][2] == '219252' and result[i][3] == '11' and result[i][4] == '4_song_1.mp3' and result[i][5] == '17630':
- self.assert_(True,'Pass')
- return
- else:
- if i < range(len(result) - 1):
- continue
- else:
- self.fail('Fail %s' %result)
+ self.tracker.update("""
+ INSERT {
+ <urn:uuid:bob-dylan> a nmm:Artist;
+ nmm:artistName 'Bob Dylan'.
+
+ <file:///a/b/c/10_song3.mp3> a nmm:MusicPiece, nfo:FileDataObject;
+ nfo:fileName 'subterranean-homesick-blues.mp3';
+ nfo:fileLastModified '2008-10-23T13:47:02' ;
+ nfo:fileCreated '2008-12-16T12:41:20' ;
+ nfo:fileSize 17630 ;
+ nmm:length 219252 ;
+ nie:title 'Subterranean homesick blues';
+ nmm:performer <urn:uuid:bob-dylan>.
+ }
+ """)
+
+ QUERY = """
+ SELECT ?uri ?title ?length WHERE {
+ ?uri a nmm:MusicPiece ;
+ nmm:performer <urn:uuid:bob-dylan> ;
+ nie:title ?title ;
+ nmm:length ?length .
+ }
+ """
- def test_insert_04(self):
- """Insert, delete same single valued properties multiple times."""
-
- """ Delete single valued properties of music file."""
- self.sparql_update('DELETE { <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:usageCounter ?v } WHERE { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:usageCounter ?v . }')
- self.sparql_update('DELETE { <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:contentAccessed ?v } WHERE { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:contentAccessed ?v . }')
-
+ result = self.tracker.query (QUERY)
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 3) # uri, title, length
+ self.assertEquals (result[0][0], "file:///a/b/c/10_song3.mp3")
+ self.assertEquals (result[0][1], "Subterranean homesick blues")
+ self.assertEquals (result[0][2], "219252")
+
+ self.tracker.update ("""
+ DELETE {
+ <urn:uuid:bob-dylan> a rdfs:Resource.
+ <file:///a/b/c/10_song3.mp3> a rdfs:Resource.
+ }
+ """)
- """ Insert the same single valued properties of music file."""
- self.sparql_update('INSERT { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> a nmm:MusicPiece,nfo:FileDataObject;\
- nie:usageCounter "1"; \
- nie:contentAccessed "2000-01-01T00:40:47Z" . }')
-
- """ Delete again the single valued properties of music file."""
- self.sparql_update('DELETE { <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:usageCounter ?v } WHERE { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:usageCounter ?v . }')
- self.sparql_update('DELETE { <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:contentAccessed ?v } WHERE { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> nie:contentAccessed ?v . }')
-
- """ Insert the same single valued properties of music file with different values."""
- self.sparql_update('INSERT { \
- <file:///media/PIKKUTIKKU/album_4/4_song4.mp3> a nmm:MusicPiece,nfo:FileDataObject;\
- nie:usageCounter "2"; \
- nie:contentAccessed "2000-01-01T00:40:48Z" . }')
+ def test_insert_03(self):
+ """
+ Check that all the inserted values can be queried back
+ """
- """ Query for the property values and verify whether the last change is applied."""
- result = self.query('SELECT ?song ?time WHERE{ \
- ?song nie:usageCounter "2"; \
- nie:contentAccessed ?time . \
- }')
- print len(result)
+ self.tracker.update("""
+ INSERT {
+ <urn:uuid:7646004> a nmm:Artist;
+ nmm:artistName 'John Lennon' .
+
+ <urn:uuid:123123123> a nmm:MusicAlbum ;
+ nmm:albumTitle 'Imagine' .
+
+ <file:///a/b/c/imagine.mp3> a nmm:MusicPiece, nfo:FileDataObject;
+ nfo:fileName 'imagine.mp3';
+ nfo:fileCreated '2008-12-16T12:41:20';
+ nfo:fileLastModified '2008-12-23T13:47:02' ;
+ nfo:fileSize 17630;
+ nmm:musicAlbum <urn:uuid:123123123>;
+ nmm:trackNumber '11';
+ nmm:length 219252;
+ nmm:performer <urn:uuid:7646004>.
+ }
+
+ """)
+
+ QUERY = """
+ SELECT ?artist ?length ?trackN ?album ?size ?flm ?fc ?filename WHERE {
+ <file:///a/b/c/imagine.mp3> a nmm:MusicPiece ;
+ nmm:performer ?x ;
+ nmm:length ?length ;
+ nmm:trackNumber ?trackN ;
+ nmm:musicAlbum ?y ;
+ nfo:fileSize ?size ;
+ nfo:fileLastModified ?flm ;
+ nfo:fileCreated ?fc ;
+ nfo:fileName ?filename.
+
+ ?x nmm:artistName ?artist .
+ ?y nmm:albumTitle ?album.
+ }
+ """
+ result = self.tracker.query(QUERY)
+
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 8)
+ self.assertEquals (result[0][0], "John Lennon")
+ self.assertEquals (result[0][1], "219252")
+ self.assertEquals (result[0][2], "11")
+ self.assertEquals (result[0][3], "Imagine")
+ self.assertEquals (result[0][4], "17630")
+ # FIXME Tracker returns this translated to the current timezone
+ #self.assertEquals (result[0][5], "2008-12-23T11:47:02Z")
+ #self.assertEquals (result[0][6], "2008-12-16T10:41:20Z")
+ self.assertEquals (result[0][7], "imagine.mp3")
+
+ self.tracker.update ("""
+ DELETE {
+ <urn:uuid:123123123> a rdfs:Resource .
+ }
- for i in range(len(result)):
- if result[i][0] == 'file:///media/PIKKUTIKKU/album_4/4_song4.mp3':
- if result[i][1] == '2000-01-01T00:40:48Z':
- self.assert_(True,'Pass')
- else:
- self.fail('Fail %s' %result)
+ DELETE {
+ <file:///a/b/c/imagine.mp3> a rdfs:Resource.
+ }
+ """)
+
- """Date-Time storage testing """
- def test_insert_date_01(self):
+ def test_insert_04(self):
+ """
+ Insert and delete the same single-valued properties multiple times.
+ """
+ for i in range (0, 3):
+ # Delete single valued properties of music file.
+ self.tracker.update("""
+ DELETE {
+ <test://instance-1> nie:usageCounter ?v
+ } WHERE {
+ <test://instance-1> nie:usageCounter ?v .
+ }
+ DELETE {
+ <test://instance-1> nie:contentAccessed ?w .
+ } WHERE {
+ <test://instance-1> nie:contentAccessed ?w .
+ }
+ """)
+
+ # Insert the same single valued properties of music file.
+ self.tracker.update("""
+ INSERT {
+ <test://instance-1> a nmm:MusicPiece, nfo:FileDataObject;
+ nie:usageCounter '%d';
+ nie:contentAccessed '2000-01-01T00:4%d:47Z' .
+ }""" % (i, i))
+
+ # Query for the property values and verify whether the last change is applied.
+ result = self.tracker.query ("""
+ SELECT ?playcount ?date WHERE {
+ <test://instance-1> a nmm:MusicPiece ;
+ nie:usageCounter ?playcount ;
+ nie:contentAccessed ?date.
+ }""")
+
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 2)
+ self.assertEquals (int (result[0][0]), i)
+ self.assertEquals (result[0][1], "2000-01-01T00:4%d:47Z" % (i))
+
+ self.tracker.update ("""
+ DELETE { <test://instance-1> a rdfs:Resource. }
+ """)
+
+ def __insert_valid_date_test (self, datestring, year, month, day, hours, minutes, seconds, timezone):
+ """
+ Insert a property with the given datestring value, retrieve its date components
+ and validate them against the expected results (the remaining parameters)
+ """
+ testId = random.randint (10, 1000)
+ self.tracker.update ("""
+ INSERT {
+ <test://instance-insert-date-%d> a nie:InformationElement;
+ nie:informationElementDate '%s'.
+ }
+ """ % (testId, datestring))
+
+ result = self.tracker.query ("""
+ SELECT ?s fn:year-from-dateTime (?v)
+ fn:month-from-dateTime (?v)
+ fn:day-from-dateTime (?v)
+ fn:hours-from-dateTime (?v)
+ fn:minutes-from-dateTime (?v)
+ fn:seconds-from-dateTime (?v)
+ fn:timezone-from-dateTime (?v)
+ WHERE {
+ ?s a nie:InformationElement;
+ nie:informationElementDate ?v .
+ }
+ """)
+ try:
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 8)
+ self.assertEquals (result[0][0], "test://instance-insert-date-%d" % (testId))
+ self.assertEquals (result[0][1], year)
+ self.assertEquals (result[0][2], month)
+ self.assertEquals (result[0][3], day)
+ self.assertEquals (result[0][4], hours)
+ self.assertEquals (result[0][5], minutes)
+ self.assertEquals (result[0][6], seconds)
+ # FIXME To validate this we need to take into account the locale
+ # self.assertEquals (result[0][7], timezone)
+ finally:
+ self.tracker.update ("""
+ DELETE { <test://instance-insert-date-%d> a rdfs:Resource. }
+ """ % (testId))
+
+ """Date-Time storage testing """
+ def test_insert_date_01 (self):
"""
1. Insert an InformationElement with a date having local timezone info.
2. TEST: Query and verify the various components of the date
"""
-
- uri = "tracker:test_date_01"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T13:14:15+0400". }
- """ % (uri)
- self.sparql_update (insert)
-
- """ verify the inserted item """
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
- print result
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_01':
- if result[i][1] == '2004' and result[i][2] == '05' and result[i][3] == '06' and result[i][4] == '13' and result[i][5] == '14' and result[i][6] == '15' and result[i][7] == '14400' :
- self.assert_(True,'Pass')
- else:
- self.fail('Fail %s' %result)
-
- def test_insert_date_02(self):
-
+ self.__insert_valid_date_test ("2004-05-06T13:14:15+0400",
+ "2004", "05", "06", "13", "14", "15", "14400")
+
+
+ def test_insert_date_02 (self):
"""
- 1. Insert a InformationElement with invalid year in date.
+ 1. Insert an InformationElement with a date ending with "Z" as TZD.
2. TEST: Query and verify the various components of the date
"""
+ self.__insert_valid_date_test ("2004-05-06T13:14:15Z",
+ "2004", "05", "06", "13", "14", "15", "0")
- uri = "tracker:test_date_02"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "204-05-06T13:14:15+0400". }
- """ % (uri)
- try :
- self.resources.SparqlUpdate(insert)
-
- except :
- print "error in query execution"
- self.assert_(True,'error in query execution')
-
- """ verify whether the item is inserted"""
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
- print result
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_02':
- if result[i][1] != '204':
- self.assert_(True,'Pass')
- else:
- self.fail('Fail %s' %result)
-
-
- def test_insert_date_03(self):
-
+ def test_insert_date_03 (self):
"""
- 1. Insert a InformationElement with date ending with "Z" in TZD.
+ 1. Insert an InformationElement with a date with no TZD.
2. TEST: Query and verify the various components of the date
"""
+ self.__insert_valid_date_test ("2004-05-06T13:14:15",
+ "2004", "05", "06", "13", "14", "15", "10800") # HEL timezone?
- uri = "tracker:test_date_03"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T13:14:15Z". }
- """ % (uri)
- self.sparql_update (insert)
-
- """ verify the inserted item """
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
- print result
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_03':
- if result[i][1] == '2004' and result[i][2] == '05' and result[i][3] == '06' and result[i][4] == '13' and result[i][5] == '14' and result[i][6] == '15' and result[i][7] == '0' :
- self.assert_(True,'Pass')
- else:
- self.fail('Fail %s' %result)
-
-
- def test_insert_date_04(self):
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def test_insert_date_04 (self):
+ """
+ 1. Insert an InformationElement with a date having local timezone info
+ with some minutes in it.
+ 2. TEST: Query and verify the various components of the date
+ """
+ self.__insert_valid_date_test ("2004-05-06T13:14:15+0230",
+ "2004", "05", "06", "13", "14", "15", "9000")
+
+
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def __test_insert_date_05 (self):
+ """
+ 1. Insert an InformationElement with a date having a negative local timezone offset.
+ 2. TEST: Query and verify the various components of the date
+ """
+ self.__insert_valid_date_test ("2004-05-06T13:14:15-0230",
+ "2004", "05", "06", "13", "14", "15", "-9000")
+
+
+ def __insert_invalid_date_test (self, datestring):
+ self.assertRaises (Exception, self.tracker.update, """
+ INSERT {
+ <test://instance-insert-invalid-date-01> a nie:InformationElement;
+ nie:informationElementDate '%s'.
+ }
+ """ % (datestring))
+
+ result = self.tracker.query ("""
+ SELECT ?s fn:year-from-dateTime (?v)
+ fn:month-from-dateTime (?v)
+ fn:day-from-dateTime (?v)
+ fn:hours-from-dateTime (?v)
+ fn:minutes-from-dateTime (?v)
+ fn:seconds-from-dateTime (?v)
+ fn:timezone-from-dateTime (?v)
+ WHERE {
+ ?s a nie:InformationElement ;
+ nie:informationElementDate ?v .
+ }
+ """)
+ self.assertEquals (len (result), 0)
+
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def test_insert_invalid_date_01 (self):
"""
- 1. Insert a InformationElement with date without TZD.
+ 1. Insert an InformationElement with an invalid year in the date.
2. TEST: Query and verify the various components of the date
"""
+ self.__insert_invalid_date_test ("204-05-06T13:14:15+0400")
- uri = "tracker:test_date_04"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T13:14:15". }
- """ % (uri)
- self.sparql_update (insert)
-
- """ verify the inserted item """
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
- print result
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_04':
- if result[i][1] == '2004' and result[i][2] == '05' and result[i][3] == '06' and result[i][4] == '13' and result[i][5] == '14' and result[i][6] == '15':
- self.assert_(True,'Pass')
- else:
- self.fail('Fail %s' %result)
-
-
- def test_insert_date_05(self):
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def test_insert_invalid_date_02 (self):
"""
1. Insert an InformationElement with a date without time.
2. TEST: Query and verify the various components of the date
"""
+ self.__insert_invalid_date_test ("2004-05-06")
- uri = "tracker:test_date_05"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06". }
- """ % (uri)
- try :
- self.resources.SparqlUpdate(insert)
- except :
- print "error in query execution"
- self.assert_(True,'error in query execution')
-
- else:
- self.fail('Query successfully executed')
-
-
-
- def test_insert_date_06(self):
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def test_insert_invalid_date_03 (self):
"""
1. Insert an InformationElement with a date without time, only the "T" separator.
"""
-
- uri = "tracker:test_date_06"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T". }
- """ % (uri)
-
- try :
- self.resources.SparqlUpdate(insert)
-
- except :
- print "error in query execution"
- self.assert_(True,'error in query execution')
-
- else:
- self.fail('Query successfully executed')
+ self.__insert_invalid_date_test ("2004-05-06T")
-
- def test_insert_date_07(self):
-
+ # @ut.skipIf (1, "It times out in the daemon. Investigate")
+ def test_insert_invalid_date_04 (self):
"""
- 1. Insert a InformationElement with date having local timezone info
- with some minutes in it.
- 2. TEST: Query and verify the various componentes of date
+ 1. Insert an InformationElement with a date having letters instead of numbers.
"""
+ self.__insert_invalid_date_test ("2004-05-06T1g:14:15-0200")
+
- uri = "tracker:test_date_07"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T13:14:15+0230". }
- """ % (uri)
- self.sparql_update (insert)
-
- """ verify the inserted item """
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_07':
- if result[i][1] == '2004' and result[i][2] == '05' and result[i][3] == '06' and result[i][4] == '13' and result[i][5] == '14' and result[i][6] == '15' and result[i][7] == '9000':
- self.assert_(True,'Pass')
- else:
- print result[i]
- self.fail('Fail %s' %result)
-
- def test_insert_date_08(self):
-
- """
- 1. Insert a InformationElement with date having
- local timezone info in negative.
- 2. TEST: Query and verify the various componentes of date
+class TrackerStoreDeleteTests (CommonTrackerStoreTest):
+ """
+ Use DELETE in SPARQL and check that the information is actually removed
+ """
+ def test_delete_01 (self):
"""
-
- uri = "tracker:test_date_08"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T13:14:15-0230". }
- """ % (uri)
- self.sparql_update (insert)
-
- """ verify the inserted item """
-
- query = 'SELECT ?s fn:year-from-dateTime (?v) \
- fn:month-from-dateTime (?v) \
- fn:day-from-dateTime (?v) \
- fn:hours-from-dateTime (?v) \
- fn:minutes-from-dateTime (?v) \
- fn:seconds-from-dateTime (?v) \
- fn:timezone-from-dateTime (?v) \
- WHERE { ?s a nie:InformationElement; \
- nie:informationElementDate ?v . \
- }'
- result = self.query (query)
-
- for i in range(len(result)):
- if result[i][0] == 'tracker:test_date_08':
- if result[i][1] == '2004' and result[i][2] == '05' and result[i][3] == '06' and result[i][4] == '13' and result[i][5] == '14' and result[i][6] == '15' and result[i][7] == '-9000':
- self.assert_(True,'Pass')
- else:
- print result[i]
- self.fail('Fail %s' %result)
-
- def test_insert_date_09(self):
-
- """
- 1. Insert a InformationElement with date having some letters instead of numbers
- 2. TEST: Query and verify the various componentes of date
+ Insert triples and delete one triple. Verify the deletion with a query
"""
- uri = "tracker:test_date_09"
- insert = """
- INSERT { <%s> a nie:InformationElement;
- nie:informationElementDate "2004-05-06T1g:14:15-0200". }
- """ % (uri)
-
- try :
- self.resources.SparqlUpdate(insert)
-
- except :
- print "error in query execution"
- self.assert_(True,'error in query execution')
+ # first insert
+ self.tracker.update ("""
+ INSERT {
+ <urn:uuid:7646001> a nco:Contact;
+ nco:fullname 'Artist_1_delete'.
+ <test://instance-test-delete-01> a nmm:MusicPiece, nfo:FileDataObject;
+ nfo:fileName '11_song_del.mp3';
+ nfo:genre 'Classic delete';
+ nmm:musicAlbum '1_Album_delete';
+ nmm:performer <urn:uuid:7646001>.
+ }
+ """)
- else:
- self.fail('Query successfully executed')
-
+ # verify the insertion
+ result = self.tracker.query ("""
+ SELECT ?u WHERE {
+ ?u a nmm:MusicPiece ;
+ nfo:genre 'Classic delete' .
+ }
+ """)
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 1)
+ self.assertEquals (result[0][0], "test://instance-test-delete-01")
+
+ # now delete
+ self.tracker.update("""
+ DELETE {
+ <test://instance-test-delete-01> a rdfs:Resource.
+ }
+ """)
-""" Deletion test cases """
-class s_delete(TestUpdate):
-
- def test_delete_01(self):
-
- ''' Insert triples and Delete a triple. Verify the deletion with a query'''
-
- """first insert """
- self.sparql_update('INSERT {<urn:uuid:7646001> a nco:Contact; \
- nco:fullname "Artist_1_delete". \
- <file:///media/PIKKUTIKKU/5000_songs_with_metadata_and_album_arts/Artist_1/1_Album/11_song_del.mp3> a nmm:MusicPiece,nfo:FileDataObject;\
- nfo:fileName "11_song_del.mp3"; \
- nfo:genre "Classic delete"; \
- nmm:musicAlbum "1_Album_delete"; \
- nmm:performer <urn:uuid:7646001>.}')
-
- """verify the insertion """
- self.verify_test_insert_delete_01()
-
- """now delete """
- self.sparql_update('DELETE { \
- <file:///media/PIKKUTIKKU/5000_songs_with_metadata_and_album_arts/Artist_1/1_Album/11_song_del.mp3> nfo:genre "Classic delete".}')
- print " After deleting a triple"
- """verify the deletion """
- self.verify_test_delete_01()
-
-
- def verify_test_insert_delete_01(self):
- result = self.query('SELECT ?fname ?genre WHERE { \
- ?songfile nfo:fileName ?fname ;\
- nfo:genre ?genre .\
- FILTER (?genre = "Classic delete") \
- }')
- if result != []:
- for i in range(len(result)):
- if result[i][0] == '11_song_del.mp3':
- if result[i][1] == "Classic delete":
- self.assert_(True,'Pass')
- else:
- self.fail('File not inserted, so failing the \'Delete genre \' testcase')
- else:
- self.fail('File not inserted, so failing the \'Delete genre \' testcase')
-
- def verify_test_delete_01(self):
-
- result = self.query('SELECT ?fn WHERE { \
- ?f a nmm:MusicPiece . \
- ?f nfo:fileName ?fn ;\
- nfo:genre ?genre .\
- FILTER (?genre = "Classic delete") \
- }' )
-
- if result != []:
- for i in range(len(result)):
- if result[i][0] == '11_song_del.mp3':
- self.fail('Fail %s' %result)
- else:
- self.assert_(True,'Pass')
- else:
- self.assert_(True,'Pass')
+ # Check the instance is not there
+ result = self.tracker.query ("""
+ SELECT ?u WHERE {
+ ?u a nmm:MusicPiece ;
+ nfo:genre 'Classic delete' .
+ }
+ """)
+ self.assertEquals (len (result), 0)
def test_delete_02 (self):
-
- """Delete a MusicAlbum and count the album """
- """
+ """
+ Delete a MusicAlbum and check the album count
+
1. add a music album.
2. count the number of albums
3. delete an album
4. count the number of albums
"""
- """Add a music album """
- self.sparql_update('INSERT {<06_Album_delete> a nmm:MusicAlbum;\
- nmm:albumTitle "06_Album_delete".}')
+ initial = self.tracker.count_instances ("nmm:MusicAlbum")
- """get the count of music albums"""
- result = self.query('SELECT ?album WHERE { \
- ?album a nmm:MusicAlbum. \
- } ')
- count_before_del = len(result)
- print len(result)
- print result
+ """Add a music album """
+ self.tracker.update ("""
+ INSERT {
+ <test://instance-delete-02> a nmm:MusicAlbum;
+ nmm:albumTitle '06_Album_delete'.
+ }
+ """)
+ after_insert = self.tracker.count_instances ("nmm:MusicAlbum")
+ self.assertEquals (initial+1, after_insert)
- """Delete the added music album """
- self.sparql_update('DELETE { \
- <06_Album_delete> a nmm:MusicAlbum.}')
+ """Delete the added music album """
+ self.tracker.update("""
+ DELETE {
+ <test://instance-delete-02> a nmm:MusicAlbum.
+ }
+ """)
"""get the count of music albums"""
- result = self.query('SELECT ?album WHERE { \
- ?album a nmm:MusicAlbum. \
- } ')
+ after_removal = self.tracker.count_instances ("nmm:MusicAlbum")
- count_after_del = len(result)
- print len(result)
+ self.assertEquals (after_removal, initial)
- self.assertEquals (count_before_del - 1, count_after_del)
-
-""" Batch Update test cases """
-class s_batch_update(TestUpdate):
+class TrackerStoreBatchUpdateTest (CommonTrackerStoreTest):
+ """
+ Insert data using the BatchSparqlUpdate method in the store
+ """
def test_batch_insert_01(self):
- """batch insertion of 100 contacts:
- 1. delete those existing contacts which we want to insert again.
- 2. insert 100 contacts.
- 3. delete the inserted contacts.
"""
+ batch insertion of test contacts:
+ 1. insert a batch of contacts.
+ 2. delete the inserted contacts.
+ """
+ NUMBER_OF_TEST_CONTACTS = 3
+ # query no. of existing contacts. (predefined instances in the DB)
+ count_before_insert = self.tracker.count_instances ("nco:PersonContact")
+
+ # insert contacts.
+ CONTACT_TEMPLATE = """
+ <test://instance-contact-%d> a nco:PersonContact ;
+ nco:nameGiven 'Contact-name %d';
+ nco:nameFamily 'Contact-family %d';
+ nie:generator 'test-instance-to-remove' ;
+ nco:contactUID '%d';
+ nco:hasPhoneNumber <tel:%s> .
+ """
- """delete those existing contacts which we want to insert again.
- the uid creation is same here as it's created during insertion."""
-
- for j in range(100) :
- uid = j*1000+1234
- delete = 'DELETE {?contact a rdfs:Resource} WHERE {?contact nco:contactUID <%s> }' %uid
- self.sparql_update (delete)
-
- """ querry no. of existing contacts. """
- result = self.query ('SELECT ?c ?Fname ?Gname ?number WHERE { \
- ?c a nco:PersonContact ; \
- nco:nameGiven ?Gname ; \
- nco:nameFamily ?Fname; \
- nco:hasPhoneNumber ?number. \
- } LIMIT 1000')
-
- count_before_insert = len(result)
- print "contact count before insert %d" %count_before_insert
-
- """insert 100 contacts."""
- INSERT_SPARQL = "; ".join([
- "<%s> a nco:PersonContact",
- "nco:nameGiven '%s'",
- "nco:nameFamily '%s'",
- "nco:contactUID %s",
- "nco:hasPhoneNumber <tel:%s>."
- ])
- a="\n"
- """Preparing a list of Contacts """
- for j in range(100) :
- contact= str(random.randint(0, sys.maxint))
- names1=['christopher','paul','timothy','stephen','michael','andrew','harold','douglas','timothy','walter','kevin','joshua','robert','matthew','broderick','lacy','rashad','darro','antonia','chas']
-
- names2=['cyril','ronny','stevie','lon','freeman','erin','duncan','kennith','carmine','augustine','young','chadwick','wilburn','jonas','lazaro','brooks','ariel','dusty','tracey','scottie','seymour']
- firstname=random.choice(names1)
- lastname=random.choice(names2)
- contactUID= j*1000+1234
- PhoneNumber= str(random.randint(0, sys.maxint))
- sparql_insert=INSERT_SPARQL % (contact,firstname,lastname, contactUID,PhoneNumber)
- a=a+sparql_insert
- INSERT= "INSERT{" + a + "}"
-
- self.resources.BatchSparqlUpdate(INSERT)
-
- """ querry no. of existing contacts. """
- result = self.query ('SELECT ?c ?Fname ?Gname ?number WHERE { \
- ?c a nco:PersonContact ; \
- nco:nameGiven ?Gname ; \
- nco:nameFamily ?Fname; \
- nco:hasPhoneNumber ?number. \
- } LIMIT 1000')
-
- count_after_insert = len(result)
- print "contact count after insert %d" %count_after_insert
-
- """ cleanup the inserted contacts """
- for j in range(100) :
- uid = j*1000+1234
- delete = 'DELETE {?contact a rdfs:Resource} WHERE {?contact nco:contactUID <%s> }' %uid
- self.sparql_update (delete)
-
- """test verification """
- if count_after_insert == 1000:
- if count_after_insert >= count_before_insert:
- self.assert_(True,'Pass')
- else:
- self.fail('batch insertion failed')
- else:
- if count_after_insert == count_before_insert + 100:
- self.assert_(True,'Pass')
- else:
- self.fail('batch insertion failed')
-
-
-class phone_no (TestUpdate):
- def test_phone_01 (self):
- """1. Setting the maemo:localPhoneNumber property to last 7 digits of phone number.
- 2. Receiving a message from a contact whose localPhoneNumber is saved.
- 3. Querying for the local phone number.
+ global contact_list
+ contact_list = []
+ def complete_contact (contact_template):
+ random_phone = "".join ([str(random.randint (0, 9)) for i in range (0, 9)])
+ contact_counter = random.randint (0, 10000)
+
+ # Avoid duplicates
+ while contact_counter in contact_list:
+ contact_counter = random.randint (0, 10000)
+ contact_list.append (contact_counter)
+
+ return contact_template % (contact_counter,
+ contact_counter,
+ contact_counter,
+ contact_counter,
+ random_phone)
+
+ contacts = map (complete_contact, [CONTACT_TEMPLATE] * NUMBER_OF_TEST_CONTACTS)
+ INSERT_SPARQL = "\n".join (["INSERT {"] + contacts +["}"])
+ self.tracker.batch_update (INSERT_SPARQL)
+
+ # Check all instances are in
+ count_after_insert = self.tracker.count_instances ("nco:PersonContact")
+ self.assertEquals (count_before_insert + NUMBER_OF_TEST_CONTACTS, count_after_insert)
+
+ """ Delete the inserted contacts """
+ DELETE_SPARQL = """
+ DELETE {
+ ?x a rdfs:Resource .
+ } WHERE {
+ ?x a nco:PersonContact ;
+ nie:generator 'test-instance-to-remove' .
+ }
+ """
+ self.tracker.update (DELETE_SPARQL)
+ count_final = self.tracker.count_instances ("nco:PersonContact")
+ self.assertEquals (count_before_insert, count_final)
+
+class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
+ """
+ Tests around phone numbers (Maemo specific): inserting correct/incorrect ones
+ and running queries to get the contact from the number.
+ """
+
+ @ut.skipIf (not cfg.haveMaemo, "This test uses maemo:specific properties")
+ def test_phone_01 (self):
+ """
+ 1. Setting the maemo:localPhoneNumber property to the last 7 digits of the phone number.
+ 2. Receiving a message from a contact whose localPhoneNumber is saved.
+ 3. Query messages from the local phone number
"""
PhoneNumber = str(random.randint (0, sys.maxint))
UUID = str(time.time())
@@ -707,69 +555,63 @@ class phone_no (TestUpdate):
Given_Name = 'test_GN_' + `ID`
Family_Name = 'test_FN_' + `ID`
- INSERT_SPARQL = """ INSERT {
- <tel:%s> a nco:PhoneNumber ;
- nco:phoneNumber '%s' .
- <urn:uuid:%s> a nco:PersonContact;
- nco:contactUID <contact:test_%s>;
- nco:nameFamily '%s' ;
- nco:nameGiven '%s'.
- <urn:uuid:%s> nco:hasPhoneNumber <tel:%s>.
- <tel:%s> maemo:localPhoneNumber '%s'
- }"""
- sparql_insert = INSERT_SPARQL % (PhoneNumber,PhoneNumber,UUID,UUID1,Given_Name,Family_Name,UUID,PhoneNumber,PhoneNumber,localNumber)
- try :
- self.resources.SparqlUpdate(sparql_insert)
- except :
- self.fail('Insertion is not successful')
-
- INSERT_SPARQL1 = """ INSERT {
- <urn:uuid:%s> a nmo:Message ;
- nmo:from [a nco:Contact ;
- nco:hasPhoneNumber <tel:%s>];
- nmo:receivedDate '%s' ;
- nmo:plainTextMessageContent 'hello' }"""
-
- sparql_insert = INSERT_SPARQL1 % ( UUID2,PhoneNumber,Received)
- try :
- self.resources.SparqlUpdate(sparql_insert)
- except :
- self.fail('Insertion is not successful')
-
- QUERY_SPARQL = """ SELECT ?local
- WHERE { ?msg a nmo:Message .
- ?c a nco:Contact;
- nco:hasPhoneNumber <tel:%s>.
- <tel:%s> maemo:localPhoneNumber ?local
-
- } """
- QUERY= QUERY_SPARQL %(PhoneNumber,PhoneNumber)
- result = self.resources.SparqlQuery(QUERY)
- self.assert_(result[0][0].find(localNumber)!=-1 , 'Query is not succesful')
-
+ INSERT_CONTACT_PHONE = """
+ INSERT {
+ <tel:123456789> a nco:PhoneNumber ;
+ nco:phoneNumber '00358555444333' ;
+ maemo:localPhoneNumber '5444333'.
+
+ <test://test_phone_1/contact> a nco:PersonContact;
+ nco:contactUID '112';
+ nco:nameFamily 'Family-name' ;
+ nco:nameGiven 'Given-name'.
+ <test://test_phone_1/contact> nco:hasPhoneNumber <tel:123456789>.
+ }
+ """
+ self.tracker.update (INSERT_CONTACT_PHONE)
+
+ INSERT_MESSAGE = """
+ INSERT {
+ <test://test_phone_1/message> a nmo:Message ;
+ nmo:from [a nco:Contact ; nco:hasPhoneNumber <tel:123456789>];
+ nmo:receivedDate '2010-01-02T10:13:00Z' ;
+ nmo:plainTextMessageContent 'hello'
+ }
+ """
+ self.tracker.update (INSERT_MESSAGE)
+
+ QUERY_SPARQL = """
+ SELECT ?msg WHERE {
+ ?msg a nmo:Message;
+ nmo:from ?c .
+ ?c nco:hasPhoneNumber ?n .
+ ?n maemo:localPhoneNumber '5444333'.
+ } """
+ result = self.tracker.query (QUERY_SPARQL)
+ self.assertEquals (len (result), 1)
+ self.assertEquals (len (result[0]), 1)
+ self.assertEquals (result[0][0], "test://test_phone_1/message")
+
+
+ @ut.skipIf (not cfg.haveMaemo, "This test uses maemo:specific properties")
def test_phone_02 (self):
-
- """ Inserting a local phone number which have spaces """
-
- INSERT_SPARQL = """ INSERT {
- <tel+3333333333> a nco:PhoneNumber ;
- nco:phoneNumber <tel+3333333333> .
- <urn:uuid:9876> a nco:PersonContact;
+ """
+ Inserting a local phone number which has spaces
+ """
+ INSERT_SPARQL = """
+ INSERT {
+ <tel+3333333333> a nco:PhoneNumber ;
+ nco:phoneNumber <tel+3333333333> ;
+ maemo:localPhoneNumber '333 333'.
+
+ <test://test_phone_02/contact> a nco:PersonContact;
nco:nameFamily 'test_name_01' ;
- nco:nameGiven 'test_name_02'.
- <urn:uuid:98765> nco:hasPhoneNumber <tel+3333333333>.
- <tel+3333333333> maemo:localPhoneNumber <333 333> }"""
-
- try:
- self.resources.SparqlUpdate(INSERT_SPARQL)
- except :
- print "error in query execution"
- self.assert_(True,'error in query execution')
- else:
- self.fail('Query successfully executed')
-
-
+ nco:nameGiven 'test_name_02';
+ nco:hasPhoneNumber <tel+3333333333> .
+ }
+ """
+ self.assertRaises (Exception, self.tracker.update, INSERT_SPARQL)
if __name__ == "__main__":
- unittest.main()
+ ut.main()
diff --git a/tests/functional-tests/03-fts-functions.py b/tests/functional-tests/03-fts-functions.py
index c8c86b6..cce6197 100755
--- a/tests/functional-tests/03-fts-functions.py
+++ b/tests/functional-tests/03-fts-functions.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/python
#
# Copyright (C) 2010, Nokia <ivan frade nokia com>
#
@@ -18,23 +18,23 @@
# 02110-1301, USA.
#
+"""
+These tests use only the store. They insert instances with known text
+and run SPARQL with fts:* functions to check the results.
+"""
import dbus
import unittest
import random
-TRACKER = 'org.freedesktop.Tracker1'
-TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
-RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
-
-class TestFTSFunctions (unittest.TestCase):
-
- def setUp (self):
- bus = dbus.SessionBus ()
- tracker = bus.get_object (TRACKER, TRACKER_OBJ)
- self.resources = dbus.Interface (tracker,
- dbus_interface=RESOURCES_IFACE);
-
+from common.utils import configuration as cfg
+import unittest2 as ut
+#import unittest as ut
+from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
+class TestFTSFunctions (CommonTrackerStoreTest):
+ """
+ Insert data with text and check that the fts:* functions return the expected results
+ """
def test_fts_rank (self):
"""
1. Insert a Contact1 with 'abcdefxyz' as fullname and nickname
@@ -58,7 +58,7 @@ class TestFTSFunctions (unittest.TestCase):
nco:nickname 'abcdefxyz abcdefxyz' .
}
"""
- self.resources.SparqlUpdate (insert_sparql)
+ self.tracker.update (insert_sparql)
query = """
SELECT ?contact WHERE {
@@ -66,7 +66,7 @@ class TestFTSFunctions (unittest.TestCase):
fts:match 'abcdefxyz' .
} ORDER BY DESC (fts:rank(?contact))
"""
- results = self.resources.SparqlQuery (query)
+ results = self.tracker.query (query)
self.assertEquals (len(results), 3)
self.assertEquals (results[0][0], "contact://test/fts-function/rank/3")
@@ -104,7 +104,7 @@ class TestFTSFunctions (unittest.TestCase):
nco:nickname 'abcdefxyz abcdefxyz' .
}
"""
- self.resources.SparqlUpdate (insert_sparql)
+ self.tracker.update (insert_sparql)
query = """
SELECT fts:offsets (?contact) WHERE {
@@ -112,7 +112,7 @@ class TestFTSFunctions (unittest.TestCase):
fts:match 'abcdefxyz' .
}
"""
- results = self.resources.SparqlQuery (query)
+ results = self.tracker.query (query)
self.assertEquals (len(results), 3)
self.assertEquals (len (results[0][0].split(",")), 4) # (u'151,1,161,1')
self.assertEquals (len (results[1][0].split(",")), 2) # (u'161,1')
@@ -129,4 +129,4 @@ class TestFTSFunctions (unittest.TestCase):
if __name__ == '__main__':
- unittest.main()
+ ut.main()
diff --git a/tests/functional-tests/13-threaded-store.py b/tests/functional-tests/13-threaded-store.py
new file mode 100755
index 0000000..7caddeb
--- /dev/null
+++ b/tests/functional-tests/13-threaded-store.py
@@ -0,0 +1,145 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2010, Nokia <ivan frade nokia com>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+"""
+Test that the threads in the daemon are working:
+ A very long query shouldn't block smaller queries.
+"""
+import os, dbus
+import gobject
+import glib
+import time
+from dbus.mainloop.glib import DBusGMainLoop
+
+from common.utils import configuration as cfg
+import unittest2 as ut
+#import unittest as ut
+from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
+
+MAX_TEST_TIME = 60 # seconds to finish the tests (to avoid infinite waitings)
+
+AMOUNT_SIMPLE_QUERIES = 10
+COMPLEX_QUERY_TIMEOUT = 15000 # ms (How long do we wait for an answer to the complex query)
+SIMPLE_QUERY_FREQ = 2 # seconds (How frequently we send a simple query to the daemon)
+
+class TestThreadedStore (CommonTrackerStoreTest):
+ """
+ When the database is big, running a complex query takes ages.
+ After cancelling the query, any following query is queued
+
+ Reported in bug NB#183499
+ """
+ def setUp (self):
+ self.main_loop = gobject.MainLoop ()
+ self.simple_queries_counter = AMOUNT_SIMPLE_QUERIES
+ self.simple_queries_answers = 0
+
+ def __populate_database (self):
+
+ self.assertTrue (os.path.exists ('ttl'))
+ for ttl_file in ["010-nco_EmailAddress.ttl",
+ "011-nco_PostalAddress.ttl",
+ "012-nco_PhoneNumber.ttl",
+ "014-nco_ContactEmail.ttl",
+ "015-nco_ContactCall.ttl",
+ "018-nco_PersonContact.ttl",
+ "012-nco_PhoneNumber.ttl",
+ "016-nco_ContactIM.ttl"]:
+ full_path = os.path.abspath(os.path.join ("ttl", ttl_file))
+ print full_path
+ self.tracker.get_tracker_iface ().Load ("file://" + full_path,
+ timeout=30000)
+
+ def test_complex_query (self):
+ start = time.time ()
+ self.__populate_database ()
+ end = time.time ()
+ print "Loading: %.3f sec." % (end-start)
+
+ COMPLEX_QUERY = """
+ SELECT ?url nie:url(?photo) nco:imContactStatusMessage (?url)
+ tracker:coalesce(nco:nameFamily (?url), nco:nameFamily (?url), nco:nameGiven (?org), ?email, ?phone, nco:blogUrl (?url))
+ WHERE {
+ { ?url a nco:PersonContact.
+ ?url fts:match 'fami*'.
+ } UNION {
+ ?url a nco:PersonContact.
+ ?url nco:hasEmailAddress ?add.
+ ?add fts:match 'fami*'.
+ } UNION {
+ ?url a nco:PersonContact.
+ ?url nco:hasPostalAddress ?post.
+ ?post fts:match 'fami*'.
+ }
+ OPTIONAL { ?url nco:photo ?photo.}
+ OPTIONAL { ?url nco:org ?org. }
+ OPTIONAL { ?url maemo:relevance ?relevance.}
+ OPTIONAL { ?url nco:hasPhoneNumber ?hasphone. ?hasPhone nco:phoneNumber ?phone.}
+ OPTIONAL { ?url nco:hasEmailAddress ?hasemail. ?hasemail nco:emailAddress ?email.}
+ } ORDER BY ?relevance LIMIT 100"""
+
+ # Standard timeout
+ print "Send complex query"
+ self.complex_start = time.time ()
+ self.tracker.get_tracker_iface ().SparqlQuery (COMPLEX_QUERY, timeout=COMPLEX_QUERY_TIMEOUT,
+ reply_handler=self.reply_complex,
+ error_handler=self.error_handler_complex)
+
+ self.timeout_id = glib.timeout_add_seconds (MAX_TEST_TIME, self.__timeout_on_idle)
+ glib.timeout_add_seconds (SIMPLE_QUERY_FREQ, self.__simple_query)
+ self.main_loop.run ()
+
+ def __simple_query (self):
+ print "Send simple query (%d)" % (self.simple_queries_counter)
+ SIMPLE_QUERY = "SELECT ?name WHERE { ?u a nco:PersonContact; nco:fullname ?name. }"
+ self.tracker.get_tracker_iface ().SparqlQuery (SIMPLE_QUERY,
+ timeout=10000,
+ reply_handler=self.reply_simple,
+ error_handler=self.error_handler)
+ self.simple_queries_counter -= 1
+ if (self.simple_queries_counter == 0):
+ print "Stop sending queries (wait)"
+ return False
+ return True
+
+ def reply_simple (self, results):
+ print "Simple query answered"
+ self.assertNotEquals (len (results), 0)
+ self.simple_queries_answers += 1
+ if (self.simple_queries_answers == AMOUNT_SIMPLE_QUERIES):
+ print "All simple queries answered"
+ self.main_loop.quit ()
+
+ def reply_complex (self, results):
+ print "Complex query: %.3f" % (time.time () - self.complex_start)
+
+ def error_handler (self, error_msg):
+ print "ERROR in dbus call", error_msg
+
+ def error_handler_complex (self, error_msg):
+ print "Complex query timedout in DBus (", error_msg, ")"
+
+ def __timeout_on_idle (self):
+ print "Timeout... asumming idle"
+ self.main_loop.quit ()
+ return False
+
+
+if __name__ == "__main__":
+ ut.main ()
diff --git a/tests/functional-tests/14-signals.py b/tests/functional-tests/14-signals.py
new file mode 100755
index 0000000..f224ea5
--- /dev/null
+++ b/tests/functional-tests/14-signals.py
@@ -0,0 +1,171 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2010, Nokia <ivan frade nokia com>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+"""
+Test that after insertions/removals/updates in the store, the signals
+are emitted. These tests are not extensive (only a few selected signals
+are tested)
+"""
+
+import unittest2 as ut
+from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
+from common.utils import configuration as cfg
+
+import gobject
+import glib
+import dbus
+from dbus.mainloop.glib import DBusGMainLoop
+
+SUBJECTS_ADDED_SIGNAL = "SubjectsAdded"
+SUBJECTS_REMOVED_SIGNAL = "SubjectsRemoved"
+SUBJECTS_CHANGED_SIGNAL = "SubjectsChanged"
+
+NCO_CONTACT_PATH = "/org/freedesktop/Tracker1/Resources/Classes/nco/Contact"
+SIGNALS_IFACE = "org.freedesktop.Tracker1.Resources.Class"
+
+REASONABLE_TIMEOUT = 10 # Time waiting for the signal to be emitted
+
+class TrackerStoreSignalsTests (CommonTrackerStoreTest):
+ """
+ Insert/update/remove instances from nco:PersonContact
+ and check that the signals are emitted
+ """
+ def setUp (self):
+ self.loop = gobject.MainLoop()
+ dbus_loop = DBusGMainLoop(set_as_default=True)
+ self.bus = dbus.SessionBus (dbus_loop)
+ self.timeout_id = 0
+
+ def __connect_signal (self, signal_name, callback):
+ """
+ After connecting to the signal, call self.__wait_for_signal.
+ That function will wait in a loop, so make sure that the callback
+ calls self.loop.quit ()
+ """
+ if not signal_name in [SUBJECTS_ADDED_SIGNAL, SUBJECTS_REMOVED_SIGNAL, SUBJECTS_CHANGED_SIGNAL]:
+ print "What kind of signal are you trying to connect?!"
+ assert False
+
+ self.cb_id = self.bus.add_signal_receiver (callback,
+ signal_name=signal_name,
+ path = NCO_CONTACT_PATH,
+ dbus_interface = SIGNALS_IFACE)
+
+ def __wait_for_signal (self):
+ """
+ In the callback of the signals, there should be a self.loop.quit ()
+ """
+ self.timeout_id = glib.timeout_add_seconds (REASONABLE_TIMEOUT, self.__timeout_on_idle)
+ self.loop.run ()
+
+ def __timeout_on_idle (self):
+ self.loop.quit ()
+ self.fail ("Timeout, the signal never came!")
+
+
+ def __disconnect_signals_after_test (fn):
+ """
+ Here maybe I got a bit carried away with Python introspection.
+ This decorator makes the function run in a try/finally, and disconnects
+ all the signals afterwards.
+
+ It means that the signal callbacks just need to ensure the results are fine.
+ Don't touch this unless you know what you are doing.
+ """
+ def new (self, *args):
+ try:
+ fn (self, *args)
+ finally:
+ if (self.timeout_id != 0):
+ glib.source_remove (self.timeout_id )
+ self.timeout_id = 0
+ self.loop.quit ()
+ self.bus._clean_up_signal_match (self.cb_id)
+ return new
+
+
+ @__disconnect_signals_after_test
+ def __contact_added_cb (self, contacts_added):
+ self.assertEquals (len (contacts_added), 1)
+ self.assertIn ("test://signals-contact-add", contacts_added)
+
+ def test_01_insert_contact (self):
+ CONTACT = """
+ INSERT {
+ <test://signals-contact-add> a nco:PersonContact ;
+ nco:nameGiven 'Contact-name added';
+ nco:nameFamily 'Contact-family added';
+ nie:generator 'test-14-signals' ;
+ nco:contactUID '1321321312312';
+ nco:hasPhoneNumber <tel:555555555> .
+ }
+ """
+ self.__connect_signal (SUBJECTS_ADDED_SIGNAL, self.__contact_added_cb)
+ self.tracker.update (CONTACT)
+ self.__wait_for_signal ()
+
+ self.tracker.update ("""
+ DELETE { <test://signals-contact-add> a rdfs:Resource }
+ """)
+
+
+ @__disconnect_signals_after_test
+ def __contact_removed_cb (self, contacts_removed):
+ self.assertEquals (len (contacts_removed), 1)
+ self.assertIn ("test://signals-contact-remove", contacts_removed)
+
+ def test_02_remove_contact (self):
+ CONTACT = """
+ INSERT {
+ <test://signals-contact-remove> a nco:PersonContact ;
+ nco:nameGiven 'Contact-name removed';
+ nco:nameFamily 'Contact-family removed'.
+ }
+ """
+ self.tracker.update (CONTACT)
+
+ self.__connect_signal (SUBJECTS_REMOVED_SIGNAL, self.__contact_removed_cb)
+ self.tracker.update ("""
+ DELETE { <test://signals-contact-remove> a rdfs:Resource }
+ """)
+ self.__wait_for_signal ()
+
+
+
+ @__disconnect_signals_after_test
+ def __contact_updated_cb (self, contacts_updated, props_updated):
+ self.assertEquals (len (contacts_updated), 1)
+ self.assertIn ("test://signals-contact-update", contacts_updated)
+
+ self.assertEquals (len (props_updated), 1)
+ self.assertIn ("http://www.semanticdesktop.org/ontologies/2007/03/22/nco#fullname", props_updated)
+
+ def test_03_update_contact (self):
+ self.tracker.update ("INSERT { <test://signals-contact-update> a nco:PersonContact }")
+
+ self.__connect_signal (SUBJECTS_CHANGED_SIGNAL, self.__contact_updated_cb)
+ self.tracker.update ("INSERT { <test://signals-contact-update> nco:fullname 'wohoo'}")
+ self.__wait_for_signal ()
+
+ self.tracker.update ("DELETE { <test://signals-contact-update> a rdfs:Resource}")
+
+
+if __name__ == "__main__":
+ ut.main()
+
diff --git a/tests/functional-tests/200-backup-restore.py b/tests/functional-tests/200-backup-restore.py
new file mode 100755
index 0000000..b0492ef
--- /dev/null
+++ b/tests/functional-tests/200-backup-restore.py
@@ -0,0 +1,401 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2010, Nokia <ivan frade nokia com>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+import os
+import dbus # For the exception handling
+
+from common.utils.system import TrackerSystemAbstraction
+from common.utils.helpers import StoreHelper
+from common.utils import configuration as cfg
+from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
+import unittest2 as ut
+
+
+"""
+Call backup, restore, force the journal replay and check the data is correct afterwards
+"""
+class BackupRestoreTest (CommonTrackerStoreTest):
+ """
+ Backup and restore to/from valid/invalid files
+ """
+ def setUp (self):
+ self.TEST_INSTANCE = "test://backup-restore/1"
+ self.BACKUP_FILE = "file://" + os.path.join (cfg.TEST_TMP_DIR, "tracker-backup-test-1")
+
+ if (os.path.exists (self.BACKUP_FILE)):
+ os.unlink (self.BACKUP_FILE)
+
+ def __insert_test_instance (self):
+ self.tracker.update ("INSERT { <%s> a nco:Contact; nco:fullname 'test-backup' } "
+ % (self.TEST_INSTANCE))
+
+ def __delete_test_instance (self):
+ self.tracker.update ("DELETE { <%s> a rdfs:Resource } " % (self.TEST_INSTANCE))
+
+ def __is_test_instance_there (self):
+ result = self.tracker.query ("SELECT ?u WHERE { ?u a nco:Contact; nco:fullname 'test-backup'}")
+ if (len (result) == 1 and len (result[0]) == 1 and result[0][0] == self.TEST_INSTANCE):
+ return True
+ return False
+
+ def test_backup_01(self):
+ """
+ Inserted data is restored after backup
+
+ 1. Insert contact.
+ 2. Take backup.
+ 3. Delete contact (check it is not there).
+ 4. Restore the file.
+ 5. Check the contact is back there.
+ """
+
+ self.__insert_test_instance ()
+ instances_before = self.tracker.count_instances ("nco:Contact")
+
+ self.tracker.backup (self.BACKUP_FILE)
+
+ self.__delete_test_instance ()
+ instances_now = self.tracker.count_instances ("nco:Contact")
+
+ self.assertEquals (instances_before-1, instances_now)
+
+ self.tracker.restore (self.BACKUP_FILE)
+
+ instances_after = self.tracker.count_instances ("nco:Contact")
+
+ self.assertEquals (instances_before, instances_after)
+ self.assertTrue (self.__is_test_instance_there ())
+
+ # Clean the DB for the next test
+ self.__delete_test_instance ()
+
+
+ def test_backup_02 (self):
+ """
+ Data inserted after backup is lost in restore
+
+ 1. Take backup of db.
+ 2. Insert a contact.
+ 3. Restore the db.
+ 4. Search for the contact inserted.
+ """
+
+ # Precondition: test backup contact shouldn't be there
+ self.assertFalse (self.__is_test_instance_there ())
+
+ self.tracker.backup (self.BACKUP_FILE)
+
+ self.__insert_test_instance ()
+ self.assertTrue (self.__is_test_instance_there ())
+
+ self.tracker.restore (self.BACKUP_FILE)
+
+ self.assertFalse (self.__is_test_instance_there ())
+
+
+
+ def test_backup_03 (self):
+ """
+ Restore from a random text file
+ """
+ TEST_FILE = os.path.join (cfg.TEST_TMP_DIR, "trash_file")
+ trashfile = open (TEST_FILE, "w")
+ trashfile.write ("Here some useless text that obviously is NOT a backup")
+ trashfile.close ()
+
+ self.assertRaises (dbus.DBusException,
+ self.tracker.restore,
+ "file://" + TEST_FILE)
+ os.unlink (TEST_FILE)
+
+ def test_backup_04 (self):
+ """
+ Restore from a random binary file
+ """
+ TEST_FILE = os.path.join (cfg.TEST_TMP_DIR, "trash_file.dat")
+
+ import struct
+ trashfile = open (TEST_FILE, "wb")
+ for n in range (0, 50):
+ data = struct.pack ('i', n)
+ trashfile.write (data)
+ trashfile.close ()
+
+ instances_before = self.tracker.count_instances ("nie:InformationElement")
+ self.assertRaises (dbus.DBusException,
+ self.tracker.restore,
+ "file://" + TEST_FILE)
+
+ os.unlink (TEST_FILE)
+
+ def test_backup_05(self):
+ """
+ Take a backup of the db to an invalid path.
+ Expected: Backup should not be taken and tracker should behave normally.
+ """
+ self.assertRaises (dbus.DBusException,
+ self.tracker.backup,
+ "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
+
+
+ def test_backup_06 (self):
+ """
+ Try to restore an invalid path
+ """
+ self.assertRaises (dbus.DBusException,
+ self.tracker.restore,
+ "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
+
+
+ def test_backup_07(self):
+ """
+ Restore after removing the DBs and journal
+
+ 1. Insert a contact.
+ 2. Take backup of db.
+ 3. Delete the database and the journal.
+ 4. Restore the db.
+ 5. Search for the contact inserted.
+ """
+ self.__insert_test_instance ()
+ instances_before = self.tracker.count_instances ("nco:Contact")
+ self.tracker.backup (self.BACKUP_FILE)
+
+ self.system.tracker_store_remove_dbs ()
+ self.system.tracker_store_remove_journal ()
+ self.system.tracker_store_brutal_restart ()
+
+ instances_before_restore = self.tracker.count_instances ("nco:Contact")
+ self.assertNotEqual (instances_before_restore, instances_before)
+
+ self.tracker.restore (self.BACKUP_FILE)
+ self.assertTrue (self.__is_test_instance_there ())
+
+ self.__delete_test_instance ()
+
+
+ def test_backup_08 (self):
+ """
+ Restore after corrupting DB
+
+ 1. Insert a contact.
+ 2. Take a backup of the DB.
+ 3. Corrupt the DBs and remove the journal.
+ 4. Restore the backup.
+ 5. Search for the inserted contact.
+ """
+ self.__insert_test_instance ()
+ instances_before = self.tracker.count_instances ("nco:Contact")
+ self.tracker.backup (self.BACKUP_FILE)
+
+ self.system.tracker_store_corrupt_dbs ()
+ self.system.tracker_store_remove_journal ()
+ self.system.tracker_store_brutal_restart ()
+
+ instances_before_restore = self.tracker.count_instances ("nco:Contact")
+ self.assertNotEqual (instances_before_restore, instances_before)
+
+ self.tracker.restore (self.BACKUP_FILE)
+ self.assertTrue (self.__is_test_instance_there ())
+
+ # Restore the DB to the original state
+ self.__delete_test_instance ()
+
+
+## def test_backup_09(self):
+## """
+## 1.Insert a contact.
+## 2.Take backup of db.
+## 3.Corrupt the database.
+## 4.Restore the db.
+## 5.Search for the contact inserted.
+## Expected:Contact should be listed in tracker search.
+## """
+
+## urn = 'urn:uuid:'+`random.randint(0, sys.maxint)`
+## insert_sparql = "INSERT { <%s> a nco:Contact } "
+## insert = insert_sparql %(urn)
+## self.sparql_update(insert)
+
+## result = commands.getoutput('tracker-stats | grep nco:Contact')
+## stats1 = result.split()
+
+## self.backup.Save(BACKUP_1)
+
+## delete_sparql = "DELETE { <%s> a nco:Contact } "
+## delete = delete_sparql %(urn)
+## self.sparql_update(delete)
+
+## print ("Corrupting the database")
+
+## commands.getoutput('cp ' + configuration.TEST_DATA_IMAGES +'test-image-1.jpg' + ' ' + database +'meta.db' )
+## try :
+## self.db_restore(BACKUP_1)
+## except :
+## print "Restore is not scuccessful"
+
+## print "querying for the contact"
+## query_sparql = "SELECT ?c WHERE {?c a nco:Contact .FILTER (REGEX(?c, '%s' )) }"
+## query = query_sparql %(urn)
+## result = self.query(query)
+## self.assert_(result[0][0].find(urn)!=-1 , "Restore is not successful" )
+
+## def test_backup_10(self):
+
+## """
+## 1.Insert a contact.
+## 2.Take backup of db.
+## 3.Kill tracker-store process.
+## 4.Corrupt the database.
+## 5.Restore the db.
+## 6.Search for the contact inserted.
+## Expected:Contact should be listed in tracker search.
+## """
+
+## urn = random.randint(0, sys.maxint)
+
+## insert_sparql = "INSERT { <%s> a nco:Contact } "
+## insert = insert_sparql %(urn)
+
+## self.sparql_update(insert)
+
+## result = commands.getoutput('tracker-stats | grep nco:Contact')
+## stats1 = result.split()
+
+## self.backup.Save(BACKUP_1)
+
+## delete_sparql = "DELETE { <%s> a nco:Contact } "
+## delete = delete_sparql %(urn)
+## self.sparql_update(delete)
+
+## self.kill_store()
+
+## print ("corrupting the database")
+## commands.getoutput('cp ' + configuration.TEST_DATA_IMAGES +'test-image-1.jpg' + ' ' + database +'meta.db' )
+
+## try :
+## self.db_restore(BACKUP_1)
+## except :
+## print "Restore is not scuccessful"
+
+## result = commands.getoutput('tracker-stats | grep nco:Contact ')
+## query_sparql = "SELECT ?c WHERE {?c a nco:Contact .FILTER (REGEX(?c, '%s' )) }"
+## query = query_sparql %(urn)
+## result = self.query(query)
+## self.assert_(result[0][0].find(urn)!=-1 , "Restore is not successful")
+
+ def test_backup_11(self):
+ """
+ Backup ignores the file extension
+
+ 1. Insert a contact.
+ 2. Take a backup of the DB into a file with a .jpg extension.
+ 3. Delete the contact and restore the backup.
+ 4. Search for the inserted contact.
+ """
+ BACKUP_JPG_EXT = "file://%s/tracker-test-backup.jpg" % (cfg.TEST_TMP_DIR)
+
+ self.__insert_test_instance ()
+
+ instances_before = self.tracker.count_instances ("nco:Contact")
+
+ self.tracker.backup (BACKUP_JPG_EXT)
+
+ self.__delete_test_instance ()
+ instances_now = self.tracker.count_instances ("nco:Contact")
+ self.assertEquals (instances_before, instances_now+1)
+
+ self.tracker.restore (BACKUP_JPG_EXT)
+ instances_after = self.tracker.count_instances ("nco:Contact")
+ self.assertEquals (instances_before, instances_after)
+
+ # Restore the DB to the original state
+ self.__delete_test_instance ()
+
+
+
+class JournalReplayTest (CommonTrackerStoreTest):
+ """
+ Force journal replaying and check that the DB is correct afterwards
+ """
+
+ def test_journal_01 (self) :
+ """
+ Journal replaying when the DB is corrupted
+
+ Insert some data (so there are more instances than the pre-defined ones)
+ Check instances of different classes
+ Replace the DB with a random file
+ Restart the daemon
+ Check instances of different classes
+ """
+ self.tracker.update ("INSERT { <test://journal-replay/01> a nco:Contact. }")
+
+ emails = self.tracker.count_instances ("nmo:Email")
+ ie = self.tracker.count_instances ("nie:InformationElement")
+ contacts = self.tracker.count_instances ("nco:Contact")
+
+ self.system.tracker_store_corrupt_dbs ()
+ self.system.tracker_store_brutal_restart ()
+ ## Start it twice... the first time it detects the broken DB and aborts
+ self.system.tracker_store_brutal_restart ()
+
+ emails_now = self.tracker.count_instances ("nmo:Email")
+ ie_now = self.tracker.count_instances ("nie:InformationElement")
+ contacts_now = self.tracker.count_instances ("nco:Contact")
+
+ self.assertEquals (emails, emails_now)
+ self.assertEquals (ie, ie_now)
+ self.assertEquals (contacts, contacts_now)
+
+ self.tracker.update ("DELETE { <test://journal-replay/01> a rdfs:Resource. }")
+
+ def test_journal_02 (self) :
+ """
+ Journal replaying when the DB disappears
+
+ Insert some data (so there are more instances than the pre-defined ones)
+ Check instances of different classes
+ Remove the DB
+ Restart the daemon
+ Check instances of different classes
+ """
+ self.tracker.update ("INSERT { <test://journal-replay/02> a nco:Contact. }")
+
+ emails = self.tracker.count_instances ("nmo:Email")
+ ie = self.tracker.count_instances ("nie:InformationElement")
+ contacts = self.tracker.count_instances ("nco:Contact")
+
+ self.system.tracker_store_remove_dbs ()
+ self.system.tracker_store_brutal_restart ()
+
+ emails_now = self.tracker.count_instances ("nmo:Email")
+ ie_now = self.tracker.count_instances ("nie:InformationElement")
+ contacts_now = self.tracker.count_instances ("nco:Contact")
+
+ self.assertEquals (emails, emails_now)
+ self.assertEquals (ie, ie_now)
+ self.assertEquals (contacts, contacts_now)
+
+ self.tracker.update ("DELETE { <test://journal-replay/02> a rdfs:Resource. }")
+
+if __name__ == "__main__":
+ ut.main()
+
+
+
diff --git a/tests/functional-tests/300-miner-basic-ops.py b/tests/functional-tests/300-miner-basic-ops.py
new file mode 100755
index 0000000..aaa0f86
--- /dev/null
+++ b/tests/functional-tests/300-miner-basic-ops.py
@@ -0,0 +1,275 @@
+#!/usr/bin/python
+
+# Copyright (C) 2010, Nokia (ivan frade nokia com)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+# Boston, MA 02110-1301, USA.
+
+#
+# TODO:
+# These tests are for files... we need to write them for folders!
+#
+"""
+Monitor a test directory and copy/move/remove/update files and folders there.
+Check that the basic data of the files is updated accordingly in Tracker.
+"""
+import os
+import shutil
+import time
+
+import unittest2 as ut
+from common.utils.minertest import CommonTrackerMinerTest, BASEDIR, uri, path
+
+class MinerCrawlTest (CommonTrackerMinerTest):
+ """
+ Test cases to check that the miner is able to monitor files that are created, deleted or moved
+ """
+ def __get_text_documents (self):
+ return self.tracker.query ("""
+ SELECT ?url WHERE {
+ ?u a nfo:TextDocument ;
+ nie:url ?url.
+ }
+ """)
+
+
+ """
+ Boot the miner with the correct configuration and check everything is fine
+ """
+ def test_01_initial_crawling (self):
+ """
+ The precreated files and folders should be there
+ """
+ # Maybe the information hasn't been committed yet
+ time.sleep (1)
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 3)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+
+ # We don't check folders (yet), because the Applications module is injecting results
+
+
+## class copy(TestUpdate):
+## FIXME all tests in one class because the miner-fs restarting takes some time (~5 sec)
+## Maybe we can move the miner-fs initialization to setUpModule and then move these
+## tests to different classes
+
+ def test_02_copy_from_unmonitored_to_monitored (self):
+ """
+ Copy a file from an unmonitored directory to a monitored directory
+ and verify that the database is updated accordingly
+ """
+ source = os.path.join (BASEDIR, "test-no-monitored", "file0.txt")
+ dest = os.path.join (BASEDIR, "test-monitored", "file0.txt")
+ shutil.copyfile (source, dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ # Verify that the miner indexed this file.
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 4)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/file0.txt"), unpacked_result)
+
+ # Clean the new file so the test directory is as before
+ print "Remove and wait"
+ os.remove (dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ def test_03_copy_from_monitored_to_unmonitored (self):
+ """
+ Copy a file from a monitored location to an unmonitored location.
+ Nothing should change
+ """
+
+ # Copy from monitored to unmonitored
+ source = os.path.join (BASEDIR, "test-monitored", "file1.txt")
+ dest = os.path.join (BASEDIR, "test-no-monitored", "file1.txt")
+ shutil.copyfile (source, dest)
+
+ time.sleep (1)
+ # Nothing changed
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 3)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+
+ # Clean the file
+ os.remove (dest)
+
+ def test_04_copy_from_monitored_to_monitored (self):
+ """
+ Copy a file between monitored directories
+ """
+ source = os.path.join (BASEDIR, "test-monitored", "file1.txt")
+ dest = os.path.join (BASEDIR, "test-monitored", "dir1", "dir2", "file-test04.txt")
+ shutil.copyfile (source, dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 4)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file-test04.txt"), unpacked_result)
+
+ # Clean the file
+ os.remove (dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+ self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
+
+
+ def test_05_move_from_unmonitored_to_monitored (self):
+ """
+ Move a file from unmonitored to monitored directory
+ """
+ source = os.path.join (BASEDIR, "test-no-monitored", "file0.txt")
+ dest = os.path.join (BASEDIR, "test-monitored", "dir1", "file-test05.txt")
+ shutil.move (source, dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 4)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/file-test05.txt"), unpacked_result)
+
+ # Clean the file
+ os.remove (dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+ self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
+
+## """ move operation and tracker-miner response test cases """
+## class move(TestUpdate):
+
+
+ def test_06_move_from_monitored_to_unmonitored (self):
+ """
+ Move a file from monitored to unmonitored directory
+ """
+ source = os.path.join (BASEDIR, "test-monitored", "dir1", "file2.txt")
+ dest = os.path.join (BASEDIR, "test-no-monitored", "file2.txt")
+ shutil.move (source, dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 2)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+
+ # Restore the file
+ shutil.move (dest, source)
+ self.system.tracker_miner_fs_wait_for_idle ()
+ self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
+
+
+ def test_07_move_from_monitored_to_monitored (self):
+ """
+ Move a file between monitored directories
+ """
+ source = os.path.join (BASEDIR, "test-monitored", "dir1", "file2.txt")
+ dest = os.path.join (BASEDIR, "test-monitored", "file2.txt")
+ shutil.move (source, dest)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 3)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/file2.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+
+ # Restore the file
+ shutil.move (dest, source)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 3)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
+
+
+ def test_08_deletion_single_file (self):
+ """
+ Delete one of the files
+ """
+ victim = os.path.join (BASEDIR, "test-monitored", "dir1", "file2.txt")
+ os.remove (victim)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 2)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+
+ # Restore the file
+ f = open (victim, "w")
+ f.write ("Don't panic, everything is fine")
+ f.close ()
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ def test_09_deletion_directory (self):
+ """
+ Delete a directory
+ """
+ victim = os.path.join (BASEDIR, "test-monitored", "dir1")
+ shutil.rmtree (victim)
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 1)
+ unpacked_result = [ r[0] for r in result]
+ self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+
+ # Restore the dirs
+ # Wait after each operation to be sure of the results
+ os.makedirs (os.path.join (BASEDIR, "test-monitored", "dir1"))
+ self.system.tracker_miner_fs_wait_for_idle ()
+ os.makedirs (os.path.join (BASEDIR, "test-monitored", "dir1", "dir2"))
+ self.system.tracker_miner_fs_wait_for_idle ()
+ for f in ["test-monitored/dir1/file2.txt",
+ "test-monitored/dir1/dir2/file3.txt"]:
+ filename = os.path.join (BASEDIR, f)
+ writer = open (filename, "w")
+ writer.write ("Don't panic, everything is fine")
+ writer.close ()
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ # Wait a bit more... sometimes one idle is not enough
+ self.system.tracker_miner_fs_wait_for_idle (3)
+
+ # Check everything is fine
+ result = self.__get_text_documents ()
+ self.assertEquals (len (result), 3)
+
+if __name__ == "__main__":
+ print """
+ Tests for copy/move/delete operations of FILES between monitored/unmonitored locations.
+
+ We need to do the same for DIRECTORIES!
+ """
+ ut.main()
diff --git a/tests/functional-tests/310-fts-indexing.py b/tests/functional-tests/310-fts-indexing.py
new file mode 100755
index 0000000..334ab88
--- /dev/null
+++ b/tests/functional-tests/310-fts-indexing.py
@@ -0,0 +1,308 @@
+#!/usr/bin/python
+#-*- coding: utf-8 -*-
+
+# Copyright (C) 2010, Nokia (ivan frade nokia com)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+# Boston, MA 02110-1301, USA.
+
+#
+# TODO:
+# These tests are for files... we need to write them for folders!
+#
+"""
+Monitor a directory, copy/move/remove/update text files and check that
+the text contents are updated accordingly in the indexes.
+"""
+import os
+import shutil
+import locale
+
+import unittest2 as ut
+from common.utils.minertest import CommonTrackerMinerTest, BASEDIR, uri, path, DEFAULT_TEXT
+from common.utils import configuration as cfg
+
+class CommonMinerFTS (CommonTrackerMinerTest):
+ """
+ Superclass to share methods. Shouldn't be run by itself.
+ """
+ def setUp (self):
+ self.testfile = "test-monitored/miner-fts-test.txt"
+ if os.path.exists (path (self.testfile)):
+ os.remove (path (self.testfile))
+ # Shouldn't we wait here for the miner to idle? (it works without it)
+
+ def tearDown (self):
+ #if os.path.exists (path (self.testfile)):
+ # os.remove (path (self.testfile))
+ pass
+
+ def set_text (self, text):
+ f = open (path (self.testfile), "w")
+ f.write (text)
+ f.close ()
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ def search_word (self, word):
+ """
+ Return list of URIs with the word in them
+ """
+ print word
+ results = self.tracker.query ("""
+ SELECT ?url WHERE {
+ ?u a nfo:TextDocument ;
+ nie:url ?url ;
+ fts:match '%s'.
+ }
+ """ % (word))
+ return [r[0] for r in results]
+
+ def basic_test (self, text, word):
+ """
+ Save the text in the test file, search for the word
+ and assert the test file is the only result.
+
+ Be careful with the default contents of the text files
+ (see common/utils/minertest.py DEFAULT_TEXT)
+ """
+ self.set_text (text)
+ results = self.search_word (word)
+ self.assertEquals (len (results), 1)
+ self.assertIn ( uri (self.testfile), results)
+
+
+
+class MinerFTSBasicTest (CommonMinerFTS):
+ """
+ Tests different contents in a single file
+ """
+
+ def test_01_single_word (self):
+ TEXT = "automobile"
+ self.basic_test (TEXT, TEXT)
+
+ def test_02_multiple_words (self):
+ TEXT = "automobile with unlimited power"
+ self.set_text (TEXT)
+
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 1)
+ self.assertIn (uri (self.testfile), results)
+
+ results = self.search_word ("unlimited")
+ self.assertEquals (len (results), 1)
+ self.assertIn (uri (self.testfile), results)
+
+
+ def test_03_long_word (self):
+ # TEXT is longer than the 20 characters specified in the fts configuration
+ TEXT = "fsfsfsdfskfweeqrewqkmnbbvkdasdjefjewriqjfnc"
+ self.set_text (TEXT)
+
+ results = self.search_word (TEXT)
+ self.assertEquals (len (results), 0)
+
+ def test_04_non_existent_word (self):
+ TEXT = "This a trick"
+ self.set_text (TEXT)
+ results = self.search_word ("trikc")
+ self.assertEquals (len (results), 0)
+
+
+ def test_05_word_in_multiple_files (self):
+ # Safeguard, in case we modify DEFAULT_TEXT later...
+ assert "content" in DEFAULT_TEXT
+
+ self.set_text (DEFAULT_TEXT)
+ results = self.search_word ("content")
+ self.assertEquals (len (results), 4)
+ self.assertIn ( uri (self.testfile), results)
+ self.assertIn ( uri ("test-monitored/file1.txt"), results)
+ self.assertIn ( uri ("test-monitored/dir1/file2.txt"), results)
+ self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), results)
+
+ def test_06_word_multiple_times_in_file (self):
+ TEXT = "automobile is red. automobile is big. automobile is great!"
+ self.basic_test (TEXT, "automobile")
+
+ def test_07_sentence (self):
+ TEXT = "plastic is fantastic"
+ self.basic_test (TEXT, TEXT)
+
+ def test_08_partial_sentence (self):
+ TEXT = "plastic is fantastic"
+ self.basic_test (TEXT, "is fantastic")
+
+ def test_09_strange_word (self):
+ # FIXME Not sure what we are testing here
+ TEXT = "'summer.time'"
+ self.basic_test (TEXT, "summer.time")
+
+ # Skip the test 'search for .'
+
+ def test_10_mixed_letters_and_numbers (self):
+ TEXT = "abc123"
+ self.basic_test (TEXT, "abc123")
+
+ def test_11_ignore_numbers (self):
+ TEXT = "palabra 123123"
+ self.set_text (TEXT)
+ results = self.search_word ("123123")
+ self.assertEquals (len (results), 0)
+
+
+class MinerFTSFileOperationsTest (CommonMinerFTS):
+ """
+ Move, update, delete the files and check the text indexes are updated accordingly.
+ """
+
+ def test_01_removal_of_file (self):
+ """
+ When removing the file, its text contents disappear from the index
+ """
+ TEXT = "automobile is red and big and whatnot"
+ self.basic_test (TEXT, "automobile")
+
+ os.remove ( path (self.testfile))
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 0)
+
+ def test_02_empty_the_file (self):
+ """
+ When the file is emptied, the indexed words are also removed
+ """
+ TEXT = "automobile is red and big and whatnot"
+ self.basic_test (TEXT, "automobile")
+
+ self.set_text ("")
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 0)
+
+ def test_03_update_the_file (self):
+ """
+ Changing the contents of the file updates the index
+ """
+ TEXT = "automobile is red and big and whatnot"
+ self.basic_test (TEXT, "automobile")
+
+ self.set_text ("airplane is blue and small and wonderful")
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 0)
+
+ results = self.search_word ("airplane")
+ self.assertEquals (len (results), 1)
+
+ # Skip the test_text_13... feel, feet, fee in three diff files and search feet
+
+ def __recreate_file (self, filename, content):
+ if os.path.exists (filename):
+ os.remove (filename)
+
+ f = open (filename, "w")
+ f.write (content)
+ f.close ()
+
+
+ def test_04_on_unmonitored_file (self):
+ """
+ Set text in an unmonitored file. There should be no results.
+ """
+ TEXT = "automobile is red"
+
+ TEST_15_FILE = "test-no-monitored/fts-indexing-test-15.txt"
+ self.__recreate_file (path (TEST_15_FILE), TEXT)
+
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 0)
+
+ os.remove (path (TEST_15_FILE))
+
+ def test_05_move_file_unmonitored_monitored (self):
+ """
+ Move a file from an unmonitored location to a monitored location; the index should be updated
+ """
+
+ # Maybe the miner hasn't finished yet with the setUp deletion!
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ TEXT = "airplane is beautiful"
+ TEST_16_SOURCE = "test-no-monitored/fts-indexing-text-16.txt"
+ TEST_16_DEST = "test-monitored/fts-indexing-text-16.txt"
+
+ self.__recreate_file (path (TEST_16_SOURCE), TEXT)
+
+ results = self.search_word ("airplane")
+ self.assertEquals (len (results), 0)
+
+ shutil.copyfile ( path (TEST_16_SOURCE), path (TEST_16_DEST))
+ self.system.tracker_miner_fs_wait_for_idle ()
+
+ results = self.search_word ("airplane")
+ self.assertEquals (len (results), 1)
+
+ os.remove ( path (TEST_16_SOURCE))
+ os.remove ( path (TEST_16_DEST))
+
+ # skip test for a file in a hidden directory
+
+class MinerFTSStopwordsTest (CommonMinerFTS):
+ """
+ Search for stopwords in a file
+ """
+
+ def __get_some_stopwords (self):
+
+ langcode, encoding = locale.getdefaultlocale ()
+ if "_" in langcode:
+ langcode = langcode.split ("_")[0]
+
+ stopwordsfile = os.path.join (cfg.DATADIR, "tracker", "languages", "stopwords." + langcode)
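+ # Illustrative example (assuming an en_US.UTF-8 locale): this resolves to
+ # <cfg.DATADIR>/tracker/languages/stopwords.en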
+
+ if not os.path.exists (stopwordsfile):
+ self.skipTest ("No stopwords for the current locale ('%s' doesn't exist)" % (stopwordsfile))
+ return []
+
+ stopwords = []
+ counter = 0
+ for line in open (stopwordsfile, "r"):
+ if len (line) > 4:
+ stopwords.append (line[:-1])
+ counter += 1
+
+ if counter > 5:
+ break
+
+ return stopwords
+
+ def test_01_stopwords (self):
+ stopwords = self.__get_some_stopwords ()
+ TEXT = " ".join (["this a completely normal text automobile"] + stopwords)
+
+ self.set_text (TEXT)
+ results = self.search_word ("automobile")
+ self.assertEquals (len (results), 1)
+ print stopwords
+ for i in range (0, len (stopwords)):
+ results = self.search_word (stopwords[i])
+ self.assertEquals (len (results), 0)
+
+ ## FIXME add all the special character tests!
+ ## http://git.gnome.org/browse/tracker/commit/?id=81c0d3bd754a6b20ac72323481767dc5b4a6217b
+
+
+if __name__ == "__main__":
+ ut.main ()
diff --git a/tests/functional-tests/400-extractor.py b/tests/functional-tests/400-extractor.py
new file mode 100755
index 0000000..6e3f6d1
--- /dev/null
+++ b/tests/functional-tests/400-extractor.py
@@ -0,0 +1,203 @@
+#!/usr/bin/python
+#
+# Copyright (C) 2010, Nokia <ivan frade nokia com>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+"""
+For a collection of files, call the extractor and check that the expected
+metadata is extracted. Load the test information dynamically from a data
+directory (containing xxx.expected files)
+"""
+from common.utils import configuration as cfg
+from common.utils.helpers import ExtractorHelper
+import unittest2 as ut
+import os
+import types
+import sys
+
+import ConfigParser
+
+class ExtractionTestCase (ut.TestCase):
+ """
+ Test that checks whether the tracker extractor is able to retrieve metadata
+ """
+ def __init__ (self, methodName='runTest', descfile=None):
+ """
+ descfile is the description file, given as a relative path
+ """
+ ut.TestCase.__init__ (self, methodName)
+
+ # Load the description file
+ assert descfile
+ self.rel_description = descfile
+ self.configParser = self.__load_description_file (self.rel_description)
+
+ # Add a method to the class called after the description file
+ methodName = self.rel_description.lower()[:-len(".expected")].replace (" ", "_")[-60:]
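+ # e.g. a hypothetical "audio/Test File.expected" becomes a test method named
+ # "audio/test_file" (lowercased, ".expected" stripped, spaces replaced with
+ # "_", truncated to the last 60 characters)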
+
+ if (self.__is_expected_failure ()):
+ setattr (self,
+ methodName,
+ self.expected_failure_test_extraction)
+ else:
+ setattr (self,
+ methodName,
+ self.generic_test_extraction)
+
+ # unittest framework will run the test called "self._testMethodName"
+ # So we set that variable to our new name
+ self._testMethodName = methodName
+
+ def runTest (self):
+ """
+ Empty placeholder that should NEVER be called. It is required to exist by unittest.
+ """
+ assert False
+
+ def __load_description_file (self, descfile):
+ configParser = ConfigParser.RawConfigParser ()
+ # Make it case sensitive:
+ configParser.optionxform = str
+
+ abs_description = os.path.abspath (descfile)
+ loaded_files = configParser.read (abs_description)
+ if not abs_description in loaded_files:
+ raise Exception("Unable to load %s" % (abs_description))
+
+ return configParser
+
+ def __is_expected_failure (self):
+ assert self.configParser
+ return self.configParser.has_option ("TestFile", "ExpectedFailure")
+
+ def __get_bugnumber (self):
+ assert self.configParser
+ if self.configParser.has_option ("TestFile", "Bugzilla"):
+ return "'" + self.configParser.get ("TestFile", "Bugzilla") + "'"
+ else:
+ return None
+
+
+
+ def setUp (self):
+ self.extractor = ExtractorHelper ()
+
+ def expected_failure_test_extraction (self):
+ try:
+ self.generic_test_extraction ()
+ except Exception:
+ raise ut.case._ExpectedFailure(sys.exc_info())
+
+ if self.__get_bugnumber ():
+ raise Exception ("Unexpected success. Maybe bug: " + self.__get_bugnumber () + " has been fixed?")
+ else:
+ raise Exception ("Unexpected success. Check " + self.rel_description)
+
+ def generic_test_extraction (self):
+ abs_description = os.path.abspath (self.rel_description)
+
+ # Filename contains the file to extract, in a relative path to the description file
+ desc_root, desc_file = os.path.split (abs_description)
+ self.file_to_extract = ""
+ try:
+ self.file_to_extract = os.path.join (desc_root, self.configParser.get ("TestFile", "Filename"))
+ except Exception, e:
+ self.fail ("%s in %s"
+ % (e, abs_description))
+ result = self.extractor.get_metadata ("file://" + self.file_to_extract, "")
+
+ self.__assert_extraction_ok (result)
+
+
+ def assertDictHasKey (self, d, key, msg=None):
+ if not d.has_key (key):
+ standardMsg = "Missing: %s\n" % (key)
+ self.fail (self._formatMessage (msg, standardMsg))
+ else:
+ return
+
+ def __assert_extraction_ok (self, result):
+ self.__check_section ("Metadata", result)
+
+ if (cfg.haveMaemo and self.configParser.has_section ("Meego")):
+ self.__check_section ("Meego", result)
+
+
+ def __check_section (self, section, result):
+ error_missing_prop = "Property '%s' hasn't been extracted from file \n'%s'\n (requested on '%s' [%s])"
+ error_wrong_value = "on property '%s' from file %s\n (requested on: '%s' [%s])"
+ error_extra_prop = "Property '%s' was explicitely banned for file \n'%s'\n (requested on '%s' [%s])"
+ error_extra_prop_v = "Property '%s' with value '%s' was explicitely banned for file \n'%s'\n (requested on %s' [%s])"
+
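+ # Keys use "_" in place of ":" (presumably because ConfigParser would treat
+ # ":" as a key/value delimiter); a "!" prefix marks a property that must NOT
+ # appear in the result (or, when a value is given, must not have that value).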
+ expected_pairs = [ (k.replace ("_", ":"), v)
+ for (k,v) in self.configParser.items (section)
+ if not k.startswith ("!")]
+ unexpected_pairs = [ (k[1:].replace ("_", ":"), v)
+ for (k,v) in self.configParser.items (section)
+ if k.startswith ("!")]
+
+ for (prop, value) in expected_pairs:
+ self.assertDictHasKey (result, prop,
+ error_missing_prop % (prop,
+ self.file_to_extract,
+ self.rel_description,
+ section))
+ self.assertIn (value, result [prop],
+ error_wrong_value % (prop,
+ self.file_to_extract,
+ self.rel_description,
+ section))
+
+ for (prop, value) in unexpected_pairs:
+ # There is no prop, or it is but not with that value
+ if (value == ""):
+ self.assertFalse (result.has_key (prop), error_extra_prop % (prop,
+ self.file_to_extract,
+ self.rel_description,
+ section))
+ else:
+ self.assertNotIn (value, result [prop], error_extra_prop_v % (prop,
+ value,
+ self.file_to_extract,
+ self.rel_description,
+ section))
+
+
+if __name__ == "__main__":
+ ##
+ # Traverse the TEST_DATA_PATH directory looking for .description files
+ # Add a new TestCase to the suite per .description file and run the suite.
+ #
+ # If we do this inside a single TestCase, an error in one test would stop the
+ # whole test run.
+ ##
+ if (os.path.exists (os.getcwd() + "/test-extraction-data")):
+ # Use local directory if available
+ TEST_DATA_PATH = os.getcwd() + "/test-extraction-data"
+ else:
+ TEST_DATA_PATH = os.path.join (cfg.DATADIR, "tracker-tests",
+ "test-extraction-data")
+ print "Loading test descriptions from", TEST_DATA_PATH
+ extractionTestSuite = ut.TestSuite ()
+ for root, dirs, files in os.walk (TEST_DATA_PATH):
+ descriptions = [os.path.join (root, f) for f in files if f.endswith ("expected")]
+ for descfile in descriptions:
+ tc = ExtractionTestCase(descfile=descfile)
+ extractionTestSuite.addTest(tc)
+ result = ut.TextTestRunner (verbosity=1).run (extractionTestSuite)
+ sys.exit(not result.wasSuccessful())
+
diff --git a/tests/functional-tests/500-writeback.py b/tests/functional-tests/500-writeback.py
new file mode 100755
index 0000000..620c7e5
--- /dev/null
+++ b/tests/functional-tests/500-writeback.py
@@ -0,0 +1,251 @@
+#!/usr/bin/python
+
+# Copyright (C) 2010, Nokia (ivan frade nokia com)
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+# Boston, MA 02110-1301, USA.
+#
+"""
+Write values in tracker and check that the actual values are written
+to the files. Note that these tests are highly platform dependent.
+"""
+import sys, os, dbus
+import time
+import shutil
+
+from common.utils.system import TrackerSystemAbstraction
+from common.utils.helpers import StoreHelper, ExtractorHelper
+from common.utils import configuration as cfg
+import unittest2 as ut
+from common.utils.expectedFailure import expectedFailureBug
+
+BASEDIR = os.environ['HOME']
+REASONABLE_TIMEOUT = 5 # Seconds we wait for tracker-writeback to do the work
+
+def uri (filename):
+ return "file://" + os.path.join (BASEDIR, filename)
+
+
+class CommonTrackerWritebackTest (ut.TestCase):
+ """
+ Superclass to share methods. Shouldn't be run by itself.
+ """
+
+ @classmethod
+ def __prepare_directories (self):
+ #
+ # ~/test-writeback-monitored/
+ # ~/test-writeback-no-monitored/
+ #
+
+ for d in ["test-writeback-monitored",
+ "test-writeback-no-monitored"]:
+ directory = os.path.join (BASEDIR, d)
+ if (os.path.exists (directory)):
+ shutil.rmtree (directory)
+ os.makedirs (directory)
+
+
+ if (os.path.exists (os.getcwd() + "/test-writeback-data")):
+ # Use local directory if available
+ datadir = os.getcwd() + "/test-writeback-data"
+ else:
+ datadir = os.path.join (cfg.DATADIR, "tracker-tests",
+ "test-writeback-data")
+
+ for root, dirs, testfile in os.walk (datadir):
+
+ def is_valid_file (f):
+ return not (f.endswith ("~") or f.startswith ("Makefile"))
+
+ valid_files = [os.path.join (root, tf) for tf in testfile if is_valid_file (tf)]
+ for f in valid_files:
+ print "Copying", f, os.path.join (BASEDIR, "test-writeback-monitored")
+ shutil.copy (f, os.path.join (BASEDIR, "test-writeback-monitored"))
+
+
+ @classmethod
+ def setUpClass (self):
+ #print "Starting the daemon in test mode"
+ self.__prepare_directories ()
+
+ self.system = TrackerSystemAbstraction ()
+
+ if (os.path.exists (os.getcwd() + "/test-configurations/writeback")):
+ # Use local directory if available
+ confdir = os.getcwd() + "/test-configurations/writeback"
+ else:
+ confdir = os.path.join (cfg.DATADIR, "tracker-tests",
+ "test-configurations", "writeback")
+ self.system.tracker_writeback_testing_start (confdir)
+ # Returns when ready
+ print "Ready to go!"
+
+ @classmethod
+ def tearDownClass (self):
+ #print "Stopping the daemon in test mode (Doing nothing now)"
+ self.system.tracker_writeback_testing_stop ()
+
+
+class WritebackMonitoredTest (CommonTrackerWritebackTest):
+ """
+ Write into the tracker store the properties with writeback support and check
+ that the new values are actually in the file
+ """
+ def setUp (self):
+ self.tracker = StoreHelper ()
+ self.extractor = ExtractorHelper ()
+
+ def tearDown (self):
+ # Give it more time between tests to avoid random failures?
+ pass
+
+ def __clean_property (self, property_name, fileuri, expectFailure=True):
+ """
+ Remove the property for the fileuri (file://...)
+ """
+ CLEAN = """
+ DELETE { ?u %s ?whatever }
+ WHERE {
+ ?u nie:url '%s' ;
+ %s ?whatever .
+
+ }
+ """
+ try:
+ self.tracker.update (CLEAN % (property_name, fileuri, property_name))
+ except Exception, e:
+ print e
+ assert expectFailure
+
+
+ def __writeback_test (self, filename, mimetype, prop, expectedKey=None):
+ """
+ Set a value in @prop for the @filename. Then ask tracker-extractor
+ for metadata and check in the results dictionary if the property is there.
+
+ Note: because some property names are translated in the dictionary of
+ extracted metadata, the optional parameter @expectedKey can be used to
+ specify which key to check in the dictionary. If None, @prop is used.
+ """
+
+ TEST_VALUE = prop.replace (":","") + "test"
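+ # e.g. for nie:title this writes the value "nietitletest" (illustrative;
+ # simply a string unlikely to be in the file already)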
+ SPARQL_TMPL = """
+ INSERT { ?u %s '%s' }
+ WHERE { ?u nie:url '%s' }
+ """
+ self.__clean_property (prop, uri(filename))
+ self.tracker.update (SPARQL_TMPL % (prop, TEST_VALUE, uri(filename)))
+
+ # There is no way to know when the operation is finished
+ time.sleep (REASONABLE_TIMEOUT)
+
+ results = self.extractor.get_metadata (uri (filename), mimetype)
+ keyDict = expectedKey or prop
+ self.assertEquals (results[keyDict][0], TEST_VALUE)
+ self.__clean_property (prop, uri(filename), False)
+
+
+ def __writeback_hasTag_test (self, filename, mimetype):
+
+ SPARQL_TMPL = """
+ INSERT {
+ <test://writeback-hasTag-test/1> a nao:Tag ;
+ nao:prefLabel "testTag" .
+
+ ?u nao:hasTag <test://writeback-hasTag-test/1> .
+ } WHERE {
+ ?u nie:url '%s' .
+ }
+ """
+
+ CLEAN_VALUE = """
+ DELETE {
+ <test://writeback-hasTag-test/1> a rdfs:Resource.
+ ?u nao:hasTag <test://writeback-hasTag-test/1> .
+ } WHERE {
+ ?u nao:hasTag <test://writeback-hasTag-test/1> .
+ }
+ """
+
+ self.tracker.update (SPARQL_TMPL % (uri (filename)))
+
+ time.sleep (REASONABLE_TIMEOUT)
+
+ results = self.extractor.get_metadata (uri (filename), mimetype)
+ self.assertIn ("testTag", results ["nao:hasTag:prefLabel"])
+
+
+ # JPEG test
+ def test_001_jpeg_title (self):
+ FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
+ self.__writeback_test (FILENAME, "image/jpeg", "nie:title")
+
+ def test_002_jpeg_description (self):
+ FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
+ self.__writeback_test (FILENAME, "image/jpeg", "nie:description")
+
+ def test_003_jpeg_keyword (self):
+ FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
+ self.__writeback_test (FILENAME, "image/jpeg", "nie:keyword", "nao:hasTag:prefLabel")
+
+ def test_004_jpeg_hasTag (self):
+ FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
+ self.__writeback_hasTag_test (FILENAME, "image/jpeg")
+
+
+ # TIFF tests
+ def test_011_tiff_title (self):
+ FILENAME = "test-writeback-monitored/writeback-test-2.tif"
+ self.__writeback_test (FILENAME, "image/tiff", "nie:title")
+
+ def test_012_tiff_description (self):
+ FILENAME = "test-writeback-monitored/writeback-test-2.tif"
+ self.__writeback_test (FILENAME, "image/tiff", "nie:description")
+
+ def test_013_tiff_keyword (self):
+ FILENAME = "test-writeback-monitored/writeback-test-2.tif"
+ self.__writeback_test (FILENAME, "image/tiff", "nie:keyword", "nao:hasTag:prefLabel")
+
+ def test_014_tiff_hasTag (self):
+ FILENAME = "test-writeback-monitored/writeback-test-2.tif"
+ self.__writeback_hasTag_test (FILENAME, "image/tiff")
+
+
+
+ # PNG tests
+ ## @expectedFailureBug ("NB#185070")
+ def test_021_png_title (self):
+ FILENAME = "test-writeback-monitored/writeback-test-4.png"
+ self.__writeback_test (FILENAME, "image/png", "nie:title")
+
+ @expectedFailureBug ("NB#185070")
+ def test_022_png_description (self):
+ FILENAME = "test-writeback-monitored/writeback-test-4.png"
+ self.__writeback_test (FILENAME, "image/png", "nie:description")
+
+ @expectedFailureBug ("NB#185070")
+ def test_023_png_keyword (self):
+ FILENAME = "test-writeback-monitored/writeback-test-4.png"
+ self.__writeback_test (FILENAME, "image/png", "nie:keyword", "nao:hasTag:prefLabel")
+
+ @expectedFailureBug("NB#185070")
+ def test_024_png_hasTag (self):
+ FILENAME = "test-writeback-monitored/writeback-test-4.png"
+ self.__writeback_hasTag_test (FILENAME, "image/png")
+
+if __name__ == "__main__":
+ ut.main ()