[tracker/sam/functional-test-runner: 1/15] functional-tests: Reformat code to correspond to consistent style



commit d0366ae3955b63ea86a8179986a066912d52da20
Author: Sam Thursfield <sam afuera me uk>
Date:   Mon Dec 29 17:46:46 2014 +0000

    functional-tests: Reformat code to correspond to consistent style
    
    The standard coding style for Python is PEP-8. I used the 'autopep8'
    tool to reformat all the tests to conform to PEP-8 style.
    
    There should be no functional changes.
    
    PEP-8: https://www.python.org/dev/peps/pep-0008/
    autopep8: https://pypi.python.org/pypi/autopep8
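
    For reference, a reformatting pass along these lines reproduces the
    result (a sketch only: the exact autopep8 invocation is not recorded
    in this commit, so treat the flags below as an assumption rather than
    the command that was actually run):

        # Rewrite the functional tests in place to PEP-8 style.
        autopep8 --in-place --recursive tests/functional-tests/

        # Or preview the proposed changes without modifying any file.
        autopep8 --diff --recursive tests/functional-tests/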

 tests/functional-tests/01-insertion.py             |  929 +++---
 tests/functional-tests/02-sparql-bugs.py           |  215 +-
 tests/functional-tests/03-fts-functions.py         |   37 +-
 tests/functional-tests/04-group-concat.py          |   36 +-
 tests/functional-tests/05-coalesce.py              |   40 +-
 tests/functional-tests/06-distance.py              |   72 +-
 tests/functional-tests/07-graph.py                 |   27 +-
 tests/functional-tests/08-unique-insertions.py     |   22 +-
 tests/functional-tests/09-concurrent-query.py      |   85 +-
 tests/functional-tests/10-sqlite-misused.py        |   64 +-
 tests/functional-tests/11-sqlite-batch-misused.py  |   74 +-
 tests/functional-tests/12-transactions.py          |   46 +-
 tests/functional-tests/13-threaded-store.py        |   92 +-
 tests/functional-tests/14-signals.py               |  141 +-
 tests/functional-tests/15-statistics.py            |   98 +-
 tests/functional-tests/16-collation.py             |   66 +-
 tests/functional-tests/17-ontology-changes.py      |  743 +++--
 tests/functional-tests/200-backup-restore.py       |  565 ++--
 tests/functional-tests/300-miner-basic-ops.py      |  317 +-
 .../functional-tests/301-miner-resource-removal.py |   72 +-
 tests/functional-tests/310-fts-indexing.py         |  286 +-
 tests/functional-tests/400-extractor.py            |  244 +-
 tests/functional-tests/500-writeback.py            |  121 +-
 tests/functional-tests/501-writeback-details.py    |   63 +-
 tests/functional-tests/600-applications-camera.py  |  182 +-
 tests/functional-tests/601-applications-sync.py    |   39 +-
 .../common/utils/applicationstest.py               |   85 +-
 tests/functional-tests/common/utils/dconf.py       |   18 +-
 .../common/utils/expectedFailure.py                |   44 +-
 tests/functional-tests/common/utils/extractor.py   |  109 +-
 tests/functional-tests/common/utils/helpers.py     |  422 ++--
 tests/functional-tests/common/utils/html.py        |  102 +-
 tests/functional-tests/common/utils/minertest.py   |   36 +-
 tests/functional-tests/common/utils/options.py     |    8 +-
 tests/functional-tests/common/utils/storetest.py   |   28 +-
 tests/functional-tests/common/utils/system.py      |  215 +-
 .../functional-tests/common/utils/writebacktest.py |   78 +-
 tests/functional-tests/create-tests-aegis.py       |   17 +-
 tests/functional-tests/create-tests-xml.py         |   46 +-
 tests/functional-tests/fts-tc.py                   |  919 +++---
 tests/functional-tests/mass-storage-mode.py        |  920 +++---
 tests/functional-tests/performance-tc-modified.py  | 3269 ++++++++++----------
 tests/functional-tests/performance-tc.py           | 1218 ++++----
 tests/functional-tests/unittest2/__init__.py       |    4 +-
 tests/functional-tests/unittest2/case.py           |  178 +-
 tests/functional-tests/unittest2/compatibility.py  |   24 +-
 tests/functional-tests/unittest2/loader.py         |   40 +-
 tests/functional-tests/unittest2/main.py           |   41 +-
 tests/functional-tests/unittest2/result.py         |   14 +-
 tests/functional-tests/unittest2/runner.py         |   21 +-
 tests/functional-tests/unittest2/signals.py        |    8 +-
 tests/functional-tests/unittest2/suite.py          |   41 +-
 tests/functional-tests/unittest2/unit2.py          |    2 +-
 tests/functional-tests/unittest2/util.py           |    5 +
 tests/functional-tests/virtual-files-tc.py         |  169 +-
 55 files changed, 6534 insertions(+), 6223 deletions(-)
---
diff --git a/tests/functional-tests/01-insertion.py b/tests/functional-tests/01-insertion.py
index 8d9942e..1e25467 100755
--- a/tests/functional-tests/01-insertion.py
+++ b/tests/functional-tests/01-insertion.py
@@ -22,7 +22,9 @@
 Stand-alone tests cases for the store, inserting, removing information
 in pure sparql and checking that the data is really there
 """
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -34,52 +36,53 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TrackerStoreInsertionTests (CommonTrackerStoreTest):
-       """
-        Insert single and multiple-valued properties, dates (ok and broken)
-        and check the results
-       """
-
-       def test_insert_01 (self):
-               """
-                Simple insert of two triplets.
-
-                1. Insert a InformationElement with title.
-                2. TEST: Query the title of that information element
-                3. Remove the InformationElement to keep everything as it was before
-                """
 
-                uri = "tracker://test_insert_01/" + str(random.randint (0, 100))
-                insert = """
+    """
+    Insert single and multiple-valued properties, dates (ok and broken)
+    and check the results
+    """
+
+    def test_insert_01(self):
+        """
+        Simple insert of two triplets.
+
+        1. Insert a InformationElement with title.
+        2. TEST: Query the title of that information element
+        3. Remove the InformationElement to keep everything as it was before
+        """
+
+        uri = "tracker://test_insert_01/" + str(random.randint(0, 100))
+        insert = """
                 INSERT { <%s> a nie:InformationElement;
                         nie:title \"test_insert_01\". }
                 """ % (uri)
-                self.tracker.update (insert)
+        self.tracker.update(insert)
 
-               """ verify the inserted item """
-                query = """
+        """ verify the inserted item """
+        query = """
                 SELECT ?t WHERE {
                 <%s> a nie:InformationElement ;
                 nie:title ?t .
                 }
                 """ % (uri)
-                results = self.tracker.query (query)
+        results = self.tracker.query(query)
 
-                self.assertEquals (str(results[0][0]), "test_insert_01")
+        self.assertEquals(str(results[0][0]), "test_insert_01")
 
-               """ delete the inserted item """
-                delete = """
+        """ delete the inserted item """
+        delete = """
                 DELETE { <%s> a rdfs:Resource. }
                 """ % (uri)
-                self.tracker.update (delete)
-
+        self.tracker.update(delete)
 
-       def test_insert_02(self):
-                """
-                Insert of a bigger set of triplets (linking two objects)
-                """
+    def test_insert_02(self):
+        """
+        Insert of a bigger set of triplets (linking two objects)
+        """
 
-                self.tracker.update("""
+        self.tracker.update("""
                 INSERT {
                 <urn:uuid:bob-dylan> a nmm:Artist;
                    nmm:artistName 'Bob Dylan'.
@@ -95,7 +98,7 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                    }
                    """)
 
-                QUERY = """
+        QUERY = """
                 SELECT ?uri ?title ?length  WHERE {
                     ?uri a nmm:MusicPiece ;
                          nmm:performer <urn:uuid:bob-dylan> ;
@@ -104,27 +107,26 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                 }
                 """
 
-                result = self.tracker.query (QUERY)
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 3) # uri, title, length
-                self.assertEquals (result[0][0], "file:///a/b/c/10_song3.mp3")
-                self.assertEquals (result[0][1], "Subterranean homesick blues")
-                self.assertEquals (result[0][2], "219252")
+        result = self.tracker.query(QUERY)
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 3)  # uri, title, length
+        self.assertEquals(result[0][0], "file:///a/b/c/10_song3.mp3")
+        self.assertEquals(result[0][1], "Subterranean homesick blues")
+        self.assertEquals(result[0][2], "219252")
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE {
                    <urn:uuid:bob-dylan> a rdfs:Resource.
                    <file:///a/b/c/10_song3.mp3> a rdfs:Resource.
                 }
                 """)
 
+    def test_insert_03(self):
+        """
+        Checking all the values are inserted
+        """
 
-        def test_insert_03(self):
-                """
-                Checking all the values are inserted
-                """
-
-               self.tracker.update("""
+        self.tracker.update("""
                 INSERT {
                 <urn:uuid:7646004> a nmm:Artist;
                     nmm:artistName 'John Lennon' .
@@ -145,7 +147,7 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
 
                     """)
 
-                QUERY = """
+        QUERY = """
                 SELECT ?artist ?length ?trackN ?album ?size ?flm ?fc ?filename  WHERE {
                     <file:///a/b/c/imagine.mp3> a nmm:MusicPiece ;
                         nmm:performer ?x ;
@@ -161,21 +163,21 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                     ?y nmm:albumTitle ?album.
                     }
                     """
-                result = self.tracker.query(QUERY)
-
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 8)
-                self.assertEquals (result[0][0], "John Lennon")
-                self.assertEquals (result[0][1], "219252")
-                self.assertEquals (result[0][2], "11")
-                self.assertEquals (result[0][3], "Imagine")
-                self.assertEquals (result[0][4], "17630")
-                # FIXME Tracker returns this translated to the current timezone
-                #self.assertEquals (result[0][5], "2008-12-23T11:47:02Z")
-                #self.assertEquals (result[0][6], "2008-12-16T10:41:20Z")
-                self.assertEquals (result[0][7], "imagine.mp3")
-
-                self.tracker.update ("""
+        result = self.tracker.query(QUERY)
+
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 8)
+        self.assertEquals(result[0][0], "John Lennon")
+        self.assertEquals(result[0][1], "219252")
+        self.assertEquals(result[0][2], "11")
+        self.assertEquals(result[0][3], "Imagine")
+        self.assertEquals(result[0][4], "17630")
+        # FIXME Tracker returns this translated to the current timezone
+        #self.assertEquals (result[0][5], "2008-12-23T11:47:02Z")
+        #self.assertEquals (result[0][6], "2008-12-16T10:41:20Z")
+        self.assertEquals(result[0][7], "imagine.mp3")
+
+        self.tracker.update ("""
                 DELETE {
                    <urn:uuid:123123123> a rdfs:Resource .
                 }
@@ -185,16 +187,13 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                 }
                 """)
 
-
-
-
-       def test_insert_04(self):
-                """
-                Insert, delete same single valued properties multiple times.
-                """
-                for i in range (0, 3):
-                        # Delete single valued properties of music file.
-                        self.tracker.update("""
+    def test_insert_04(self):
+        """
+        Insert, delete same single valued properties multiple times.
+        """
+        for i in range(0, 3):
+            # Delete single valued properties of music file.
+            self.tracker.update("""
                         DELETE {
                           <test://instance-1> nie:usageCounter ?v
                         } WHERE {
@@ -207,194 +206,196 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                         }
                         """)
 
-                        # Insert the same single valued properties of music file.
-                        self.tracker.update("""
+            # Insert the same single valued properties of music file.
+            self.tracker.update("""
                         INSERT {
                            <test://instance-1> a nmm:MusicPiece, nfo:FileDataObject;
                            nie:usageCounter '%d';
                            nie:contentAccessed '2000-01-01T00:4%d:47Z' .
                         }""" % (i, i))
 
-                        # Query for the property values and verify whether the last change is applied.
-                        result = self.tracker.query ("""
+            # Query for the property values and verify whether the last change
+            # is applied.
+            result = self.tracker.query ("""
                           SELECT ?playcount ?date WHERE {
                              <test://instance-1> a nmm:MusicPiece ;
                                  nie:usageCounter ?playcount ;
                                  nie:contentAccessed ?date.
                           }""")
 
-                        self.assertEquals (len (result), 1)
-                        self.assertEquals (len (result[0]), 2)
-                        self.assertEquals (int (result[0][0]), i)
-                        self.assertEquals (result[0][1], "2000-01-01T00:4%d:47Z" % (i))
+            self.assertEquals(len(result), 1)
+            self.assertEquals(len(result[0]), 2)
+            self.assertEquals(int(result[0][0]), i)
+            self.assertEquals(result[0][1], "2000-01-01T00:4%d:47Z" % (i))
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-1> a rdfs:Resource. }
                 """)
 
-
-       def test_insert_05(self):
-                """
-                Insert or replace, single valued properties multiple times.
-                """
-                for i in range (0, 3):
-                        # Insert the same single valued properties of music file.
-                        self.tracker.update("""
+    def test_insert_05(self):
+        """
+        Insert or replace, single valued properties multiple times.
+        """
+        for i in range(0, 3):
+            # Insert the same single valued properties of music file.
+            self.tracker.update("""
                         INSERT OR REPLACE {
                            <test://instance-1> a nmm:MusicPiece, nfo:FileDataObject;
                            nie:usageCounter '%d';
                            nie:contentAccessed '2000-01-01T00:4%d:47Z' .
                         }""" % (i, i))
 
-                        # Query for the property values and verify whether the last change is applied.
-                        result = self.tracker.query ("""
+            # Query for the property values and verify whether the last change
+            # is applied.
+            result = self.tracker.query ("""
                           SELECT ?playcount ?date WHERE {
                              <test://instance-1> a nmm:MusicPiece ;
                                  nie:usageCounter ?playcount ;
                                  nie:contentAccessed ?date.
                           }""")
 
-                        self.assertEquals (len (result), 1)
-                        self.assertEquals (len (result[0]), 2)
-                        self.assertEquals (int (result[0][0]), i)
-                        self.assertEquals (result[0][1], "2000-01-01T00:4%d:47Z" % (i))
+            self.assertEquals(len(result), 1)
+            self.assertEquals(len(result[0]), 2)
+            self.assertEquals(int(result[0][0]), i)
+            self.assertEquals(result[0][1], "2000-01-01T00:4%d:47Z" % (i))
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-1> a rdfs:Resource. }
                 """)
 
-       def test_insert_06(self):
-                """
-                Insert or replace, single and multi valued properties multiple times.
-                """
-                for i in range (0, 3):
-                        # Insert the same single valued properties and insert multi valued properties at the same time
-                        self.tracker.update("""
+    def test_insert_06(self):
+        """
+        Insert or replace, single and multi valued properties multiple times.
+        """
+        for i in range(0, 3):
+            # Insert the same single valued properties and insert multi valued
+            # properties at the same time
+            self.tracker.update("""
                         INSERT OR REPLACE {
                            <test://instance-2> a nie:InformationElement;
                            nie:title '%d';
                            nie:keyword '%d'
                         }""" % (i, i))
 
-                        # Query for the property values and verify whether the last change is applied.
-                        result = self.tracker.query ("""
+            # Query for the property values and verify whether the last change
+            # is applied.
+            result = self.tracker.query ("""
                           SELECT ?t ?k WHERE {
                              <test://instance-2> nie:title ?t ;
                                  nie:keyword ?k 
                           }""")
 
-                self.assertEquals (len (result), 3)
-                self.assertEquals (len (result[0]), 2)
-                self.assertEquals (result[0][0], "%d" % i)
-                self.assertEquals (result[0][1], "0")
+        self.assertEquals(len(result), 3)
+        self.assertEquals(len(result[0]), 2)
+        self.assertEquals(result[0][0], "%d" % i)
+        self.assertEquals(result[0][1], "0")
 
-                self.assertEquals (result[1][0], "%d" % i)
-                self.assertEquals (result[1][1], "1")
+        self.assertEquals(result[1][0], "%d" % i)
+        self.assertEquals(result[1][1], "1")
 
-                self.assertEquals (result[2][0], "%d" % i)
-                self.assertEquals (result[2][1], "2")
+        self.assertEquals(result[2][0], "%d" % i)
+        self.assertEquals(result[2][1], "2")
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-2> a rdfs:Resource. }
                 """)
 
-       def test_insert_07(self):
-                """
-                Insert or replace, single and multi valued properties with domain errors.
-                """
+    def test_insert_07(self):
+        """
+        Insert or replace, single and multi valued properties with domain errors.
+        """
 
-                try:
-                  INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-3> nie:title 'test' }"""
-                  self.tracker.update (INSERT_SPARQL)
-                except:
-                  pass
+        try:
+            INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-3> nie:title 'test' }"""
+            self.tracker.update(INSERT_SPARQL)
+        except:
+            pass
 
-                INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-4> a nie:DataSource }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-4> a nie:DataSource }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                try:
-                  INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-5> nie:rootElementOf <test://instance-4> }"""
-                  self.tracker.update (INSERT_SPARQL)
-                except:
-                  pass
+        try:
+            INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-5> nie:rootElementOf <test://instance-4> }"""
+            self.tracker.update(INSERT_SPARQL)
+        except:
+            pass
 
-                INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-5> a nie:InformationElement ; nie:rootElementOf <test://instance-4> }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT OR REPLACE { <test://instance-5> a nie:InformationElement ; nie:rootElementOf <test://instance-4> }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-4> a rdfs:Resource. }
                 """)
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-5> a rdfs:Resource. }
                 """)
 
-       def test_insert_08(self):
-                """
-                Insert or replace, single and multi valued properties with graphs
-                """
+    def test_insert_08(self):
+        """
+        Insert or replace, single and multi valued properties with graphs
+        """
 
-                INSERT_SPARQL = """INSERT { GRAPH <test://graph-1> { <test://instance-6> a nie:InformationElement ; nie:title 'title 1' } }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT { GRAPH <test://graph-1> { <test://instance-6> a nie:InformationElement ; nie:title 'title 1' } }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                INSERT_SPARQL = """INSERT { GRAPH <test://graph-2> { <test://instance-6> nie:title 'title 1' } }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT { GRAPH <test://graph-2> { <test://instance-6> nie:title 'title 1' } }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                result = self.tracker.query ("""
+        result = self.tracker.query ("""
                           SELECT ?g ?t WHERE { GRAPH ?g {
                              <test://instance-6> nie:title ?t
                            } }""")
 
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 2)
-                self.assertEquals (result[0][0], "test://graph-1") # Yes, indeed
-                self.assertEquals (result[0][1], "title 1")
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 2)
+        self.assertEquals(result[0][0], "test://graph-1")  # Yes, indeed
+        self.assertEquals(result[0][1], "title 1")
 
-                INSERT_SPARQL = """INSERT OR REPLACE { GRAPH <test://graph-2> { <test://instance-6> nie:title 'title 1' } }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT OR REPLACE { GRAPH <test://graph-2> { <test://instance-6> nie:title 'title 1' } }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                result = self.tracker.query ("""
+        result = self.tracker.query ("""
                           SELECT ?g ?t WHERE { GRAPH ?g {
                              <test://instance-6> nie:title ?t
                            } }""")
 
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 2)
-                self.assertEquals (result[0][0], "test://graph-2") # Yup, that's right
-                self.assertEquals (result[0][1], "title 1")
-
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 2)
+        self.assertEquals(result[0][0], "test://graph-2")  # Yup, that's right
+        self.assertEquals(result[0][1], "title 1")
 
-                INSERT_SPARQL = """INSERT OR REPLACE { GRAPH <test://graph-3> { <test://instance-6> nie:title 'title 2' } }"""
-                self.tracker.update (INSERT_SPARQL)
+        INSERT_SPARQL = """INSERT OR REPLACE { GRAPH <test://graph-3> { <test://instance-6> nie:title 'title 2' } }"""
+        self.tracker.update(INSERT_SPARQL)
 
-                result = self.tracker.query ("""
+        result = self.tracker.query ("""
                           SELECT ?g ?t WHERE { GRAPH ?g {
                              <test://instance-6> nie:title ?t
                            } }""")
 
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 2)
-                self.assertEquals (result[0][0], "test://graph-3")
-                self.assertEquals (result[0][1], "title 2")
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 2)
+        self.assertEquals(result[0][0], "test://graph-3")
+        self.assertEquals(result[0][1], "title 2")
 
-                self.tracker.update ("""
+        self.tracker.update ("""
                 DELETE { <test://instance-6> a rdfs:Resource. }
                 """)
 
-        def __insert_valid_date_test (self, datestring, year, month, day, hours, minutes, seconds, timezone):
-                """
-                Insert a property with datestring value, retrieve its components and validate against
-                the expected results (all the other parameters)
-                """
-                testId = random.randint (10, 1000)
-                self.tracker.update ("""
+    def __insert_valid_date_test(self, datestring, year, month, day, hours, minutes, seconds, timezone):
+        """
+        Insert a property with datestring value, retrieve its components and validate against
+        the expected results (all the other parameters)
+        """
+        testId = random.randint(10, 1000)
+        self.tracker.update ("""
                 INSERT {
                    <test://instance-insert-date-%d> a nie:InformationElement;
                        nie:informationElementDate '%s'.
                 }
                 """ % (testId, datestring))
 
-               result = self.tracker.query ("""
+        result = self.tracker.query ("""
                 SELECT    fn:year-from-dateTime (?v)
                          fn:month-from-dateTime (?v)
                           fn:day-from-dateTime (?v)
@@ -407,80 +408,76 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                        nie:informationElementDate ?v .
                }
                  """ % (testId))
-                try:
-                        self.assertEquals (len (result), 1)
-                        self.assertEquals (len (result[0]), 7)
-                        self.assertEquals (result[0][0], year)
-                        self.assertEquals (result[0][1], month)
-                        self.assertEquals (result[0][2], day)
-                        self.assertEquals (result[0][3], hours)
-                        self.assertEquals (result[0][4], minutes)
-                        self.assertEquals (result[0][5], seconds)
-                        # FIXME To validate this we need to take into account the locale
-                        # self.assertEquals (result[0][7], timezone)
-                finally:
-                        self.tracker.update ("""
+        try:
+            self.assertEquals(len(result), 1)
+            self.assertEquals(len(result[0]), 7)
+            self.assertEquals(result[0][0], year)
+            self.assertEquals(result[0][1], month)
+            self.assertEquals(result[0][2], day)
+            self.assertEquals(result[0][3], hours)
+            self.assertEquals(result[0][4], minutes)
+            self.assertEquals(result[0][5], seconds)
+            # FIXME To validate this we need to take into account the locale
+            # self.assertEquals (result[0][7], timezone)
+        finally:
+            self.tracker.update ("""
                         DELETE { <test://instance-insert-date-%d> a rdfs:Resource. }
                         """ % (testId))
 
+    """Date-Time storage testing """
 
-       """Date-Time storage testing """
-       def test_insert_date_01 (self):
-               """
-                1. Insert a InformationElement with date having local timezone info.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_valid_date_test ("2004-05-06T13:14:15+0400",
-                                               "2004", "05", "06", "13", "14", "15", "14400")
-
-
-       def test_insert_date_02 (self):
-               """
-                1. Insert a InformationElement with date ending with "Z" in TZD.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_valid_date_test ("2004-05-06T13:14:15Z",
-                                               "2004", "05", "06", "13", "14", "15", "0")
-
-       def test_insert_date_03 (self):
-               """
-                1. Insert a InformationElement with date ending with no TZD.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_valid_date_test ("2004-05-06T13:14:15",
-                                               "2004", "05", "06", "13", "14", "15", "10800") # HEL timezone?
-
-
-        # ut skipIf (1, "It times out in the daemon. Investigate")
-       def test_insert_date_04 (self):
-               """
-                1. Insert a InformationElement with date having local timezone info
-                  with some minutes in it.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_valid_date_test ("2004-05-06T13:14:15+0230",
-                                               "2004", "05", "06", "13", "14", "15", "9000")
+    def test_insert_date_01(self):
+        """
+        1. Insert a InformationElement with date having local timezone info.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_valid_date_test("2004-05-06T13:14:15+0400",
+                                      "2004", "05", "06", "13", "14", "15", "14400")
 
+    def test_insert_date_02(self):
+        """
+        1. Insert a InformationElement with date ending with "Z" in TZD.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_valid_date_test("2004-05-06T13:14:15Z",
+                                      "2004", "05", "06", "13", "14", "15", "0")
 
-        # ut skipIf (1, "It times out in the daemon. Investigate")
-        def __test_insert_date_05 (self):
-               """
-                 1. Insert a InformationElement with date having local timezone info in negative.
-                 2. TEST: Query and verify the various componentes of date
-                 """
-                self.__insert_valid_date_test ("2004-05-06T13:14:15-0230",
-                                               "2004", "05", "06", "13", "14", "15", "-9000")
+    def test_insert_date_03(self):
+        """
+        1. Insert a InformationElement with date ending with no TZD.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_valid_date_test("2004-05-06T13:14:15",
+                                      "2004", "05", "06", "13", "14", "15", "10800")  # HEL timezone?
 
+    # ut skipIf (1, "It times out in the daemon. Investigate")
+    def test_insert_date_04(self):
+        """
+        1. Insert a InformationElement with date having local timezone info
+           with some minutes in it.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_valid_date_test("2004-05-06T13:14:15+0230",
+                                      "2004", "05", "06", "13", "14", "15", "9000")
 
-        def __insert_invalid_date_test (self, datestring):
-                self.assertRaises (Exception, self.tracker.update, """
+    # ut skipIf (1, "It times out in the daemon. Investigate")
+    def __test_insert_date_05(self):
+        """
+         1. Insert a InformationElement with date having local timezone info in negative.
+         2. TEST: Query and verify the various componentes of date
+         """
+        self.__insert_valid_date_test("2004-05-06T13:14:15-0230",
+                                      "2004", "05", "06", "13", "14", "15", "-9000")
+
+    def __insert_invalid_date_test(self, datestring):
+        self.assertRaises (Exception, self.tracker.update, """
                         INSERT {
                            <test://instance-insert-invalid-date-01> a nie:InformationElement;
                               nie:informationElementDate '204-05-06T13:14:15+0400'.
                         }
                         """)
 
-               result = self.tracker.query ("""
+        result = self.tracker.query ("""
                 SELECT    fn:year-from-dateTime (?v)
                          fn:month-from-dateTime (?v)
                           fn:day-from-dateTime (?v)
@@ -493,145 +490,163 @@ class TrackerStoreInsertionTests (CommonTrackerStoreTest):
                        nie:informationElementDate ?v .
                }
                 """)
-                self.assertEquals (len (result), 0)
-
-                # ut skipIf (1, "It times out in the daemon. Investigate")
-       def test_insert_invalid_date_01 (self):
-               """
-                1. Insert a InformationElement with invalid year in date.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_invalid_date_test ("204-05-06T13:14:15+0400")
-
-
-                # ut skipIf (1, "It times out in the daemon. Investigate")
-       def test_insert_invalid_date_02 (self):
-               """
-                1. Insert a InformationElement with date without time.
-                2. TEST: Query and verify the various componentes of date
-                """
-                self.__insert_invalid_date_test ("2004-05-06")
+        self.assertEquals(len(result), 0)
 
+        # ut skipIf (1, "It times out in the daemon. Investigate")
+    def test_insert_invalid_date_01(self):
+        """
+        1. Insert a InformationElement with invalid year in date.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_invalid_date_test("204-05-06T13:14:15+0400")
 
+        # ut skipIf (1, "It times out in the daemon. Investigate")
+    def test_insert_invalid_date_02(self):
+        """
+        1. Insert a InformationElement with date without time.
+        2. TEST: Query and verify the various componentes of date
+        """
+        self.__insert_invalid_date_test("2004-05-06")
 
-                # ut skipIf (1, "It times out in the daemon. Investigate")
-       def test_insert_invalid_date_03 (self):
-               """
-                1. Insert a InformationElement with date without time but only the "T" separator.
-                """
-                self.__insert_invalid_date_test ("2004-05-06T")
+        # ut skipIf (1, "It times out in the daemon. Investigate")
+    def test_insert_invalid_date_03(self):
+        """
+        1. Insert a InformationElement with date without time but only the "T" separator.
+        """
+        self.__insert_invalid_date_test("2004-05-06T")
 
-                # ut skipIf (1, "It times out in the daemon. Investigate")
-       def test_insert_invalid_date_04 (self):
-               """
-                1. Insert a InformationElement with date without time but only the "T" separator.
-                """
-                self.__insert_invalid_date_test ("2004-05-06T1g:14:15-0200")
+        # ut skipIf (1, "It times out in the daemon. Investigate")
+    def test_insert_invalid_date_04(self):
+        """
+        1. Insert a InformationElement with date without time but only the "T" separator.
+        """
+        self.__insert_invalid_date_test("2004-05-06T1g:14:15-0200")
 
-       def test_insert_duplicated_url_01 (self):
-               """
-                1. Insert a FileDataObject with a known nie:url, twice
-                """
+    def test_insert_duplicated_url_01(self):
+        """
+        1. Insert a FileDataObject with a known nie:url, twice
+        """
 
-               url = "file:///some/magic/path/here"
+        url = "file:///some/magic/path/here"
 
-               insert = """
+        insert = """
                 INSERT {
                    _:tag a nfo:FileDataObject;
                         nie:url '%s'.
                 }
                 """ % (url)
 
-               # First insert should go ok
-               self.tracker.update (insert)
-               # Second insert should not be ok
-               try:
-                       self.tracker.update (insert)
-               except Exception:
-                       pass
+        # First insert should go ok
+        self.tracker.update(insert)
+        # Second insert should not be ok
+        try:
+            self.tracker.update(insert)
+        except Exception:
+            pass
 
-               # Only 1 element must be available with the given nie:url
-               select = """
+        # Only 1 element must be available with the given nie:url
+        select = """
                 SELECT ?u WHERE { ?u nie:url \"%s\" }
                 """ % (url)
-               self.assertEquals (len (self.tracker.query (select)), 1)
+        self.assertEquals(len(self.tracker.query(select)), 1)
 
-               # Cleanup
-               self.tracker.update ("""
+        # Cleanup
+        self.tracker.update ("""
                 DELETE { ?u a rdfs:Resource } WHERE { ?u a rdfs:Resource ; nie:url '%s' }
                 """ % (url))
 
+    def test_insert_replace_null(self):
+        """
+        Insert or replace, with null
+        """
 
-       def test_insert_replace_null(self):
-                """
-                Insert or replace, with null
-                """
+        self.tracker.update(
+            """INSERT { <test://instance-null> a nie:DataObject, nie:InformationElement }""")
+        self.tracker.update(
+            """INSERT { <test://instance-ds1> a nie:DataSource  }""")
+        self.tracker.update(
+            """INSERT { <test://instance-ds2> a nie:DataSource  }""")
+        self.tracker.update(
+            """INSERT { <test://instance-ds3> a nie:DataSource  }""")
+        self.tracker.update(
+            """INSERT { <test://instance-null> nie:dataSource <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3> }""")
+
+        # null upfront, reset of list, rewrite of new list
+        self.tracker.update(
+            """INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, <test://instance-ds2> }""")
+        result = self.tracker.query(
+            """SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
+        self.assertEquals(len(result), 2)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(len(result[1]), 1)
+        self.assertEquals(result[0][0], "test://instance-ds1")
+        self.assertEquals(result[1][0], "test://instance-ds2")
+
+        # null upfront, reset of list, rewrite of new list, second test
+        self.tracker.update(
+            """INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3> }""")
+        result = self.tracker.query(
+            """SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
+        self.assertEquals(len(result), 3)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(len(result[1]), 1)
+        self.assertEquals(len(result[2]), 1)
+        self.assertEquals(result[0][0], "test://instance-ds1")
+        self.assertEquals(result[1][0], "test://instance-ds2")
+        self.assertEquals(result[2][0], "test://instance-ds3")
+
+        # null in the middle, rewrite of new list
+        self.tracker.update(
+            """INSERT OR REPLACE { <test://instance-null> nie:dataSource <test://instance-ds1>, null, <test://instance-ds2>, <test://instance-ds3> }""")
+        result = self.tracker.query(
+            """SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
+        self.assertEquals(len(result), 2)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(len(result[1]), 1)
+        self.assertEquals(result[0][0], "test://instance-ds2")
+        self.assertEquals(result[1][0], "test://instance-ds3")
+
+        # null at the end
+        self.tracker.update(
+            """INSERT OR REPLACE { <test://instance-null> nie:dataSource <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3>, null }""")
+        result = self.tracker.query(
+            """SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
+        self.assertEquals(len(result), 0)
+
+        # Multiple nulls
+        self.tracker.update(
+            """INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, null, <test://instance-ds2>, <test://instance-ds3> }""")
+        result = self.tracker.query(
+            """SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
+        self.assertEquals(len(result), 2)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(len(result[1]), 1)
+        self.assertEquals(result[0][0], "test://instance-ds2")
+        self.assertEquals(result[1][0], "test://instance-ds3")
+
+        self.tracker.update(
+            """DELETE { <test://instance-null> a rdfs:Resource. }""")
+        self.tracker.update(
+            """DELETE { <test://instance-ds1> a rdfs:Resource. }""")
+        self.tracker.update(
+            """DELETE { <test://instance-ds2> a rdfs:Resource. }""")
+        self.tracker.update(
+            """DELETE { <test://instance-ds3> a rdfs:Resource. }""")
 
-                self.tracker.update("""INSERT { <test://instance-null> a nie:DataObject, nie:InformationElement }""")
-                self.tracker.update("""INSERT { <test://instance-ds1> a nie:DataSource  }""")
-                self.tracker.update("""INSERT { <test://instance-ds2> a nie:DataSource  }""")
-                self.tracker.update("""INSERT { <test://instance-ds3> a nie:DataSource  }""")
-                self.tracker.update("""INSERT { <test://instance-null> nie:dataSource <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3> }""")
-
-                # null upfront, reset of list, rewrite of new list
-                self.tracker.update("""INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, <test://instance-ds2> }""")
-                result = self.tracker.query ("""SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
-                self.assertEquals (len (result), 2)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (len (result[1]), 1)
-                self.assertEquals (result[0][0], "test://instance-ds1")
-                self.assertEquals (result[1][0], "test://instance-ds2")
-
-                # null upfront, reset of list, rewrite of new list, second test
-                self.tracker.update("""INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3> }""")
-                result = self.tracker.query ("""SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
-                self.assertEquals (len (result), 3)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (len (result[1]), 1)
-                self.assertEquals (len (result[2]), 1)
-                self.assertEquals (result[0][0], "test://instance-ds1")
-                self.assertEquals (result[1][0], "test://instance-ds2")
-                self.assertEquals (result[2][0], "test://instance-ds3")
-
-                # null in the middle, rewrite of new list
-                self.tracker.update("""INSERT OR REPLACE { <test://instance-null> nie:dataSource <test://instance-ds1>, null, <test://instance-ds2>, <test://instance-ds3> }""")
-                result = self.tracker.query ("""SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
-                self.assertEquals (len (result), 2)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (len (result[1]), 1)
-                self.assertEquals (result[0][0], "test://instance-ds2")
-                self.assertEquals (result[1][0], "test://instance-ds3")
-                               
-                # null at the end
-                self.tracker.update("""INSERT OR REPLACE { <test://instance-null> nie:dataSource <test://instance-ds1>, <test://instance-ds2>, <test://instance-ds3>, null }""")
-                result = self.tracker.query ("""SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
-                self.assertEquals (len (result), 0)
-       
-                # Multiple nulls
-                self.tracker.update("""INSERT OR REPLACE { <test://instance-null> nie:dataSource null, <test://instance-ds1>, null, <test://instance-ds2>, <test://instance-ds3> }""")
-                result = self.tracker.query ("""SELECT ?ds WHERE { <test://instance-null> nie:dataSource ?ds }""")
-                self.assertEquals (len (result), 2)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (len (result[1]), 1)
-                self.assertEquals (result[0][0], "test://instance-ds2")
-                self.assertEquals (result[1][0], "test://instance-ds3")
-       
-                self.tracker.update ("""DELETE { <test://instance-null> a rdfs:Resource. }""")
-                self.tracker.update ("""DELETE { <test://instance-ds1> a rdfs:Resource. }""")
-                self.tracker.update ("""DELETE { <test://instance-ds2> a rdfs:Resource. }""")
-                self.tracker.update ("""DELETE { <test://instance-ds3> a rdfs:Resource. }""")
 
 class TrackerStoreDeleteTests (CommonTrackerStoreTest):
+
+    """
+    Use DELETE in Sparql and check the information is actually removed
+    """
+
+    def test_delete_01(self):
         """
-        Use DELETE in Sparql and check the information is actually removed
+        Insert triples and Delete a triple. Verify the deletion with a query
         """
-        def test_delete_01 (self):
-                """
-                Insert triples and Delete a triple. Verify the deletion with a query
-                """
 
-               # first insert
-                self.tracker.update ("""
+        # first insert
+        self.tracker.update ("""
                 INSERT {
                    <urn:uuid:7646001> a nco:Contact;
                             nco:fullname 'Artist_1_delete'.
@@ -643,88 +658,89 @@ class TrackerStoreDeleteTests (CommonTrackerStoreTest):
                 }
                 """)
 
-               # verify the insertion
-                result = self.tracker.query ("""
+        # verify the insertion
+        result = self.tracker.query ("""
                 SELECT ?u WHERE {
                     ?u a nmm:MusicPiece ;
                          nfo:genre 'Classic delete' .
                 }
                 """)
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (result[0][0], "test://instance-test-delete-01")
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(result[0][0], "test://instance-test-delete-01")
 
-               # now delete
-                self.tracker.update("""
+        # now delete
+        self.tracker.update("""
                 DELETE {
                   <test://instance-test-delete-01> a rdfs:Resource.
                 }
                 """)
 
-                # Check the instance is not there
-                result = self.tracker.query ("""
+        # Check the instance is not there
+        result = self.tracker.query ("""
                 SELECT ?u WHERE {
                     ?u a nmm:MusicPiece ;
                          nfo:genre 'Classic delete' .
                 }
                 """)
-                self.assertEquals (len (result), 0)
-
+        self.assertEquals(len(result), 0)
 
-       def test_delete_02 (self):
-                """
-                Delete a MusicAlbum and count the album
+    def test_delete_02(self):
+        """
+        Delete a MusicAlbum and count the album
 
-               1. add a music album.
-               2. count the number of albums
-               3. delete an album
-               2. count the number of albums
-               """
+        1. add a music album.
+        2. count the number of albums
+        3. delete an album
+        2. count the number of albums
+        """
 
-                initial = self.tracker.count_instances ("nmm:MusicAlbum")
+        initial = self.tracker.count_instances("nmm:MusicAlbum")
 
-               """Add a music album """
-                self.tracker.update ("""
+        """Add a music album """
+        self.tracker.update ("""
                 INSERT {
                    <test://instance-delete-02> a nmm:MusicAlbum;
                            nmm:albumTitle '06_Album_delete'.
                 }
                 """)
 
-                after_insert = self.tracker.count_instances ("nmm:MusicAlbum")
-                self.assertEquals (initial+1, after_insert)
+        after_insert = self.tracker.count_instances("nmm:MusicAlbum")
+        self.assertEquals(initial + 1, after_insert)
 
-               """Delete the added music album """
-                self.tracker.update("""
+        """Delete the added music album """
+        self.tracker.update("""
                 DELETE {
                   <test://instance-delete-02> a nmm:MusicAlbum.
                 }
                 """)
 
-               """get the count of music albums"""
-                after_removal = self.tracker.count_instances ("nmm:MusicAlbum")
+        """get the count of music albums"""
+        after_removal = self.tracker.count_instances("nmm:MusicAlbum")
 
-                self.assertEquals (after_removal, initial)
+        self.assertEquals(after_removal, initial)
 
 
 class TrackerStoreBatchUpdateTest (CommonTrackerStoreTest):
+
+    """
+    Insert data using the BatchSparqlUpdate method in the store
+    """
+
+    def test_batch_insert_01(self):
         """
-        Insert data using the BatchSparqlUpdate method in the store
+        batch insertion of 100 contacts:
+        1. insert 100 contacts.
+        2. delete the inserted contacts.
         """
+        NUMBER_OF_TEST_CONTACTS = 3
 
-       def test_batch_insert_01(self):
-               """
-                batch insertion of 100 contacts:
-               1. insert 100 contacts.
-               2. delete the inserted contacts.
-               """
-                NUMBER_OF_TEST_CONTACTS = 3
-
-               # query no. of existing contacts. (predefined instances in the DB)
-               count_before_insert = self.tracker.count_instances ("nco:PersonContact")
+        # query no. of existing contacts. (predefined instances in the DB)
+        count_before_insert = self.tracker.count_instances(
+            "nco:PersonContact")
 
-               # insert contacts.
-                CONTACT_TEMPLATE = """
+        # insert contacts.
+        CONTACT_TEMPLATE = """
                    <test://instance-contact-%d> a nco:PersonContact ;
                       nco:nameGiven 'Contact-name %d';
                       nco:nameFamily 'Contact-family %d';
@@ -733,33 +749,37 @@ class TrackerStoreBatchUpdateTest (CommonTrackerStoreTest):
                       nco:hasPhoneNumber <tel:%s> .
                 """
 
-                global contact_list
-                contact_list = []
-                def complete_contact (contact_template):
-                        random_phone = "".join ([str(random.randint (0, 9)) for i in range (0, 9)])
-                        contact_counter = random.randint (0, 10000)
-
-                        # Avoid duplicates
-                        while contact_counter in contact_list:
-                                contact_counter = random.randint (0, 10000)
-                        contact_list.append (contact_counter)
-
-                        return contact_template % (contact_counter,
-                                                   contact_counter,
-                                                   contact_counter,
-                                                   contact_counter,
-                                                   random_phone)
-
-                contacts = map (complete_contact, [CONTACT_TEMPLATE] * NUMBER_OF_TEST_CONTACTS)
-               INSERT_SPARQL = "\n".join (["INSERT {"] + contacts +["}"])
-                       self.tracker.batch_update (INSERT_SPARQL)
-
-               # Check all instances are in
-               count_after_insert = self.tracker.count_instances ("nco:PersonContact")
-                self.assertEquals (count_before_insert + NUMBER_OF_TEST_CONTACTS, count_after_insert)
-
-               """ Delete the inserted contacts """
-                DELETE_SPARQL = """
+        global contact_list
+        contact_list = []
+
+        def complete_contact(contact_template):
+            random_phone = "".join(
+                [str(random.randint(0, 9)) for i in range(0, 9)])
+            contact_counter = random.randint(0, 10000)
+
+            # Avoid duplicates
+            while contact_counter in contact_list:
+                contact_counter = random.randint(0, 10000)
+            contact_list.append(contact_counter)
+
+            return contact_template % (contact_counter,
+                                       contact_counter,
+                                       contact_counter,
+                                       contact_counter,
+                                       random_phone)
+
+        contacts = map(
+            complete_contact, [CONTACT_TEMPLATE] * NUMBER_OF_TEST_CONTACTS)
+        INSERT_SPARQL = "\n".join(["INSERT {"] + contacts + ["}"])
+        self.tracker.batch_update(INSERT_SPARQL)
+
+        # Check all instances are in
+        count_after_insert = self.tracker.count_instances("nco:PersonContact")
+        self.assertEquals(
+            count_before_insert + NUMBER_OF_TEST_CONTACTS, count_after_insert)
+
+        """ Delete the inserted contacts """
+        DELETE_SPARQL = """
                 DELETE {
                   ?x a rdfs:Resource .
                 } WHERE {
@@ -767,35 +787,37 @@ class TrackerStoreBatchUpdateTest (CommonTrackerStoreTest):
                       nie:generator 'test-instance-to-remove' .
                 }
                 """
-                self.tracker.update (DELETE_SPARQL)
-                count_final = self.tracker.count_instances ("nco:PersonContact")
-                self.assertEquals (count_before_insert, count_final)
+        self.tracker.update(DELETE_SPARQL)
+        count_final = self.tracker.count_instances("nco:PersonContact")
+        self.assertEquals(count_before_insert, count_final)
+
 
 class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
-       """
-        Tests around phone numbers (maemo specific). Inserting correct/incorrect ones
-        and running query to get the contact from the number.
-       """
-
-        @ut.skipIf (not cfg.haveMaemo, "This test uses maemo:specific properties")
-       def test_phone_01 (self):
-               """
-                1. Setting the maemo:localPhoneNumber property to last 7 digits of phone number.
-               2. Receiving a message  from a contact whose localPhoneNumber is saved.
-               3. Query messages from the local phone number
-               """
-               PhoneNumber = str(random.randint (0, sys.maxint))
-               UUID        = str(time.time())
-               UUID1       = str(random.randint (0, sys.maxint))
-               UUID2       = str(random.randint (0, sys.maxint))
-               localNumber = PhoneNumber[-7:]
-               d=datetime.datetime.now()
-               Received=d.isoformat()
-               ID          = int(time.time())%1000
-               Given_Name  = 'test_GN_' + `ID`
-               Family_Name = 'test_FN_' + `ID`
-
-               INSERT_CONTACT_PHONE = """
+
+    """
+    Tests around phone numbers (maemo specific). Inserting correct/incorrect ones
+    and running query to get the contact from the number.
+    """
+
+    @ut.skipIf(not cfg.haveMaemo, "This test uses maemo:specific properties")
+    def test_phone_01(self):
+        """
+        1. Setting the maemo:localPhoneNumber property to last 7 digits of phone number.
+        2. Receiving a message  from a contact whose localPhoneNumber is saved.
+        3. Query messages from the local phone number
+        """
+        PhoneNumber = str(random.randint(0, sys.maxint))
+        UUID = str(time.time())
+        UUID1 = str(random.randint(0, sys.maxint))
+        UUID2 = str(random.randint(0, sys.maxint))
+        localNumber = PhoneNumber[-7:]
+        d = datetime.datetime.now()
+        Received = d.isoformat()
+        ID = int(time.time()) % 1000
+        Given_Name = 'test_GN_' + `ID`
+        Family_Name = 'test_FN_' + `ID`
+
+        INSERT_CONTACT_PHONE = """
                 INSERT {
                     <tel:123456789> a nco:PhoneNumber ;
                           nco:phoneNumber  '00358555444333' ;
@@ -808,9 +830,9 @@ class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
                     <test://test_phone_1/contact> nco:hasPhoneNumber <tel:123456789>.
                 }
                 """
-                self.tracker.update (INSERT_CONTACT_PHONE)
+        self.tracker.update(INSERT_CONTACT_PHONE)
 
-               INSERT_MESSAGE = """
+        INSERT_MESSAGE = """
                 INSERT {
                     <test://test_phone_1/message> a nmo:Message ;
                          nmo:from [a nco:Contact ; nco:hasPhoneNumber <tel:123456789>];
@@ -818,27 +840,26 @@ class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
                          nie:plainTextContent 'hello'
                 }
                 """
-                self.tracker.update (INSERT_MESSAGE)
+        self.tracker.update(INSERT_MESSAGE)
 
-               QUERY_SPARQL = """
+        QUERY_SPARQL = """
                 SELECT ?msg WHERE {
                      ?msg a nmo:Message;
                          nmo:from ?c .
                      ?c nco:hasPhoneNumber ?n .
                      ?n maemo:localPhoneNumber '5444333'.
                } """
-                result = self.tracker.query (QUERY_SPARQL)
-                self.assertEquals (len (result), 1)
-                self.assertEquals (len (result[0]), 1)
-                self.assertEquals (result[0][0], "test://test_phone_1/message")
-
+        result = self.tracker.query(QUERY_SPARQL)
+        self.assertEquals(len(result), 1)
+        self.assertEquals(len(result[0]), 1)
+        self.assertEquals(result[0][0], "test://test_phone_1/message")
 
-        @ut.skipIf (not cfg.haveMaemo, "This test uses maemo:specific properties")
-       def test_phone_02 (self):
-               """
-                Inserting a local phone number which have spaces
-                """
-               INSERT_SPARQL = """
+    @ut.skipIf(not cfg.haveMaemo, "This test uses maemo:specific properties")
+    def test_phone_02(self):
+        """
+        Inserting a local phone number which have spaces
+        """
+        INSERT_SPARQL = """
                 INSERT {
                        <tel+3333333333> a nco:PhoneNumber ;
                                nco:phoneNumber  <tel+3333333333> ;
@@ -850,8 +871,8 @@ class TrackerStorePhoneNumberTest (CommonTrackerStoreTest):
                                 nco:hasPhoneNumber <tel+3333333333> .
                 }
                 """
-                self.assertRaises (Exception, self.tracker.update (INSERT_SPARQL))
+        self.assertRaises(Exception, self.tracker.update(INSERT_SPARQL))
 
 
 if __name__ == "__main__":
-       ut.main()
+    ut.main()
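
Note on test_phone_02 above: assertRaises is handed the return value of
self.tracker.update(INSERT_SPARQL) rather than the callable itself, so the
update runs before assertRaises can intercept anything and no exception is
really being asserted. Both the removed and the re-indented lines show the
same expression; if the intent is to check that the malformed insert fails,
a sketch of the usual unittest form would be:

    self.assertRaises(Exception, self.tracker.update, INSERT_SPARQL)
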
diff --git a/tests/functional-tests/02-sparql-bugs.py b/tests/functional-tests/02-sparql-bugs.py
index c0e4928..dc6ba18 100755
--- a/tests/functional-tests/02-sparql-bugs.py
+++ b/tests/functional-tests/02-sparql-bugs.py
@@ -21,7 +21,9 @@
 """
 Peculiar Sparql behavour reported in bugs
 """
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -37,11 +39,11 @@ from common.utils.expectedFailure import expectedFailureBug
 
 class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
 
-        def test_01_NB217566_union_exists_filter (self):
-                """
-                NB217566: Use of UNION in EXISTS in a FILTER breaks filtering 
-                """
-                content = """
+    def test_01_NB217566_union_exists_filter(self):
+        """
+        NB217566: Use of UNION in EXISTS in a FILTER breaks filtering
+        """
+        content = """
                 INSERT {
                     <contact:affiliation> a nco:Affiliation ;
                              nco:hasPhoneNumber
@@ -50,10 +52,10 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
                              nco:hasAffiliation <contact:affiliation> .
                 }
                 """
-                self.tracker.update (content)
+        self.tracker.update(content)
 
-                """ Check that these 3 queries return the same results """
-                query1 = """
+        """ Check that these 3 queries return the same results """
+        query1 = """
                 SELECT  ?_contact ?n WHERE {
                    ?_contact a nco:PersonContact .
                    {
@@ -80,7 +82,7 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
                 }
                 """
 
-                query2 = """
+        query2 = """
                 SELECT ?_contact ?n WHERE {
                     ?_contact a nco:PersonContact .
                     {
@@ -95,7 +97,7 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
                 }
                 """
 
-                query3 = """
+        query3 = """
                 SELECT ?_contact ?n WHERE {
                     ?_contact a nco:PersonContact .
                     {
@@ -117,142 +119,137 @@ class TrackerStoreSparqlBugsTests (CommonTrackerStoreTest):
                 }
                 """
 
-                results1 = self.tracker.query (query1)
-                print "1", results1
-                self.assertEquals (len (results1), 1)
-                self.assertEquals (len (results1[0]), 2)
-                self.assertEquals (results1[0][0], "contact:test")
-                self.assertEquals (results1[0][1], "98653")
-
-                results2 = self.tracker.query (query2)
-                print "2", results2
-                self.assertEquals (len (results2), 1)
-                self.assertEquals (len (results2[0]), 2)
-                self.assertEquals (results2[0][0], "contact:test")
-                self.assertEquals (results2[0][1], "98653")
-                
-
-                results3 = self.tracker.query (query3)
-                print "3", results3
-                self.assertEquals (len (results3), 1)
-                self.assertEquals (len (results3[0]), 2)
-                self.assertEquals (results3[0][0], "contact:test")
-                self.assertEquals (results3[0][1], "98653")
-
-                """ Clean the DB """
-                delete = """
+        results1 = self.tracker.query(query1)
+        print "1", results1
+        self.assertEquals(len(results1), 1)
+        self.assertEquals(len(results1[0]), 2)
+        self.assertEquals(results1[0][0], "contact:test")
+        self.assertEquals(results1[0][1], "98653")
+
+        results2 = self.tracker.query(query2)
+        print "2", results2
+        self.assertEquals(len(results2), 1)
+        self.assertEquals(len(results2[0]), 2)
+        self.assertEquals(results2[0][0], "contact:test")
+        self.assertEquals(results2[0][1], "98653")
+
+        results3 = self.tracker.query(query3)
+        print "3", results3
+        self.assertEquals(len(results3), 1)
+        self.assertEquals(len(results3[0]), 2)
+        self.assertEquals(results3[0][0], "contact:test")
+        self.assertEquals(results3[0][1], "98653")
+
+        """ Clean the DB """
+        delete = """
                 DELETE { <contact:affiliation> a rdfs:Resource .
                 <contact:test> a rdfs:Resource .
                 }
-                """ 
-                
-        def test_02_NB217636_delete_statements (self):
-                """
-                Bug 217636 - Not able to delete contact using
-                DELETE {<contact:556> ?p ?v} WHERE {<contact:556> ?p ?v}.
                 """
-                data = """ INSERT {
+
+    def test_02_NB217636_delete_statements(self):
+        """
+        Bug 217636 - Not able to delete contact using
+        DELETE {<contact:556> ?p ?v} WHERE {<contact:556> ?p ?v}.
+        """
+        data = """ INSERT {
                    <contact:test-nb217636> a nco:PersonContact ;
                           nco:fullname 'Testing bug 217636'
                 }
                 """
-                self.tracker.update (data)
+        self.tracker.update(data)
 
-                results = self.tracker.query ("""
+        results = self.tracker.query ("""
                  SELECT ?u WHERE {
                     ?u a nco:PersonContact ;
                       nco:fullname 'Testing bug 217636' .
                       }
                       """)
-                self.assertEquals (len (results), 1)
-                self.assertEquals (len (results[0]), 1)
-                self.assertEquals (results[0][0], "contact:test-nb217636")
+        self.assertEquals(len(results), 1)
+        self.assertEquals(len(results[0]), 1)
+        self.assertEquals(results[0][0], "contact:test-nb217636")
 
-                problematic_delete = """
+        problematic_delete = """
                 DELETE { <contact:test-nb217636> ?p ?v }
                 WHERE  { <contact:test-nb217636> ?p ?v }
                 """
-                self.tracker.update (problematic_delete)
+        self.tracker.update(problematic_delete)
 
-                results_after = self.tracker.query ("""
+        results_after = self.tracker.query ("""
                  SELECT ?u WHERE {
                     ?u a nco:PersonContact ;
                       nco:fullname 'Testing bug 217636' .
                       }
                       """)
-                self.assertEquals (len (results_after), 0)
+        self.assertEquals(len(results_after), 0)
 
-                # Safe deletion
-                delete = """
+        # Safe deletion
+        delete = """
                 DELETE { <contact:test-nb217636> a rdfs:Resource. }
                 """
-                self.tracker.update (delete)
-
-
-        def test_03_NB222645_non_existing_class_resource (self):
-                """
-                NB222645 - Inserting a resource using an non-existing class, doesn't rollback completely
-                """
-                query = "SELECT tracker:modified (?u) ?u  WHERE { ?u a nco:Contact }"
-                original_data = self.tracker.query (query)
-
-                wrong_insert = "INSERT { <test://nb222645-wrong-class-contact> a nco:IMContact. } "
-                self.assertRaises (dbus.DBusException,
-                                   self.tracker.update,
-                                   wrong_insert)
-
-                new_data = self.tracker.query (query)
-                self.assertEquals (len (original_data), len (new_data))
-                # We could be more picky, but checking there are the same number of results
-                # is enough to verify the problem described in the bug.
-
-
-        def test_04_NB224760_too_long_filter (self):
-                """
-                NB#224760 - 'too many sql variables' when filter ?sth in (long list)
-                """
-                query = "SELECT tracker:id (?m) ?m WHERE { ?m a rdfs:Resource. FILTER (tracker:id (?m) in (%s)) }"
-                numbers = ",".join ([str (i) for i in range (1000, 2000)])
-
-                results = self.tracker.query (query % (numbers))
-
-                # The query will raise an exception is the bug is there
-                # If we are here, everything is fine. 
-                self.assertIsNotNone (results)
-                                   
-        def test_05_NB281201_insert_replace_and_superproperties (self):
-                """
-                Bug 281201 - INSERT OR REPLACE does not delete previous values for superproperties
-                """
-                content = """INSERT { <test:resource:nb281201> a nie:InformationElement; 
+        self.tracker.update(delete)
+
+    def test_03_NB222645_non_existing_class_resource(self):
+        """
+        NB222645 - Inserting a resource using an non-existing class, doesn't rollback completely
+        """
+        query = "SELECT tracker:modified (?u) ?u  WHERE { ?u a nco:Contact }"
+        original_data = self.tracker.query(query)
+
+        wrong_insert = "INSERT { <test://nb222645-wrong-class-contact> a nco:IMContact. } "
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.update,
+                          wrong_insert)
+
+        new_data = self.tracker.query(query)
+        self.assertEquals(len(original_data), len(new_data))
+        # We could be more picky, but checking there are the same number of results
+        # is enough to verify the problem described in the bug.
+
+    def test_04_NB224760_too_long_filter(self):
+        """
+        NB#224760 - 'too many sql variables' when filter ?sth in (long list)
+        """
+        query = "SELECT tracker:id (?m) ?m WHERE { ?m a rdfs:Resource. FILTER (tracker:id (?m) in (%s)) }"
+        numbers = ",".join([str(i) for i in range(1000, 2000)])
+
+        results = self.tracker.query(query % (numbers))
+
+        # The query will raise an exception is the bug is there
+        # If we are here, everything is fine.
+        self.assertIsNotNone(results)
+
+    def test_05_NB281201_insert_replace_and_superproperties(self):
+        """
+        Bug 281201 - INSERT OR REPLACE does not delete previous values for superproperties
+        """
+        content = """INSERT { <test:resource:nb281201> a nie:InformationElement;
                                                nie:contentLastModified '2011-09-27T11:11:11Z'. }"""
-                self.tracker.update (content)
+        self.tracker.update(content)
 
-                query = """SELECT ?contentLM ?nieIEDate ?dcDate { 
+        query = """SELECT ?contentLM ?nieIEDate ?dcDate {
                               <test:resource:nb281201> dc:date ?dcDate ;
                                                  nie:informationElementDate ?nieIEDate ;
                                                  nie:contentLastModified ?contentLM .
                            }"""
-                result = self.tracker.query (query)
-                # Only one row of results, and the 3 colums have the same value
-                self.assertEquals (len (result), 1)
-                self.assertEquals (result[0][0], result[0][1])
-                self.assertEquals (result[0][1], result[0][2])
+        result = self.tracker.query(query)
+        # Only one row of results, and the 3 colums have the same value
+        self.assertEquals(len(result), 1)
+        self.assertEquals(result[0][0], result[0][1])
+        self.assertEquals(result[0][1], result[0][2])
 
-                problematic = """INSERT OR REPLACE {
+        problematic = """INSERT OR REPLACE {
                                    <test:resource:nb281201> nie:contentLastModified '2012-10-28T12:12:12'
                                  }"""
-                
-                self.tracker.update (problematic)
 
-                result = self.tracker.query (query)
-                # Only one row of results, and the 3 colums have the same value
-                self.assertEquals (len (result), 1)
-                self.assertEquals (result[0][0], result[0][1])
-                self.assertEquals (result[0][1], result[0][2])
-                
+        self.tracker.update(problematic)
 
+        result = self.tracker.query(query)
+        # Only one row of results, and the 3 colums have the same value
+        self.assertEquals(len(result), 1)
+        self.assertEquals(result[0][0], result[0][1])
+        self.assertEquals(result[0][1], result[0][2])
 
 
 if __name__ == "__main__":
-       ut.main()
+    ut.main()
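
Note on test_01_NB217566_union_exists_filter above: the block under the
"Clean the DB" marker only assigns the DELETE statement to the local
variable delete, in both the removed and the re-indented version, so the
<contact:affiliation> and <contact:test> resources inserted by the test are
never removed. A sketch of the missing cleanup call, assuming the same
update helper used elsewhere in the file:

    self.tracker.update(delete)
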
diff --git a/tests/functional-tests/03-fts-functions.py b/tests/functional-tests/03-fts-functions.py
index f9434e6..e20b926 100755
--- a/tests/functional-tests/03-fts-functions.py
+++ b/tests/functional-tests/03-fts-functions.py
@@ -31,11 +31,14 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestFTSFunctions (CommonTrackerStoreTest):
+
     """
     Insert data with text and check the fts:xxxx functions are returning the expected results
     """
-    def test_fts_rank (self):
+
+    def test_fts_rank(self):
         """
         1. Insert a Contact1 with 'abcdefxyz' as fullname and nickname
         2. Insert a Contact2 with 'abcdefxyz' as fullname
@@ -58,7 +61,7 @@ class TestFTSFunctions (CommonTrackerStoreTest):
                        nco:nickname 'abcdefxyz abcdefxyz' .
         }
         """
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
         query = """
         SELECT ?contact WHERE {
@@ -66,12 +69,12 @@ class TestFTSFunctions (CommonTrackerStoreTest):
                 fts:match 'abcdefxyz' .
         } ORDER BY DESC (fts:rank(?contact))
         """
-        results = self.tracker.query (query)
+        results = self.tracker.query(query)
 
-        self.assertEquals (len(results), 3)
-        self.assertEquals (results[0][0], "contact://test/fts-function/rank/3")
-        self.assertEquals (results[1][0], "contact://test/fts-function/rank/1")
-        self.assertEquals (results[2][0], "contact://test/fts-function/rank/2")
+        self.assertEquals(len(results), 3)
+        self.assertEquals(results[0][0], "contact://test/fts-function/rank/3")
+        self.assertEquals(results[1][0], "contact://test/fts-function/rank/1")
+        self.assertEquals(results[2][0], "contact://test/fts-function/rank/2")
 
         delete_sparql = """
         DELETE {
@@ -80,10 +83,9 @@ class TestFTSFunctions (CommonTrackerStoreTest):
         <contact://test/fts-function/rank/3> a rdfs:Resource .
         }
         """
-        self.tracker.update (delete_sparql)
-
+        self.tracker.update(delete_sparql)
 
-    def test_fts_offsets (self):
+    def test_fts_offsets(self):
         """
         1. Insert a Contact1 with 'abcdefxyz' as fullname and nickname
         2. Insert a Contact2 with 'abcdefxyz' as fullname
@@ -106,7 +108,7 @@ class TestFTSFunctions (CommonTrackerStoreTest):
                        nco:nickname 'abcdefxyz abcdefxyz' .
         }
         """
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
         query = """
         SELECT fts:offsets (?contact) WHERE {
@@ -114,12 +116,13 @@ class TestFTSFunctions (CommonTrackerStoreTest):
                 fts:match 'abcdefxyz' .
         }
         """
-        results = self.tracker.query (query)
+        results = self.tracker.query(query)
 
-        self.assertEquals (len(results), 3)
-        self.assertEquals (results[0][0], 'nco:fullname,0,nco:nickname,0')
-        self.assertEquals (results[1][0], 'nco:fullname,0')
-        self.assertEquals (results[2][0], 'nco:fullname,0,nco:nickname,0,nco:nickname,10')
+        self.assertEquals(len(results), 3)
+        self.assertEquals(results[0][0], 'nco:fullname,0,nco:nickname,0')
+        self.assertEquals(results[1][0], 'nco:fullname,0')
+        self.assertEquals(
+            results[2][0], 'nco:fullname,0,nco:nickname,0,nco:nickname,10')
 
         delete_sparql = """
         DELETE {
@@ -128,7 +131,7 @@ class TestFTSFunctions (CommonTrackerStoreTest):
         <contact://test/fts-function/offset/3> a rdfs:Resource .
         }
         """
-        self.tracker.update (delete_sparql)
+        self.tracker.update(delete_sparql)
 
 
 if __name__ == '__main__':
diff --git a/tests/functional-tests/04-group-concat.py b/tests/functional-tests/04-group-concat.py
index 5fdfaf5..e14bb1e 100755
--- a/tests/functional-tests/04-group-concat.py
+++ b/tests/functional-tests/04-group-concat.py
@@ -29,20 +29,23 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestGroupConcat (CommonTrackerStoreTest):
+
     """
     Insert a multivalued property and request the results in GROUP_CONCAT
     """
-    def test_group_concat (self):
+
+    def test_group_concat(self):
         """
         1. Insert 3 capabilities for a test contact
         2. Retrieve contact/capabilites without group_contact (3 results)
         2. TEST: contact with group_concat capabilities (1 result)
         3. Remove the test contact inserted
         """
-        
+
         uri = "contact://test_group_concat"
-        
+
         insert = """
         INSERT { <%s> a nco:IMAddress;
                       nco:imID \"test_group_concat\";
@@ -51,7 +54,7 @@ class TestGroupConcat (CommonTrackerStoreTest):
                       nco:imCapability nco:im-capability-file-transfers .
          }
         """ % (uri)
-        self.tracker.update (insert)
+        self.tracker.update(insert)
 
         query = """
         SELECT ?c ?capability WHERE {
@@ -59,23 +62,23 @@ class TestGroupConcat (CommonTrackerStoreTest):
               nco:imID \"test_group_concat\";
               nco:imCapability ?capability .
         }
-        """ 
-        results = self.tracker.query (query)
+        """
+        results = self.tracker.query(query)
 
-        assert len (results) == 3
+        assert len(results) == 3
         group_concat_query = """
         SELECT ?c GROUP_CONCAT (?capability, '|') AS ?cap WHERE {
            ?c a nco:IMAddress ;
               nco:imID \"test_group_concat\";
               nco:imCapability ?capability .
         } GROUP BY (?c)
-        """ 
-        results = self.tracker.query (group_concat_query)
-        assert len (results) == 1
-        
-        instances = results[0][1].split ('|')
-        assert len (instances) == 3
-        
+        """
+        results = self.tracker.query(group_concat_query)
+        assert len(results) == 1
+
+        instances = results[0][1].split('|')
+        assert len(instances) == 3
+
         TEXT_CHAT = "http://www.semanticdesktop.org/ontologies/2007/03/22/nco#im-capability-text-chat";
         MEDIA_CALLS = "http://www.semanticdesktop.org/ontologies/2007/03/22/nco#im-capability-media-calls";
         FILE_TRANSFERS = "http://www.semanticdesktop.org/ontologies/2007/03/22/nco#im-capability-file-transfers";
@@ -83,14 +86,13 @@ class TestGroupConcat (CommonTrackerStoreTest):
         assert MEDIA_CALLS in instances
         assert FILE_TRANSFERS in instances
 
-        
         #self.assertEquals (str(results[0][0]), "test_insertion_1")
 
         delete = """
         DELETE { <%s> a rdfs:Resource. }
         """ % (uri)
-        self.tracker.update (delete)
-        
+        self.tracker.update(delete)
+
 
 if __name__ == '__main__':
     ut.main()
diff --git a/tests/functional-tests/05-coalesce.py b/tests/functional-tests/05-coalesce.py
index d574c27..5ae334c 100755
--- a/tests/functional-tests/05-coalesce.py
+++ b/tests/functional-tests/05-coalesce.py
@@ -30,13 +30,15 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestCoalesce (CommonTrackerStoreTest):
+
     """
     Insert and instance with some values, and tracker coalesce of some of them
     with different combinations (first NULL, none NULL, all NULL...)
     """
 
-    def setUp (self):
+    def setUp(self):
         self.resource_uri = "contact://test_group_concat"
 
         #
@@ -48,17 +50,15 @@ class TestCoalesce (CommonTrackerStoreTest):
                       nco:nameFamily \"family name\" .
          }
         """ % (self.resource_uri)
-        self.tracker.update (insert)
+        self.tracker.update(insert)
 
-    def tearDown (self):
+    def tearDown(self):
         delete = """
         DELETE { <%s> a rdfs:Resource. }
         """ % (self.resource_uri)
-        self.tracker.update (delete)
-
+        self.tracker.update(delete)
 
-        
-    def test_coalesce_first_fine (self):
+    def test_coalesce_first_fine(self):
         """
         setUp: Insert a contact with only some text properties set
         1. TEST: run a query with coalesce with the valid value in first position
@@ -74,13 +74,12 @@ class TestCoalesce (CommonTrackerStoreTest):
            OPTIONAL { ?c nco:note ?note }
            FILTER (?c != nco:default-contact-me && ?c != nco:default-contact-emergency)
         }
-        """ 
-        results = self.tracker.query (query)
-        assert len (results) == 1
+        """
+        results = self.tracker.query(query)
+        assert len(results) == 1
         assert results[0][0] == "full name"
 
-
-    def test_coalesce_second_fine (self):
+    def test_coalesce_second_fine(self):
         """
         setUp: Insert a contact with only some text properties set
         1. TEST: run a query with coalesce. First property NULL, second fine
@@ -96,13 +95,12 @@ class TestCoalesce (CommonTrackerStoreTest):
            OPTIONAL { ?c nco:note ?note }
            FILTER (?c != nco:default-contact-me && ?c != nco:default-contact-emergency)
         }
-        """ 
-        results = self.tracker.query (query)
-        assert len (results) == 1
+        """
+        results = self.tracker.query(query)
+        assert len(results) == 1
         assert results[0][0] == "family name"
 
-
-    def test_coalesce_none_fine_default (self):
+    def test_coalesce_none_fine_default(self):
         """
         setUp: Insert a contact with only some text properties set
         1. TEST: run a query with coalesce. all variables NULL, return default value
@@ -118,11 +116,11 @@ class TestCoalesce (CommonTrackerStoreTest):
            OPTIONAL { ?c nco:note ?note }
            FILTER (?c != nco:default-contact-me && ?c != nco:default-contact-emergency)
         }
-        """ 
-        results = self.tracker.query (query)
-        assert len (results) == 1
+        """
+        results = self.tracker.query(query)
+        assert len(results) == 1
         assert results[0][0] == "test_coalesce"
-        
+
 
 if __name__ == '__main__':
     ut.main()
diff --git a/tests/functional-tests/06-distance.py b/tests/functional-tests/06-distance.py
index 1966de6..e1b30eb 100755
--- a/tests/functional-tests/06-distance.py
+++ b/tests/functional-tests/06-distance.py
@@ -31,13 +31,16 @@ from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreT
 
 POINT_COORDS = [
     (0, 0), (1, 1), (2, 2), (3, 3), (4, 4)
-    ]
+]
+
 
 class TestDistanceFunctions (CommonTrackerStoreTest):
+
     """
     Insert some points and get the distance between them.
     """
-    def setUp (self):
+
+    def setUp(self):
         self.counter = 0
         for lat, log in POINT_COORDS:
             insert = """
@@ -47,25 +50,24 @@ class TestDistanceFunctions (CommonTrackerStoreTest):
                 mlo:latitude %d .                
             }
             """ % ("point://test/point/" + str(self.counter), log, lat)
-            self.tracker.update (insert)
+            self.tracker.update(insert)
             self.counter += 1
 
-    def tearDown (self):
-        for i in range (0, self.counter):
+    def tearDown(self):
+        for i in range(0, self.counter):
             delete = """
             DELETE {
             <%s> a rdfs:Resource.
             }
             """ % ("point://test/point/" + str (i))
-            self.tracker.update (delete)
+            self.tracker.update(delete)
 
+    def get_distance_between_points(self, sum_func, id1, id2):
 
-    def get_distance_between_points (self, sum_func, id1, id2):
-
-        assert 0 <= id1 <= len (POINT_COORDS)
-        assert 0 <= id2 <= len (POINT_COORDS)
+        assert 0 <= id1 <= len(POINT_COORDS)
+        assert 0 <= id2 <= len(POINT_COORDS)
         assert sum_func == "cartesian" or sum_func == "haversine"
-       
+
         query_1_to_2 = """
         SELECT xsd:integer(tracker:%s-distance(?lat1,?lat2,?lon1,?lon2))
         WHERE {
@@ -78,58 +80,56 @@ class TestDistanceFunctions (CommonTrackerStoreTest):
              mlo:longitude ?lon2 .
         }
         """ % (sum_func, id1, id2)
-        result = self.tracker.query (query_1_to_2)
-        return int (result[0][0])
-        
-        
-    def test_distance_cartesian_symmetry (self):
+        result = self.tracker.query(query_1_to_2)
+        return int(result[0][0])
+
+    def test_distance_cartesian_symmetry(self):
         """
         setUp: Insert 5 points in the pre-defined coordinates
         1. TEST: Check cartesian distance from point A to B, and from B to A
                  (should be the same :P)
         tearDown: Remove the test points inserted before
         """
-        a_to_b = self.get_distance_between_points ("cartesian", 1, 2)
+        a_to_b = self.get_distance_between_points("cartesian", 1, 2)
         assert a_to_b == 204601
 
-        b_to_a = self.get_distance_between_points ("cartesian", 2, 1)
+        b_to_a = self.get_distance_between_points("cartesian", 2, 1)
         assert b_to_a == 204601
 
-        assert a_to_b == b_to_a 
+        assert a_to_b == b_to_a
 
-    def test_distance_haversine_symmetry (self):
+    def test_distance_haversine_symmetry(self):
         """
         setUp: Insert 5 points in the pre-defined coordinates
         1. TEST: Check cartesian distance from point A to B, and from B to A
                  (should be the same :P)
         tearDown: Remove the test points inserted before
         """
-        a_to_b = self.get_distance_between_points ("haversine", 1, 2)
+        a_to_b = self.get_distance_between_points("haversine", 1, 2)
         assert a_to_b == 157225
-        
-        b_to_a = self.get_distance_between_points ("haversine", 2, 1)
+
+        b_to_a = self.get_distance_between_points("haversine", 2, 1)
         assert b_to_a == 157225
 
         assert a_to_b == b_to_a
 
-
-    def test_distance_cartesian_proportion (self):
-        d_1_to_2 = self.get_distance_between_points ("cartesian", 1, 2)
-        d_2_to_3 = self.get_distance_between_points ("cartesian", 2, 3)
-        d_3_to_4 = self.get_distance_between_points ("cartesian", 3, 4)
+    def test_distance_cartesian_proportion(self):
+        d_1_to_2 = self.get_distance_between_points("cartesian", 1, 2)
+        d_2_to_3 = self.get_distance_between_points("cartesian", 2, 3)
+        d_3_to_4 = self.get_distance_between_points("cartesian", 3, 4)
         assert d_1_to_2 > d_2_to_3 > d_3_to_4
 
-    def test_distance_haversine_proportion (self):
-        d_1_to_2 = self.get_distance_between_points ("haversine", 1, 2)
-        d_2_to_3 = self.get_distance_between_points ("haversine", 2, 3)
-        d_3_to_4 = self.get_distance_between_points ("haversine", 3, 4)
+    def test_distance_haversine_proportion(self):
+        d_1_to_2 = self.get_distance_between_points("haversine", 1, 2)
+        d_2_to_3 = self.get_distance_between_points("haversine", 2, 3)
+        d_3_to_4 = self.get_distance_between_points("haversine", 3, 4)
         assert d_1_to_2 > d_2_to_3 > d_3_to_4
 
-    def test_distance_different (self):
-        d_2_to_3h = self.get_distance_between_points ("haversine", 2, 3)
-        d_2_to_3c = self.get_distance_between_points ("cartesian", 2, 3)
+    def test_distance_different(self):
+        d_2_to_3h = self.get_distance_between_points("haversine", 2, 3)
+        d_2_to_3c = self.get_distance_between_points("cartesian", 2, 3)
         assert d_2_to_3h < d_2_to_3c
-        
+
 
 if __name__ == '__main__':
     ut.main()
diff --git a/tests/functional-tests/07-graph.py b/tests/functional-tests/07-graph.py
index 206d100..cb9d1d7 100755
--- a/tests/functional-tests/07-graph.py
+++ b/tests/functional-tests/07-graph.py
@@ -29,13 +29,15 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestGraphs (CommonTrackerStoreTest):
+
     """
     Insert triplets in different graphs and check the query results asking in
     one specific graph, in all of them and so on.
     """
 
-    def test_graph_filter (self):
+    def test_graph_filter(self):
         """
         1. Insert a contact with different phone numbers from different sources
         2. Query phone numbers of a single graph
@@ -62,7 +64,7 @@ class TestGraphs (CommonTrackerStoreTest):
             }
         }
         """
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
         query = """
         SELECT ?contact ?number WHERE {
@@ -72,11 +74,11 @@ class TestGraphs (CommonTrackerStoreTest):
             }
         } ORDER BY DESC (fts:rank(?contact))
         """
-        results = self.tracker.query (query)
+        results = self.tracker.query(query)
 
-        self.assertEquals (len(results), 1)
-        self.assertEquals (results[0][0], "contact://test/graph/1")
-        self.assertEquals (results[0][1], "tel:+1234567891")
+        self.assertEquals(len(results), 1)
+        self.assertEquals(results[0][0], "contact://test/graph/1")
+        self.assertEquals(results[0][1], "tel:+1234567891")
 
         delete_sparql = """
         DELETE {
@@ -87,7 +89,7 @@ class TestGraphs (CommonTrackerStoreTest):
         }
         """
 
-    def test_graph_insert_multiple (self):
+    def test_graph_insert_multiple(self):
         """
         1. Insert a contact with the same phone number from different sources
         2. Query graph uri of hasPhoneNumber statement
@@ -110,7 +112,7 @@ class TestGraphs (CommonTrackerStoreTest):
             }
         }
         """
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
         query = """
         SELECT ?contact ?g WHERE {
@@ -120,10 +122,10 @@ class TestGraphs (CommonTrackerStoreTest):
             }
         }
         """
-        results = self.tracker.query (query)
-        self.assertEquals (len(results), 1)
-        self.assertEquals (results[0][0], "contact://test/graph/1")
-        self.assertEquals (results[0][1], "graph://test/graph/0")
+        results = self.tracker.query(query)
+        self.assertEquals(len(results), 1)
+        self.assertEquals(results[0][0], "contact://test/graph/1")
+        self.assertEquals(results[0][1], "graph://test/graph/0")
 
         delete_sparql = """
         DELETE {
@@ -133,6 +135,5 @@ class TestGraphs (CommonTrackerStoreTest):
         """
 
 
-
 if __name__ == '__main__':
     ut.main()
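
Note on 07-graph.py above: both test_graph_filter and
test_graph_insert_multiple build a delete_sparql string as their final step
but never pass it to the store, so the contacts inserted into the test
graphs are left behind after each run. A sketch of the cleanup call,
assuming the same update helper used for the inserts:

    self.tracker.update(delete_sparql)
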
diff --git a/tests/functional-tests/08-unique-insertions.py b/tests/functional-tests/08-unique-insertions.py
index ee67d86..ae235f9 100755
--- a/tests/functional-tests/08-unique-insertions.py
+++ b/tests/functional-tests/08-unique-insertions.py
@@ -29,13 +29,15 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestMinerInsertBehaviour (CommonTrackerStoreTest):
+
     """
     Mimic the behaviour of the miner, removing the previous information of the resource
     and inserting a new one.
     """
 
-    def test_miner_unique_insertion (self):
+    def test_miner_unique_insertion(self):
         """
         We actually can't test tracker-miner-fs, so we mimick its behavior in this test
         1. Insert one resource
@@ -63,22 +65,22 @@ class TestMinerInsertBehaviour (CommonTrackerStoreTest):
         """
 
         ''' First insertion '''
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
-        results = self.tracker.query (select_sparql)
-        self.assertEquals (len(results), 1)
+        results = self.tracker.query(select_sparql)
+        self.assertEquals(len(results), 1)
 
         ''' Second insertion / update '''
-        self.tracker.update (insert_sparql)
+        self.tracker.update(insert_sparql)
 
-        results = self.tracker.query (select_sparql)
-        self.assertEquals (len(results), 1)
+        results = self.tracker.query(select_sparql)
+        self.assertEquals(len(results), 1)
 
         ''' Clean up '''
-        self.tracker.update (delete_sparql)
+        self.tracker.update(delete_sparql)
 
-        results = self.tracker.query (select_sparql)
-        self.assertEquals (len(results), 0)
+        results = self.tracker.query(select_sparql)
+        self.assertEquals(len(results), 0)
 
 
 if __name__ == '__main__':
diff --git a/tests/functional-tests/09-concurrent-query.py b/tests/functional-tests/09-concurrent-query.py
index 652d428..f30d341 100755
--- a/tests/functional-tests/09-concurrent-query.py
+++ b/tests/functional-tests/09-concurrent-query.py
@@ -20,7 +20,9 @@
 """
 Send concurrent inserts and queries to the daemon to check the concurrency.
 """
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -37,70 +39,73 @@ from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreT
 AMOUNT_OF_TEST_INSTANCES = 100
 AMOUNT_OF_QUERIES = 10
 
+
 class TestConcurrentQuery (CommonTrackerStoreTest):
+
     """
     Send a bunch of queries to the daemon asynchronously, to test the queue
     holding those queries
     """
-    def setUp (self):
-        self.main_loop = GObject.MainLoop ()
-        
-        self.mock_data_insert ()
+
+    def setUp(self):
+        self.main_loop = GObject.MainLoop()
+
+        self.mock_data_insert()
         self.finish_counter = 0
-        
-    def mock_data_insert (self):
+
+    def mock_data_insert(self):
         query = "INSERT {\n"
-        for i in range (0, AMOUNT_OF_TEST_INSTANCES):
-            query += "<test-09:instance-%d> a nco:PersonContact ; nco:fullname 'moe %d'.\n" % (i, i)
+        for i in range(0, AMOUNT_OF_TEST_INSTANCES):
+            query += "<test-09:instance-%d> a nco:PersonContact ; nco:fullname 'moe %d'.\n" % (
+                i, i)
         query += "}"
-        self.tracker.update (query)
-        
-    def mock_data_delete (self):
+        self.tracker.update(query)
+
+    def mock_data_delete(self):
         query = "DELETE {\n"
-        for i in range (0, AMOUNT_OF_TEST_INSTANCES):
+        for i in range(0, AMOUNT_OF_TEST_INSTANCES):
             query += "<test-09:instance-%d> a rdfs:Resource.\n" % (i)
         query += "}"
-        self.tracker.update (query)
+        self.tracker.update(query)
 
         query = "DELETE {\n"
-        for i in range (0, AMOUNT_OF_QUERIES):
+        for i in range(0, AMOUNT_OF_QUERIES):
             query += "<test-09:picture-%d> a rdfs:Resource.\n" % (i)
         query += "}"
-        self.tracker.update (query)
-
+        self.tracker.update(query)
 
-    def test_async_queries (self):
+    def test_async_queries(self):
         QUERY = "SELECT ?u WHERE { ?u a nco:PersonContact. FILTER regex (?u, 'test-09:ins')}"
         UPDATE = "INSERT { <test-09:picture-%d> a nmm:Photo. }"
-        for i in range (0, AMOUNT_OF_QUERIES):
-            self.tracker.get_tracker_iface ().SparqlQuery (QUERY,
-                                                           reply_handler=self.reply_cb,
-                                                           error_handler=self.error_handler)
-            self.tracker.get_tracker_iface ().SparqlUpdate (UPDATE % (i),
-                                                            reply_handler=self.update_cb,
-                                                            error_handler=self.error_handler)
-            
+        for i in range(0, AMOUNT_OF_QUERIES):
+            self.tracker.get_tracker_iface().SparqlQuery(QUERY,
+                                                         reply_handler=self.reply_cb,
+                                                         error_handler=self.error_handler)
+            self.tracker.get_tracker_iface().SparqlUpdate(UPDATE % (i),
+                                                          reply_handler=self.update_cb,
+                                                          error_handler=self.error_handler)
+
         # Safeguard of 50 seconds. The last reply should quit the loop
-        GObject.timeout_add_seconds (60, self.timeout_cb)
-        self.main_loop.run ()
-        
-    def reply_cb (self, results):
+        GObject.timeout_add_seconds(60, self.timeout_cb)
+        self.main_loop.run()
+
+    def reply_cb(self, results):
         self.finish_counter += 1
-        self.assertEquals (len (results), AMOUNT_OF_TEST_INSTANCES)
+        self.assertEquals(len(results), AMOUNT_OF_TEST_INSTANCES)
         if (self.finish_counter >= AMOUNT_OF_QUERIES):
-            self.timeout_cb ()
+            self.timeout_cb()
 
-    def update_cb (self):
-        self.assertTrue (True)
+    def update_cb(self):
+        self.assertTrue(True)
 
-    def error_handler (self):
+    def error_handler(self):
         print "ERROR in DBus call"
-        self.assertTrue (False)
+        self.assertTrue(False)
 
-    def timeout_cb (self):
-        self.mock_data_delete ()
-        self.main_loop.quit ()
+    def timeout_cb(self):
+        self.mock_data_delete()
+        self.main_loop.quit()
         return False
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
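
Note on TestConcurrentQuery above: error_handler(self) takes no argument
besides self, but dbus-python passes the DBusException describing the
failure to an error_handler (the handlers in 10-sqlite-misused.py below
accept an error_msg parameter for this reason). As written, a failed
asynchronous call would crash the handler with a TypeError instead of
failing the test cleanly. A sketch of the expected signature:

    def error_handler(self, error):
        print "ERROR in DBus call", error
        self.assertTrue(False)
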
diff --git a/tests/functional-tests/10-sqlite-misused.py b/tests/functional-tests/10-sqlite-misused.py
index f1b8ca7..37bbe5d 100755
--- a/tests/functional-tests/10-sqlite-misused.py
+++ b/tests/functional-tests/10-sqlite-misused.py
@@ -21,7 +21,8 @@
 Test the query while importing at the same time. This was raising
 some SQLITE_MISUSED errors before.
 """
-import os, dbus
+import os
+import dbus
 from gi.repository import GObject
 from dbus.mainloop.glib import DBusGMainLoop
 
@@ -30,58 +31,61 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TestSqliteMisused (CommonTrackerStoreTest):
+
     """
     Send queries while importing files (in .ttl directory)
     """
-    def setUp (self):
-        self.main_loop = GObject.MainLoop ()
+
+    def setUp(self):
+        self.main_loop = GObject.MainLoop()
         self.files_counter = 0
-        
-    def test_queries_while_import (self):
-        self.assertTrue (os.path.exists ('ttl'))
+
+    def test_queries_while_import(self):
+        self.assertTrue(os.path.exists('ttl'))
         for root, dirs, files in os.walk('ttl'):
-            for ttl_file in filter (lambda f: f.endswith (".ttl"), files):
-                full_path = os.path.abspath(os.path.join (root, ttl_file))
+            for ttl_file in filter(lambda f: f.endswith(".ttl"), files):
+                full_path = os.path.abspath(os.path.join(root, ttl_file))
                 self.files_counter += 1
-                self.tracker.get_tracker_iface ().Load ("file://" + full_path,
-                                     timeout=30000,
-                                     reply_handler=self.loaded_success_cb,
-                                     error_handler=self.loaded_failed_cb)
-        
-        GObject.timeout_add_seconds (2, self.run_a_query)
+                self.tracker.get_tracker_iface().Load("file://" + full_path,
+                                                      timeout=30000,
+                                                      reply_handler=self.loaded_success_cb,
+                                                      error_handler=self.loaded_failed_cb)
+
+        GObject.timeout_add_seconds(2, self.run_a_query)
         # Safeguard of 60 seconds. The last reply should quit the loop
-        GObject.timeout_add_seconds (60, self.timeout_cb)
-        self.main_loop.run ()
+        GObject.timeout_add_seconds(60, self.timeout_cb)
+        self.main_loop.run()
 
-    def run_a_query (self):
+    def run_a_query(self):
         QUERY = "SELECT ?u ?title WHERE { ?u a nie:InformationElement; nie:title ?title. }"
-        self.tracker.get_tracker_iface ().SparqlQuery (QUERY, timeout=20000,
-                                                       reply_handler=self.reply_cb,
-                                                       error_handler=self.error_handler)
+        self.tracker.get_tracker_iface().SparqlQuery(QUERY, timeout=20000,
+                                                     reply_handler=self.reply_cb,
+                                                     error_handler=self.error_handler)
         return True
-        
-    def reply_cb (self, results):
+
+    def reply_cb(self, results):
         print "Query replied correctly"
 
-    def error_handler (self, error_msg):
+    def error_handler(self, error_msg):
         print "ERROR in DBus call", error_msg
 
-    def loaded_success_cb (self):
+    def loaded_success_cb(self):
         self.files_counter -= 1
         if (self.files_counter == 0):
             print "Last file loaded"
-            self.timeout_cb ()
+            self.timeout_cb()
         print "Success loading a file"
 
-    def loaded_failed_cb (self, error):
+    def loaded_failed_cb(self, error):
         print "Failed loading a file"
-        self.assertTrue (False)
+        self.assertTrue(False)
 
-    def timeout_cb (self):
+    def timeout_cb(self):
         print "Forced timeout after 60 sec."
-        self.main_loop.quit ()
+        self.main_loop.quit()
         return False
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/11-sqlite-batch-misused.py b/tests/functional-tests/11-sqlite-batch-misused.py
index 230fe99..5a26b77 100755
--- a/tests/functional-tests/11-sqlite-batch-misused.py
+++ b/tests/functional-tests/11-sqlite-batch-misused.py
@@ -21,7 +21,8 @@
 Test the query while running BatchSparqlUpdate at the same time. This was raising
 some SQLITE_MISUSED errors before.
 """
-import os, dbus
+import os
+import dbus
 from gi.repository import GObject
 from dbus.mainloop.glib import DBusGMainLoop
 
@@ -33,77 +34,80 @@ from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreT
 # Number of instances per batch
 BATCH_SIZE = 3000
 
+
 class TestSqliteBatchMisused (CommonTrackerStoreTest):
+
     """
     Send big batchSparqlUpdates and run queries at the same time
     Don't run this script directly, use the bash script "force-sqlite-misused.sh" instead
     to configure properly the environment
     """
-    def setUp (self):
-        self.main_loop = GObject.MainLoop ()
+
+    def setUp(self):
+        self.main_loop = GObject.MainLoop()
         self.batch_counter = 0
-        
-    def test_queries_while_batch_insert (self):
-        self.assertTrue (os.path.exists ('ttl'))
-        
+
+    def test_queries_while_batch_insert(self):
+        self.assertTrue(os.path.exists('ttl'))
+
         for root, dirs, files in os.walk('ttl'):
-            for ttl_file in filter (lambda f: f.endswith (".ttl"), files):
-                full_path = os.path.abspath(os.path.join (root, ttl_file))
+            for ttl_file in filter(lambda f: f.endswith(".ttl"), files):
+                full_path = os.path.abspath(os.path.join(root, ttl_file))
                 print full_path
 
                 counter = 0
                 current_batch = ""
                 for line in open(full_path):
-                    if (line.startswith ("@prefix")):
+                    if (line.startswith("@prefix")):
                         continue
                     current_batch += line
-                    if len(line) > 1 and line[:-1].endswith ('.'):
+                    if len(line) > 1 and line[:-1].endswith('.'):
                         counter += 1
-                
+
                     if counter == BATCH_SIZE:
                         query = "INSERT {" + current_batch + "}"
-                        self.tracker.get_tracker_iface ().BatchSparqlUpdate (query,
-                                                          timeout=20000,
-                                                          reply_handler=self.batch_success_cb,
-                                                          error_handler=self.batch_failed_cb)
-                        self.run_a_query ()
+                        self.tracker.get_tracker_iface(
+                        ).BatchSparqlUpdate(query,
+                                            timeout=20000,
+                                            reply_handler=self.batch_success_cb,
+                                            error_handler=self.batch_failed_cb)
+                        self.run_a_query()
                         counter = 0
                         current_batch = ""
                         self.batch_counter += 1
-                        
-        
-        GObject.timeout_add_seconds (2, self.run_a_query)
+
+        GObject.timeout_add_seconds(2, self.run_a_query)
         # Safeguard of 60 seconds. The last reply should quit the loop
-        GObject.timeout_add_seconds (60, self.timeout_cb)
-        self.main_loop.run ()
+        GObject.timeout_add_seconds(60, self.timeout_cb)
+        self.main_loop.run()
 
-    def run_a_query (self):
+    def run_a_query(self):
         QUERY = "SELECT ?u ?title WHERE { ?u a nie:InformationElement; nie:title ?title. }"
-        self.tracker.get_tracker_iface ().SparqlQuery (QUERY, timeout=20000,
-                                                       reply_handler=self.reply_cb,
-                                                       error_handler=self.error_handler)
+        self.tracker.get_tracker_iface().SparqlQuery(QUERY, timeout=20000,
+                                                     reply_handler=self.reply_cb,
+                                                     error_handler=self.error_handler)
         return True
-        
-    def reply_cb (self, results):
+
+    def reply_cb(self, results):
         print "Query replied correctly"
 
-    def error_handler (self, error_msg):
+    def error_handler(self, error_msg):
         print "Query failed", error_msg
 
-    def batch_success_cb (self):
+    def batch_success_cb(self):
         self.batch_counter -= 1
         if (self.batch_counter == 0):
             print "Last batch was success"
-            self.timeout_cb ()
+            self.timeout_cb()
         print "Success processing a batch"
 
-    def batch_failed_cb (self, error):
+    def batch_failed_cb(self, error):
         print "Failed processing a batch"
 
-    def timeout_cb (self):
+    def timeout_cb(self):
         print "Forced timeout after 60 sec."
-        self.main_loop.quit ()
+        self.main_loop.quit()
         return False
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/12-transactions.py b/tests/functional-tests/12-transactions.py
index a3c19f4..ab62c2d 100755
--- a/tests/functional-tests/12-transactions.py
+++ b/tests/functional-tests/12-transactions.py
@@ -30,7 +30,9 @@ from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreT
 
 TEST_INSTANCE_PATTERN = "test://12-transactions-%d"
 
+
 class TrackerTransactionsTest (CommonTrackerStoreTest):
+
     """
     In a loop:
        1. Inserts a Batch of instances
@@ -41,49 +43,49 @@ class TrackerTransactionsTest (CommonTrackerStoreTest):
     If the commit was real, all the inserted instances should be there.
     """
 
-    def setUp (self):
+    def setUp(self):
         self.instance_counter = 0
 
-    def tearDown (self):
+    def tearDown(self):
         print "Tear down (will take some time to remove all resources)"
         delete_sparql = "DELETE { ?u a rdfs:Resource } WHERE { ?u a nmo:Email} \n"
-        self.tracker.update (delete_sparql,
-                             timeout=60000)
+        self.tracker.update(delete_sparql,
+                            timeout=60000)
         self.instance_counter = 0
 
-    def insert_and_commit (self, number):
+    def insert_and_commit(self, number):
         insert_sparql = "INSERT {\n"
-        for i in range (0, number):
-            insert_sparql += "  <" + TEST_INSTANCE_PATTERN % (self.instance_counter) + ">"
+        for i in range(0, number):
+            insert_sparql += "  <" + \
+                TEST_INSTANCE_PATTERN % (self.instance_counter) + ">"
             insert_sparql += " a nmo:Email.\n "
             self.instance_counter += 1
 
         insert_sparql += "}"
-        self.tracker.batch_update (insert_sparql)
-        #print "Waiting for commit (", number," instances)"
+        self.tracker.batch_update(insert_sparql)
+        # print "Waiting for commit (", number," instances)"
         #start = time.time ()
-        self.tracker.batch_commit ()
+        self.tracker.batch_commit()
         #end = time.time ()
-        #print "BatchCommit returned (after %d s.)" % (end - start)
-
+        # print "BatchCommit returned (after %d s.)" % (end - start)
 
-    def test_commit_and_abort (self):
+    def test_commit_and_abort(self):
 
-        for i in range (0, 20):
+        for i in range(0, 20):
             NUMBER_OF_INSTANCES = 1000
-            self.insert_and_commit (NUMBER_OF_INSTANCES)
+            self.insert_and_commit(NUMBER_OF_INSTANCES)
 
-            self.system.tracker_store_stop_brutally ()
-            self.system.tracker_store_start ()
+            self.system.tracker_store_stop_brutally()
+            self.system.tracker_store_start()
             try:
-                results = self.tracker.count_instances ("nmo:Email")
+                results = self.tracker.count_instances("nmo:Email")
             except:
                 print "Timeout, probably replaying journal or something (wait 20 sec.)"
-                time.sleep (20)
-                results = self.tracker.count_instances ()
+                time.sleep(20)
+                results = self.tracker.count_instances()
 
             # Every iteration we are adding new instances in the store!
-            self.assertEquals (results, NUMBER_OF_INSTANCES * (i+1))
+            self.assertEquals(results, NUMBER_OF_INSTANCES * (i + 1))
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
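
Note on test_commit_and_abort above: the except branch retries with
self.tracker.count_instances() and drops the "nmo:Email" argument used in
the try block, so the fallback either counts something else or fails
outright if the helper requires the class name, as every other call in
these tests suggests. A sketch of a retry matching the original call:

    results = self.tracker.count_instances("nmo:Email")
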
diff --git a/tests/functional-tests/13-threaded-store.py b/tests/functional-tests/13-threaded-store.py
index bda0adc..69c9704 100755
--- a/tests/functional-tests/13-threaded-store.py
+++ b/tests/functional-tests/13-threaded-store.py
@@ -21,7 +21,8 @@
 Test that the threads in the daemon are working:
  A very long query shouldn't block smaller queries.
 """
-import os, dbus
+import os
+import dbus
 from gi.repository import GObject
 from gi.repository import GLib
 import time
@@ -32,27 +33,32 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
-MAX_TEST_TIME = 60 # seconds to finish the tests (to avoid infinite waitings)
+MAX_TEST_TIME = 60  # seconds to finish the tests (to avoid infinite waitings)
+
+AMOUNT_SIMPLE_QUERIES = 10
+# ms (How long do we wait for an answer to the complex query)
+COMPLEX_QUERY_TIMEOUT = 15000
+# seconds (How freq do we send a simple query to the daemon)
+SIMPLE_QUERY_FREQ = 2
 
-AMOUNT_SIMPLE_QUERIES = 10 
-COMPLEX_QUERY_TIMEOUT = 15000 # ms (How long do we wait for an answer to the complex query)
-SIMPLE_QUERY_FREQ = 2 # seconds (How freq do we send a simple query to the daemon)
 
 class TestThreadedStore (CommonTrackerStoreTest):
+
     """
     When the database is big, running a complex query takes ages.
     After cancelling the query, any following query is queued
 
     Reported in bug NB#183499
     """
-    def setUp (self):
-        self.main_loop = GObject.MainLoop ()
+
+    def setUp(self):
+        self.main_loop = GObject.MainLoop()
         self.simple_queries_counter = AMOUNT_SIMPLE_QUERIES
         self.simple_queries_answers = 0
 
-    def __populate_database (self):
+    def __populate_database(self):
 
-        self.assertTrue (os.path.exists ('ttl'))
+        self.assertTrue(os.path.exists('ttl'))
         for ttl_file in ["010-nco_EmailAddress.ttl",
                          "011-nco_PostalAddress.ttl",
                          "012-nco_PhoneNumber.ttl",
@@ -61,16 +67,16 @@ class TestThreadedStore (CommonTrackerStoreTest):
                          "018-nco_PersonContact.ttl",
                          "012-nco_PhoneNumber.ttl",
                          "016-nco_ContactIM.ttl"]:
-            full_path = os.path.abspath(os.path.join ("ttl", ttl_file))
+            full_path = os.path.abspath(os.path.join("ttl", ttl_file))
             print full_path
-            self.tracker.get_tracker_iface ().Load ("file://" + full_path,
-                                                        timeout=30000)
+            self.tracker.get_tracker_iface().Load("file://" + full_path,
+                                                  timeout=30000)
 
-    def test_complex_query (self):
-        start = time.time ()
-        self.__populate_database ()
-        end = time.time ()
-        print "Loading: %.3f sec." % (end-start)
+    def test_complex_query(self):
+        start = time.time()
+        self.__populate_database()
+        end = time.time()
+        print "Loading: %.3f sec." % (end - start)
 
         COMPLEX_QUERY = """
         SELECT ?url nie:url(?photo) nco:imContactStatusMessage (?url)
@@ -96,50 +102,52 @@ class TestThreadedStore (CommonTrackerStoreTest):
 
         # Standard timeout
         print "Send complex query"
-        self.complex_start = time.time ()
-        self.tracker.get_tracker_iface ().SparqlQuery (COMPLEX_QUERY, timeout=COMPLEX_QUERY_TIMEOUT,
-                                                       reply_handler=self.reply_complex,
-                                                       error_handler=self.error_handler_complex)
-
-        self.timeout_id = GLib.timeout_add_seconds (MAX_TEST_TIME, self.__timeout_on_idle)
-        GLib.timeout_add_seconds (SIMPLE_QUERY_FREQ, self.__simple_query)
-        self.main_loop.run ()
-
-    def __simple_query (self):
+        self.complex_start = time.time()
+        self.tracker.get_tracker_iface(
+        ).SparqlQuery(COMPLEX_QUERY, timeout=COMPLEX_QUERY_TIMEOUT,
+                      reply_handler=self.reply_complex,
+                      error_handler=self.error_handler_complex)
+
+        self.timeout_id = GLib.timeout_add_seconds(
+            MAX_TEST_TIME, self.__timeout_on_idle)
+        GLib.timeout_add_seconds(SIMPLE_QUERY_FREQ, self.__simple_query)
+        self.main_loop.run()
+
+    def __simple_query(self):
         print "Send simple query (%d)" % (self.simple_queries_counter)
         SIMPLE_QUERY = "SELECT ?name WHERE { ?u a nco:PersonContact; nco:fullname ?name. }"
-        self.tracker.get_tracker_iface ().SparqlQuery (SIMPLE_QUERY,
-                                                       timeout=10000,
-                                                       reply_handler=self.reply_simple,
-                                                       error_handler=self.error_handler)
+        self.tracker.get_tracker_iface().SparqlQuery(SIMPLE_QUERY,
+                                                     timeout=10000,
+                                                     reply_handler=self.reply_simple,
+                                                     error_handler=self.error_handler)
         self.simple_queries_counter -= 1
         if (self.simple_queries_counter == 0):
             print "Stop sending queries (wait)"
             return False
         return True
 
-    def reply_simple (self, results):
+    def reply_simple(self, results):
         print "Simple query answered"
-        self.assertNotEquals (len (results), 0)
+        self.assertNotEquals(len(results), 0)
         self.simple_queries_answers += 1
         if (self.simple_queries_answers == AMOUNT_SIMPLE_QUERIES):
             print "All simple queries answered"
-            self.main_loop.quit ()
+            self.main_loop.quit()
 
-    def reply_complex (self, results):
-        print "Complex query: %.3f" % (time.time () - self.complex_start)
+    def reply_complex(self, results):
+        print "Complex query: %.3f" % (time.time() - self.complex_start)
 
-    def error_handler (self, error_msg):
+    def error_handler(self, error_msg):
         print "ERROR in dbus call", error_msg
 
-    def error_handler_complex (self, error_msg):
+    def error_handler_complex(self, error_msg):
         print "Complex query timedout in DBus (", error_msg, ")"
 
-    def __timeout_on_idle (self):
+    def __timeout_on_idle(self):
         print "Timeout... asumming idle"
-        self.main_loop.quit ()
+        self.main_loop.quit()
         return False
-        
+
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/14-signals.py b/tests/functional-tests/14-signals.py
index 19b01fa..d72e747 100755
--- a/tests/functional-tests/14-signals.py
+++ b/tests/functional-tests/14-signals.py
@@ -40,58 +40,62 @@ SIGNALS_IFACE = "org.freedesktop.Tracker1.Resources"
 
 CONTACT_CLASS_URI = "http://www.semanticdesktop.org/ontologies/2007/03/22/nco#PersonContact";
 
-REASONABLE_TIMEOUT = 10 # Time waiting for the signal to be emitted
+REASONABLE_TIMEOUT = 10  # Time waiting for the signal to be emitted
+
 
 class TrackerStoreSignalsTests (CommonTrackerStoreTest):
+
     """
     Insert/update/remove instances from nco:PersonContact
     and check that the signals are emitted
     """
-    def setUp (self):
+
+    def setUp(self):
         self.clean_up_list = []
         self.loop = GObject.MainLoop()
         dbus_loop = DBusGMainLoop(set_as_default=True)
-        self.bus = dbus.SessionBus (dbus_loop)
+        self.bus = dbus.SessionBus(dbus_loop)
         self.timeout_id = 0
 
         self.results_classname = None
         self.results_deletes = None
         self.results_inserts = None
 
-    def tearDown (self):
+    def tearDown(self):
         for uri in self.clean_up_list:
-            self.tracker.update ("DELETE { <%s> a rdfs:Resource }" % uri)
+            self.tracker.update("DELETE { <%s> a rdfs:Resource }" % uri)
 
         self.clean_up_list = []
 
-        
-    def __connect_signal (self):
+    def __connect_signal(self):
         """
         After connecting to the signal, call self.__wait_for_signal.
         """
-        self.cb_id = self.bus.add_signal_receiver (self.__signal_received_cb,
-                                                   signal_name=GRAPH_UPDATED_SIGNAL,
-                                                   path = SIGNALS_PATH,
-                                                   dbus_interface = SIGNALS_IFACE,
-                                                   arg0 = CONTACT_CLASS_URI)
+        self.cb_id = self.bus.add_signal_receiver(self.__signal_received_cb,
+                                                  signal_name=GRAPH_UPDATED_SIGNAL,
+                                                  path=SIGNALS_PATH,
+                                                  dbus_interface=SIGNALS_IFACE,
+                                                  arg0=CONTACT_CLASS_URI)
 
-    def __wait_for_signal (self):
+    def __wait_for_signal(self):
         """
         In the callback of the signals, there should be a self.loop.quit ()
         """
-        self.timeout_id = GLib.timeout_add_seconds (REASONABLE_TIMEOUT, self.__timeout_on_idle)
-        self.loop.run ()
+        self.timeout_id = GLib.timeout_add_seconds(
+            REASONABLE_TIMEOUT, self.__timeout_on_idle)
+        self.loop.run()
 
-    def __timeout_on_idle (self):
-        self.loop.quit ()
-        self.fail ("Timeout, the signal never came!")
+    def __timeout_on_idle(self):
+        self.loop.quit()
+        self.fail("Timeout, the signal never came!")
 
-    def __pretty_print_array (self, array):
+    def __pretty_print_array(self, array):
         for g, s, o, p in array:
-            uri, prop, value = self.tracker.query ("SELECT tracker:uri (%s), tracker:uri (%s), tracker:uri (%s) WHERE {}" % (s, o, p))
-            print " - (", "-".join ([g, uri, prop, value]), ")"
-                                    
-    def __signal_received_cb (self, classname, deletes, inserts):
+            uri, prop, value = self.tracker.query(
+                "SELECT tracker:uri (%s), tracker:uri (%s), tracker:uri (%s) WHERE {}" % (s, o, p))
+            print " - (", "-".join([g, uri, prop, value]), ")"
+
+    def __signal_received_cb(self, classname, deletes, inserts):
         """
         Save the content of the signal and disconnect the callback
         """
@@ -100,14 +104,13 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
         self.results_inserts = inserts
 
         if (self.timeout_id != 0):
-            GLib.source_remove (self.timeout_id )
+            GLib.source_remove(self.timeout_id)
             self.timeout_id = 0
-        self.loop.quit ()
-        self.bus._clean_up_signal_match (self.cb_id)
-
+        self.loop.quit()
+        self.bus._clean_up_signal_match(self.cb_id)
 
-    def test_01_insert_contact (self):
-        self.clean_up_list.append ("test://signals-contact-add")
+    def test_01_insert_contact(self):
+        self.clean_up_list.append("test://signals-contact-add")
         CONTACT = """
         INSERT {
         <test://signals-contact-add> a nco:PersonContact ;
@@ -118,16 +121,16 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
              nco:hasPhoneNumber <tel:555555555> .
         }
         """
-        self.__connect_signal ()
-        self.tracker.update (CONTACT)
-        time.sleep (1)
-        self.__wait_for_signal ()
+        self.__connect_signal()
+        self.tracker.update(CONTACT)
+        time.sleep(1)
+        self.__wait_for_signal()
 
         # validate results
-        self.assertEquals (len (self.results_deletes), 0)
-        self.assertEquals (len (self.results_inserts), 6)
-        
-    def test_02_remove_contact (self):
+        self.assertEquals(len(self.results_deletes), 0)
+        self.assertEquals(len(self.results_inserts), 6)
+
+    def test_02_remove_contact(self):
         CONTACT = """
         INSERT {
          <test://signals-contact-remove> a nco:PersonContact ;
@@ -135,56 +138,56 @@ class TrackerStoreSignalsTests (CommonTrackerStoreTest):
              nco:nameFamily 'Contact-family removed'.
         }
         """
-        self.__connect_signal ()
-        self.tracker.update (CONTACT)
-        self.__wait_for_signal ()
-        
-        self.__connect_signal ()
+        self.__connect_signal()
+        self.tracker.update(CONTACT)
+        self.__wait_for_signal()
+
+        self.__connect_signal()
         self.tracker.update ("""
             DELETE { <test://signals-contact-remove> a rdfs:Resource }
             """)
-        self.__wait_for_signal ()
+        self.__wait_for_signal()
 
         # Validate results:
-        self.assertEquals (len (self.results_deletes), 1)
-        self.assertEquals (len (self.results_inserts), 0)
+        self.assertEquals(len(self.results_deletes), 1)
+        self.assertEquals(len(self.results_inserts), 0)
+
+    def test_03_update_contact(self):
+        self.clean_up_list.append("test://signals-contact-update")
 
+        self.__connect_signal()
+        self.tracker.update(
+            "INSERT { <test://signals-contact-update> a nco:PersonContact }")
+        self.__wait_for_signal()
 
-    def test_03_update_contact (self):
-        self.clean_up_list.append ("test://signals-contact-update")
+        self.__connect_signal()
+        self.tracker.update(
+            "INSERT { <test://signals-contact-update> nco:fullname 'wohoo'}")
+        self.__wait_for_signal()
 
-        self.__connect_signal ()
-        self.tracker.update ("INSERT { <test://signals-contact-update> a nco:PersonContact }")
-        self.__wait_for_signal ()
-        
-        self.__connect_signal ()
-        self.tracker.update ("INSERT { <test://signals-contact-update> nco:fullname 'wohoo'}")
-        self.__wait_for_signal ()
+        self.assertEquals(len(self.results_deletes), 0)
+        self.assertEquals(len(self.results_inserts), 1)
 
-        self.assertEquals (len (self.results_deletes), 0)
-        self.assertEquals (len (self.results_inserts), 1)
+    def test_04_fullupdate_contact(self):
+        self.clean_up_list.append("test://signals-contact-fullupdate")
 
+        self.__connect_signal()
+        self.tracker.update(
+            "INSERT { <test://signals-contact-fullupdate> a nco:PersonContact; nco:fullname 'first value' }")
+        self.__wait_for_signal()
 
-    def test_04_fullupdate_contact (self):
-        self.clean_up_list.append ("test://signals-contact-fullupdate")
-        
-        self.__connect_signal ()
-        self.tracker.update ("INSERT { <test://signals-contact-fullupdate> a nco:PersonContact; nco:fullname 
'first value' }")
-        self.__wait_for_signal ()
-        
-        self.__connect_signal ()
+        self.__connect_signal()
         self.tracker.update ("""
                DELETE { <test://signals-contact-fullupdate> nco:fullname ?x }
                WHERE { <test://signals-contact-fullupdate> a nco:PersonContact; nco:fullname ?x }
                
                INSERT { <test://signals-contact-fullupdate> nco:fullname 'second value'}
                """)
-        self.__wait_for_signal ()
+        self.__wait_for_signal()
+
+        self.assertEquals(len(self.results_deletes), 1)
+        self.assertEquals(len(self.results_inserts), 1)
 
-        self.assertEquals (len (self.results_deletes), 1)
-        self.assertEquals (len (self.results_inserts), 1)
-        
 
 if __name__ == "__main__":
     ut.main()
-
diff --git a/tests/functional-tests/15-statistics.py b/tests/functional-tests/15-statistics.py
index 89efc3d..0c36425 100755
--- a/tests/functional-tests/15-statistics.py
+++ b/tests/functional-tests/15-statistics.py
@@ -33,96 +33,98 @@ RDFS_RESOURCE = "rdfs:Resource"
 NIE_IE = "nie:InformationElement"
 RDFS_CLASS = "rdfs:Class"
 
+
 class TrackerStoreStatisticsTests (CommonTrackerStoreTest):
+
     """
     Check initial statistics, add, remove, update content and check results stats
     """
-    def __get_stats (self):
+
+    def __get_stats(self):
         results = {}
-        for classname, count in self.tracker.get_stats ():
-            results [str(classname)] = int(count)
+        for classname, count in self.tracker.get_stats():
+            results[str(classname)] = int(count)
         return results
 
-    def setUp (self):
+    def setUp(self):
         """
         Each test append to this list the used URIS, so they can be removed
         in the tearDown
         """
         self.clean_up_instances = []
 
-    def tearDown (self):
+    def tearDown(self):
         for uri in self.clean_up_instances:
-            self.tracker.update ("DELETE { <%s> a rdfs:Resource. }" % (uri))
+            self.tracker.update("DELETE { <%s> a rdfs:Resource. }" % (uri))
         self.clean_up_instances = []
-        time.sleep (1)
-    
-    def test_stats_01_insert_base_class (self):
-        self.clean_up_instances.append ("test://stats-01")
-        
-        old_stats = self.__get_stats ()
-        self.tracker.update ("INSERT { <test://stats-01> a nie:InformationElement. }")
-        new_stats = self.__get_stats ()
+        time.sleep(1)
+
+    def test_stats_01_insert_base_class(self):
+        self.clean_up_instances.append("test://stats-01")
+
+        old_stats = self.__get_stats()
+        self.tracker.update(
+            "INSERT { <test://stats-01> a nie:InformationElement. }")
+        new_stats = self.__get_stats()
 
         increased_classes = [NIE_IE, RDFS_RESOURCE]
 
-        for k, v in new_stats.iteritems ():
+        for k, v in new_stats.iteritems():
             if k in increased_classes:
-                self.assertEquals (old_stats[k]+1, new_stats[k])
+                self.assertEquals(old_stats[k] + 1, new_stats[k])
             else:
-                self.assertEquals (old_stats [k], new_stats [k],
-                                   "Class %s should have the same instances" % k)
+                self.assertEquals(old_stats[k], new_stats[k],
+                                  "Class %s should have the same instances" % k)
 
+    def test_stats_02_insert_deep_class(self):
+        self.clean_up_instances.append("test://stats-02")
+        old_stats = self.__get_stats()
+        self.tracker.update("INSERT { <test://stats-02> a nmm:Photo. }")
+        new_stats = self.__get_stats()
 
-    def test_stats_02_insert_deep_class (self):
-        self.clean_up_instances.append ("test://stats-02")
-        old_stats = self.__get_stats ()
-        self.tracker.update ("INSERT { <test://stats-02> a nmm:Photo. }")
-        new_stats = self.__get_stats ()
-
-        increased_classes = [ NIE_IE, RDFS_RESOURCE]
+        increased_classes = [NIE_IE, RDFS_RESOURCE]
         new_classes = ["nmm:Photo", "nfo:Visual", "nfo:Image", "nfo:Media"]
 
         # There were no instances of those classes before, check they are now
         for c in new_classes:
-            self.assertIn (c, new_stats)
-        
-        for k, v in new_stats.iteritems ():
+            self.assertIn(c, new_stats)
+
+        for k, v in new_stats.iteritems():
             if k in increased_classes:
-                self.assertEquals (old_stats [k]+1, new_stats[k])
+                self.assertEquals(old_stats[k] + 1, new_stats[k])
             elif k in new_classes:
                 # This classes could exists previous or not!
-                if old_stats.has_key (k):
-                    self.assertEquals (old_stats [k]+1, new_stats [k])
+                if old_stats.has_key(k):
+                    self.assertEquals(old_stats[k] + 1, new_stats[k])
                 else:
-                    self.assertEquals (new_stats [k], 1)
+                    self.assertEquals(new_stats[k], 1)
             else:
-                self.assertEquals (old_stats [k], new_stats[k])
+                self.assertEquals(old_stats[k], new_stats[k])
 
-    def test_stats_03_delete_deep_class (self):
-        self.clean_up_instances.append ("test://stats-03")
-        self.tracker.update ("INSERT { <test://stats-03> a nmm:Photo. }")
+    def test_stats_03_delete_deep_class(self):
+        self.clean_up_instances.append("test://stats-03")
+        self.tracker.update("INSERT { <test://stats-03> a nmm:Photo. }")
 
-        old_stats = self.__get_stats ()
-        self.tracker.update ("DELETE { <test://stats-03> a rdfs:Resource. }")
-        new_stats = self.__get_stats ()
+        old_stats = self.__get_stats()
+        self.tracker.update("DELETE { <test://stats-03> a rdfs:Resource. }")
+        new_stats = self.__get_stats()
 
         decreased_classes = [NIE_IE, RDFS_RESOURCE]
         # These classes could have no instance
-        no_instances_classes = ["nmm:Photo", "nfo:Visual", "nfo:Image", "nfo:Media"]
+        no_instances_classes = [
+            "nmm:Photo", "nfo:Visual", "nfo:Image", "nfo:Media"]
 
         for c in no_instances_classes:
             if (old_stats[c] == 1):
-                self.assertNotIn (c, new_stats)
+                self.assertNotIn(c, new_stats)
             else:
-                self.assertEquals (old_stats[c]-1, new_stats[c])
+                self.assertEquals(old_stats[c] - 1, new_stats[c])
 
-        for k, v in new_stats.iteritems ():
+        for k, v in new_stats.iteritems():
             if k in decreased_classes:
-                self.assertEquals (old_stats [k]-1, new_stats[k])
+                self.assertEquals(old_stats[k] - 1, new_stats[k])
             else:
-                self.assertEquals (old_stats [k], new_stats [k])
+                self.assertEquals(old_stats[k], new_stats[k])
 
 if __name__ == "__main__":
-    ut.main ()
-
-    
+    ut.main()
diff --git a/tests/functional-tests/16-collation.py b/tests/functional-tests/16-collation.py
index 8bab53a..8c51065 100755
--- a/tests/functional-tests/16-collation.py
+++ b/tests/functional-tests/16-collation.py
@@ -31,31 +31,34 @@ import unittest2 as ut
 #import unittest as ut
 from common.utils.storetest import CommonTrackerStoreTest as CommonTrackerStoreTest
 
+
 class TrackerStoreCollationTests (CommonTrackerStoreTest):
+
     """
     Insert few instances with a text field containing collation-problematic words.
     Ask for those instances order by the field and check the results.
     """
-    def setUp (self):
+
+    def setUp(self):
         """
         Each test append to this list the used URIS, so they can be removed
         in the tearDown
         """
         self.clean_up_instances = []
 
-    def tearDown (self):
+    def tearDown(self):
         for uri in self.clean_up_instances:
-            self.tracker.update ("DELETE { <%s> a rdfs:Resource. }" % (uri))
+            self.tracker.update("DELETE { <%s> a rdfs:Resource. }" % (uri))
         self.clean_up_instances = []
-        time.sleep (1)
+        time.sleep(1)
 
-    def __insert_text (self, text):
-        uri = "test://collation-01-%d" % (random.randint (1, 1000))
+    def __insert_text(self, text):
+        uri = "test://collation-01-%d" % (random.randint(1, 1000))
         # There is a remote chance to get a duplicate int
         while (uri in self.clean_up_instances):
-            uri = "test://collation-01-%d" % (random.randint (1, 1000))
-        self.clean_up_instances.append (uri)
-        
+            uri = "test://collation-01-%d" % (random.randint(1, 1000))
+        self.clean_up_instances.append(uri)
+
         self.tracker.update ("""
         INSERT {
             <%s> a nie:InformationElement ;
@@ -64,7 +67,7 @@ class TrackerStoreCollationTests (CommonTrackerStoreTest):
         }
          """ % (uri, text))
 
-    def __get_text_sorted_by_collation (self):
+    def __get_text_sorted_by_collation(self):
         return self.tracker.query ("""
          SELECT ?title WHERE {
             ?u a nie:InformationElement ;
@@ -73,57 +76,58 @@ class TrackerStoreCollationTests (CommonTrackerStoreTest):
          } ORDER BY ?title
         """)
 
-    def __collation_test (self, input_list, expected_list):
+    def __collation_test(self, input_list, expected_list):
 
         for i in input_list:
-            self.__insert_text (i)
-
-        results = [unicode(r[0]) for r in self.__get_text_sorted_by_collation ()]
-        self.assertEquals (len (results), len (expected_list))
-        
-        for r in range (0, len (results)):
-            self.assertEquals (results[r], expected_list [r],
-                               """Error:
+            self.__insert_text(i)
+
+        results = [unicode(r[0])
+                   for r in self.__get_text_sorted_by_collation()]
+        self.assertEquals(len(results), len(expected_list))
+
+        for r in range(0, len(results)):
+            self.assertEquals(results[r], expected_list[r],
+                              """Error:
                                   Expected : *** %s
                                   Result   : *** %s
                                   Using locale (%s, %s)
                                """ % (expected_list,
                                       results,
-                                      locale.getdefaultlocale ()[0],
-                                      locale.getdefaultlocale ()[1]))
+                                      locale.getdefaultlocale()[0],
+                                      locale.getdefaultlocale()[1]))
 
-    def test_collation_01 (self):
+    def test_collation_01(self):
         """
         Behaves as case-insensitive
         """
         input_dt = ["abb", "bb",  "Abc", "Ba"]
         expected = ["abb", "Abc", "Ba",  "bb"]
-        self.__collation_test (input_dt, expected)
+        self.__collation_test(input_dt, expected)
 
-    def test_collation_02 (self):
+    def test_collation_02(self):
         """
         In conflict, Capital letters go *after* small letters
         """
         input_dt = ["Bb", "bb", "aa", "Aa"]
         expected = ["aa", "Aa", "bb", "Bb"]
-        self.__collation_test (input_dt, expected)
+        self.__collation_test(input_dt, expected)
 
-    def test_collation_03 (self):
+    def test_collation_03(self):
         """
         Example from the unicode spec
         http://www.unicode.org/reports/tr10/#Main_Algorithm
         """
         input_dt = ["Cab", "cab", "dab", "cáb"]
         expected = ["cab", "Cab", u"cáb", "dab"]
-        self.__collation_test (input_dt, expected)
+        self.__collation_test(input_dt, expected)
 
-    def test_collation_04 (self):
+    def test_collation_04(self):
         """
         Spanish test in english locale
         """
         input_dt = ["ä", "ö", "a", "e", "i", "o", "u"]
         expected = ["a", u"ä", "e", "i", "o", u"ö", "u"]
-        self.__collation_test (input_dt, expected)
+        self.__collation_test(input_dt, expected)
 
 if __name__ == "__main__":
     print """
@@ -131,6 +135,4 @@ if __name__ == "__main__":
       * Check what happens in non-english encoding
       * Dynamic change of collation (not implemented yet in tracker)
     """
-    ut.main ()
-
-    
+    ut.main()
diff --git a/tests/functional-tests/17-ontology-changes.py b/tests/functional-tests/17-ontology-changes.py
index bbbf0ff..43a3058 100755
--- a/tests/functional-tests/17-ontology-changes.py
+++ b/tests/functional-tests/17-ontology-changes.py
@@ -25,7 +25,7 @@ changes and checking if the data is still there.
 import time
 
 import os
-import dbus # Just for the Exception
+import dbus  # Just for the Exception
 from common.utils import configuration as cfg
 import unittest2 as ut
 #import unittest as ut
@@ -45,7 +45,9 @@ TEST_PREFIX = "http://example.org/ns#";
 import re
 import time
 
+
 class OntologyChangeTestTemplate (ut.TestCase):
+
     """
     Template class for the ontology changes tests. The tests are subclasses
     of this, implementing these methods:
@@ -58,70 +60,68 @@ class OntologyChangeTestTemplate (ut.TestCase):
    
     Check doc in those methods for the specific details.
     """
-        
-    def get_ontology_dir (self, param):
-        local = os.path.join (os.getcwd (), "test-ontologies", param)
-        if (os.path.exists (local)):
+
+    def get_ontology_dir(self, param):
+        local = os.path.join(os.getcwd(), "test-ontologies", param)
+        if (os.path.exists(local)):
             # Use local directory if available
             return local
         else:
-            return os.path.join (cfg.DATADIR, "tracker-tests",
-                                    "test-ontologies", param)
+            return os.path.join(cfg.DATADIR, "tracker-tests",
+                                "test-ontologies", param)
 
-    def setUp (self):
-        self.system = TrackerSystemAbstraction ()
+    def setUp(self):
+        self.system = TrackerSystemAbstraction()
 
-    def tearDown (self):
-        self.system.tracker_store_testing_stop ()
+    def tearDown(self):
+        self.system.tracker_store_testing_stop()
 
-    def template_test_ontology_change (self):
+    def template_test_ontology_change(self):
 
-        self.set_ontology_dirs ()
+        self.set_ontology_dirs()
 
-        
-        basic_ontologies = self.get_ontology_dir (self.FIRST_ONTOLOGY_DIR)
-        modified_ontologies = self.get_ontology_dir (self.SECOND_ONTOLOGY_DIR)
+        basic_ontologies = self.get_ontology_dir(self.FIRST_ONTOLOGY_DIR)
+        modified_ontologies = self.get_ontology_dir(self.SECOND_ONTOLOGY_DIR)
 
-        self.__assert_ontology_dates (basic_ontologies, modified_ontologies)
+        self.__assert_ontology_dates(basic_ontologies, modified_ontologies)
 
-
-        self.system.tracker_store_testing_start (ontodir=basic_ontologies)
+        self.system.tracker_store_testing_start(ontodir=basic_ontologies)
         self.tracker = self.system.store
 
-        self.insert_data ()
+        self.insert_data()
 
         try:
             # Boot the second set of ontologies
-            self.system.tracker_store_restart_with_new_ontologies (modified_ontologies)
+            self.system.tracker_store_restart_with_new_ontologies(
+                modified_ontologies)
         except UnableToBootException, e:
-            self.fail (str(self.__class__) + " " + str(e))
+            self.fail(str(self.__class__) + " " + str(e))
 
-        self.validate_status ()
+        self.validate_status()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         """
         Implement this method in the subclass setting values for:
         self.FIRST_ONTOLOGY_DIR and
         self.SECOND_ONTOLOGY_DIR
         """
-        raise Exception ("Subclasses must implement 'set_ontology_dir'")
+        raise Exception("Subclasses must implement 'set_ontology_dir'")
 
-    def insert_data (self):
+    def insert_data(self):
         """
         Put in the store some data with the FIRST ontology
         """
-        raise Exception ("Subclasses must implement 'insert_data'")
+        raise Exception("Subclasses must implement 'insert_data'")
 
-    def validate_status (self):
+    def validate_status(self):
         """
         This is called after restarting the store with the SECOND ontology
         Check that the inserted data was handled correctly and the ontology
         is up to date
         """
-        raise Exception ("Subclasses must implement 'validate_status'")
-
+        raise Exception("Subclasses must implement 'validate_status'")
 
-    def assertInDbusResult (self, member, dbus_result, column=0):
+    def assertInDbusResult(self, member, dbus_result, column=0):
         """
         Convenience assertion used in these tests
         """
@@ -129,437 +129,496 @@ class OntologyChangeTestTemplate (ut.TestCase):
             if member == row[column]:
                 return
         # This is going to fail with pretty printing
-        self.assertIn (member, dbus_result) 
+        self.assertIn(member, dbus_result)
 
-    def assertNotInDbusResult (self, member, dbus_result, column=0):
+    def assertNotInDbusResult(self, member, dbus_result, column=0):
         """
         Convenience assertion used in these tests
         """
         for row in dbus_result:
             if member == str(row[column]):
                 # This is going to fail with pretty printing
-                self.fail ("'%s' wasn't supposed to be in '%s'" % (member, dbus_result))
+                self.fail("'%s' wasn't supposed to be in '%s'" %
+                          (member, dbus_result))
         return
 
-    def __assert_ontology_dates (self, first_dir, second_dir):
+    def __assert_ontology_dates(self, first_dir, second_dir):
         """
         Asserts that 91-test.ontology in second_dir has a more recent
         modification time than in first_dir
         """
         ISO9601_REGEX = "(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z)"
 
-        def get_ontology_date (ontology):
-            for line in open (ontology, 'r'):
+        def get_ontology_date(ontology):
+            for line in open(ontology, 'r'):
                 if "nao:lastModified" in line:
-                    getmodtime = re.compile ('nao:lastModified\ \"' + ISO9601_REGEX + '\"')
-                    modtime_match = getmodtime.search (line)
+                    getmodtime = re.compile(
+                        'nao:lastModified\ \"' + ISO9601_REGEX + '\"')
+                    modtime_match = getmodtime.search(line)
 
                     if (modtime_match):
-                        nao_date = modtime_match.group (1)
-                        return time.strptime(nao_date, "%Y-%m-%dT%H:%M:%SZ")  
+                        nao_date = modtime_match.group(1)
+                        return time.strptime(nao_date, "%Y-%m-%dT%H:%M:%SZ")
                     else:
                         print "something funky in", line
                     break
 
-        first_date = get_ontology_date (os.path.join (first_dir, "91-test.ontology"))
-        second_date = get_ontology_date (os.path.join (second_dir, "91-test.ontology"))
+        first_date = get_ontology_date(
+            os.path.join(first_dir, "91-test.ontology"))
+        second_date = get_ontology_date(
+            os.path.join(second_dir, "91-test.ontology"))
         if first_date >= second_date:
-            self.fail ("nao:modifiedTime in '%s' is not more recent in the second ontology" % 
("91-test.ontology"))
-        
+            self.fail(
+                "nao:modifiedTime in '%s' is not more recent in the second ontology" %
+                       ("91-test.ontology"))
 
-        
 
 class PropertyRangeStringToDate (OntologyChangeTestTemplate):
+
     """
     Change the range of a property from string to date. There shouldn't be any data loss.
     """
 
     @expectedFailureJournal()
-    def test_property_range_string_to_date (self):
-        self.template_test_ontology_change ()
+    def test_property_range_string_to_date(self):
+        self.template_test_ontology_change()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "property-range-string-to-date"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/property-range/string-to-date"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_string '2010-10-12T13:30:00Z' }"
-                             % (self.instance))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_string '2010-10-12T13:30:00Z' }"
+                            % (self.instance))
 
-    def validate_status (self):
+    def validate_status(self):
         # Query the ontology itself
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_string rdfs:range ?o }")
-        self.assertEquals (result[0][0], XSD_DATETIME)
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_string rdfs:range ?o }")
+        self.assertEquals(result[0][0], XSD_DATETIME)
 
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_string ?o . }" % (self.instance))
-        self.assertEquals (result[0][0], "2010-10-12T13:30:00Z")
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_string ?o . }" % (self.instance))
+        self.assertEquals(result[0][0], "2010-10-12T13:30:00Z")
 
 
 class PropertyRangeDateToString (OntologyChangeTestTemplate):
+
     """
     Change the range of a property from date to string. There shouldn't be any data loss.
     """
 
     @expectedFailureJournal()
-    def test_property_range_date_to_string (self):
-        self.template_test_ontology_change ()
+    def test_property_range_date_to_string(self):
+        self.template_test_ontology_change()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "property-range-string-to-date"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
-        
-    def insert_data (self):
+
+    def insert_data(self):
         self.instance = "test://ontology-change/property-range/date-to-string"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_string '2010-10-12T13:30:00Z' }"
-                             % (self.instance))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_string '2010-10-12T13:30:00Z' }"
+                            % (self.instance))
 
-    def validate_status (self):
+    def validate_status(self):
         # Query the ontology itself
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_string rdfs:range ?o }")
-        self.assertEquals (result[0][0], XSD_STRING)
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_string rdfs:range ?o }")
+        self.assertEquals(result[0][0], XSD_STRING)
 
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_string ?o . }" % (self.instance))
-        self.assertEquals (result[0][0], "2010-10-12T13:30:00Z")
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_string ?o . }" % (self.instance))
+        self.assertEquals(result[0][0], "2010-10-12T13:30:00Z")
+
 
 class PropertyRangeIntToString (OntologyChangeTestTemplate):
+
     """
     Change the range of a property from int to string. There shouldn't be any data loss.
     """
-    def test_property_range_int_to_str (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_property_range_int_to_str(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "property-range-int-to-string"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/property-range/int-to-string"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_int 12. }" % (self.instance))
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_int 12. }" % (self.instance))
 
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_int rdfs:range ?o. }")
-        self.assertEquals (str(result[0][0]), XSD_STRING)
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_int rdfs:range ?o. }")
+        self.assertEquals(str(result[0][0]), XSD_STRING)
 
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_int ?o .}" % (self.instance))
-        self.assertEquals (result[0][0], "12")
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_int ?o .}" % (self.instance))
+        self.assertEquals(result[0][0], "12")
+
 
 class PropertyRangeStringToInt (OntologyChangeTestTemplate):
+
     """
     Change the range of a property from string to int. There shouldn't be any data loss.
     """
 
-    def test_property_range_str_to_int (self):
-        self.template_test_ontology_change ()
+    def test_property_range_str_to_int(self):
+        self.template_test_ontology_change()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "property-range-int-to-string"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/property-range/string-to-int"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_int '12'. }" % (self.instance))
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_int '12'. }" % (self.instance))
 
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_int rdfs:range ?o. }")
-        self.assertEquals (str(result[0][0]), XSD_INTEGER)
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_int rdfs:range ?o. }")
+        self.assertEquals(str(result[0][0]), XSD_INTEGER)
 
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_int ?o .}" % (self.instance))
-        self.assertEquals (result[0][0], "12")
-        
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_int ?o .}" % (self.instance))
+        self.assertEquals(result[0][0], "12")
+
+
 class PropertyMaxCardinality1toN (OntologyChangeTestTemplate):
+
     """
     Change cardinality of a property from 1 to N. There shouldn't be any data loss
     """
 
     @expectedFailureJournal()
-    def test_property_cardinality_1_to_n (self):
-        self.template_test_ontology_change ()
+    def test_property_cardinality_1_to_n(self):
+        self.template_test_ontology_change()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         #self.FIRST_ONTOLOGY_DIR = "basic"
         #self.SECOND_ONTOLOGY_DIR = "cardinality"
 
         self.FIRST_ONTOLOGY_DIR = "cardinality"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/cardinality/1-to-n"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_n_cardinality 'some text'. }" % (self.instance))
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_n_cardinality 'some text'. }" % (self.instance))
+
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
+        self.assertEquals(int(result[0][0]), 1)
 
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
-        self.assertEquals (int (result[0][0]), 1)
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
+        self.assertEquals(len(result), 0, "Cardinality should be 0")
 
-                
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
-        self.assertEquals (len (result), 0, "Cardinality should be 0")
-        
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_n_cardinality ?o .}" % (self.instance))
-        self.assertEquals (str(result[0][0]), "some text")
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_n_cardinality ?o .}" % (self.instance))
+        self.assertEquals(str(result[0][0]), "some text")
+
 
 class PropertyMaxCardinalityNto1 (OntologyChangeTestTemplate):
+
     """
     Change the cardinality of a property for N to 1.
     """
 
     @expectedFailureJournal()
-    def test_property_cardinality_n_to_1 (self):
-        self.template_test_ontology_change ()
+    def test_property_cardinality_n_to_1(self):
+        self.template_test_ontology_change()
 
-    def set_ontology_dirs (self):
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "cardinality"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/cardinality/1-to-n"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_n_cardinality 'some text'. }" % (self.instance))
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_n_cardinality 'some text'. }" % (self.instance))
+
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
+        self.assertEquals(len(result), 0, "Cardinality should be 0")
 
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
-        self.assertEquals (len (result), 0, "Cardinality should be 0")
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
+        self.assertEquals(int(result[0][0]), 1, "Cardinality should be 1")
 
-                
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?o WHERE { test:a_n_cardinality nrl:maxCardinality ?o}")
-        self.assertEquals (int (result[0][0]), 1, "Cardinality should be 1")
-        
         # Check the value is there
-        result = self.tracker.query ("SELECT ?o WHERE { <%s> test:a_n_cardinality ?o .}" % (self.instance))
-        self.assertEquals (str(result[0][0]), "some text")
+        result = self.tracker.query(
+            "SELECT ?o WHERE { <%s> test:a_n_cardinality ?o .}" % (self.instance))
+        self.assertEquals(str(result[0][0]), "some text")
+
 
 class ClassNotifySet (OntologyChangeTestTemplate):
+
     """
     Set tracker:notify to true in a class and check there is no data loss
     """
-    def test_property_notify_set (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_property_notify_set(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "notify"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/notify/true"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_string 'some text'. }" % (self.instance))
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_string 'some text'. }" % (self.instance))
 
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?notify WHERE { test:A tracker:notify ?notify}")
+        self.assertEquals(str(result[0][0]), "true")
+
+        result = self.tracker.query("SELECT ?u WHERE { ?u a test:A. }")
+        self.assertEquals(str(result[0][0]), self.instance)
 
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?notify WHERE { test:A tracker:notify ?notify}")
-        self.assertEquals (str(result[0][0]), "true")
-        
-        result = self.tracker.query ("SELECT ?u WHERE { ?u a test:A. }")
-        self.assertEquals (str(result[0][0]), self.instance)
 
 class ClassNotifyUnset (OntologyChangeTestTemplate):
+
     """
     Set tracker:notify to true in a class and check there is no data loss
     """
-    def test_property_notify_set (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_property_notify_set(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "notify"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance = "test://ontology-change/notify/true"
-        self.tracker.update ("INSERT { <%s> a test:A; test:a_string 'some text'. }" % (self.instance))
-
+        self.tracker.update(
+            "INSERT { <%s> a test:A; test:a_string 'some text'. }" % (self.instance))
 
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?notify WHERE { test:A tracker:notify ?notify}")
-        if (len (result) == 1):
+    def validate_status(self):
+        result = self.tracker.query(
+            "SELECT ?notify WHERE { test:A tracker:notify ?notify}")
+        if (len(result) == 1):
             # Usually is (none) but it was "true" before so now has value.
-            self.assertEquals (result[0][0], "false")
+            self.assertEquals(result[0][0], "false")
         else:
-            self.assertEquals (len (result), 0)
-        
-        result = self.tracker.query ("SELECT ?u WHERE { ?u a test:A. }")
-        self.assertEquals (str(result[0][0]), self.instance)
+            self.assertEquals(len(result), 0)
+
+        result = self.tracker.query("SELECT ?u WHERE { ?u a test:A. }")
+        self.assertEquals(str(result[0][0]), self.instance)
 
 
 class PropertyIndexedSet (OntologyChangeTestTemplate):
+
     """
     Set tracker:indexed true to single and multiple valued properties.
     Check that instances and content of the property are still in the DB
     """
-    def test_indexed_set (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_indexed_set(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "indexed"
 
-    def insert_data (self):
+    def insert_data(self):
         # Instance with value in the single valued property
         self.instance_single_valued = "test://ontology-change/indexed/single/true"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_string 'anything 1'. }"
-                             % (self.instance_single_valued))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_string 'anything 1'. }"
+                            % (self.instance_single_valued))
 
         # Instance with value in the n valued property
         self.instance_n_valued = "test://ontology-change/indexed/multiple/true"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_n_cardinality 'anything n'. }"
-                             % (self.instance_n_valued))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_n_cardinality 'anything n'. }"
+                            % (self.instance_n_valued))
 
-    def validate_status (self):
+    def validate_status(self):
         # Check ontology and instance for the single valued property
-        result = self.tracker.query ("SELECT ?indexed WHERE { test:a_string tracker:indexed ?indexed}")
-        self.assertEquals (str(result[0][0]), "true")
+        result = self.tracker.query(
+            "SELECT ?indexed WHERE { test:a_string tracker:indexed ?indexed}")
+        self.assertEquals(str(result[0][0]), "true")
 
-        result = self.tracker.query ("SELECT ?content WHERE { <%s> a test:A; test:a_string ?content. }"
-                                     % (self.instance_single_valued))
-        self.assertEquals (str(result[0][0]), "anything 1")
+        result = self.tracker.query("SELECT ?content WHERE { <%s> a test:A; test:a_string ?content. }"
+                                    % (self.instance_single_valued))
+        self.assertEquals(str(result[0][0]), "anything 1")
 
         # Check ontology and instance for the multiple valued property
-        result = self.tracker.query ("SELECT ?indexed WHERE { test:a_n_cardinality tracker:indexed 
?indexed}")
-        self.assertEquals (str(result[0][0]), "true")
+        result = self.tracker.query(
+            "SELECT ?indexed WHERE { test:a_n_cardinality tracker:indexed ?indexed}")
+        self.assertEquals(str(result[0][0]), "true")
+
+        result = self.tracker.query("SELECT ?content WHERE { <%s> a test:A; test:a_n_cardinality ?content. }"
+                                    % (self.instance_n_valued))
+        self.assertEquals(str(result[0][0]), "anything n")
 
-        result = self.tracker.query ("SELECT ?content WHERE { <%s> a test:A; test:a_n_cardinality ?content. 
}"
-                                     % (self.instance_n_valued))
-        self.assertEquals (str(result[0][0]), "anything n")
 
 class PropertyIndexedUnset (OntologyChangeTestTemplate):
+
     """
     tracker:indexed property from true to false in single and multiple valued properties.
     Check that instances and content of the property are still in the DB.
     """
-    def test_indexed_unset (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_indexed_unset(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "indexed"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         # Instance with value in the single valued property
         self.instance_single_valued = "test://ontology-change/indexed/single/true"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_string 'anything 1'. }"
-                             % (self.instance_single_valued))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_string 'anything 1'. }"
+                            % (self.instance_single_valued))
 
         # Instance with value in the n valued property
         self.instance_n_valued = "test://ontology-change/indexed/multiple/true"
-        self.tracker.update ("INSERT { <%s> a test:A ; test:a_n_cardinality 'anything n'. }"
-                             % (self.instance_n_valued))
+        self.tracker.update("INSERT { <%s> a test:A ; test:a_n_cardinality 'anything n'. }"
+                            % (self.instance_n_valued))
 
-    def validate_status (self):
+    def validate_status(self):
         #
         # NOTE: tracker:indexed can be 'false' or None. In both cases is fine.
-        # 
-        
+        #
+
         # Check ontology and instance for the single valued property
-        result = self.tracker.query ("SELECT ?indexed WHERE { test:a_string tracker:indexed ?indexed}")
-        self.assertEquals (str(result[0][0]), "false")
+        result = self.tracker.query(
+            "SELECT ?indexed WHERE { test:a_string tracker:indexed ?indexed}")
+        self.assertEquals(str(result[0][0]), "false")
 
-        result = self.tracker.query ("SELECT ?content WHERE { <%s> a test:A; test:a_string ?content. }"
-                                     % (self.instance_single_valued))
-        self.assertEquals (str(result[0][0]), "anything 1")
+        result = self.tracker.query("SELECT ?content WHERE { <%s> a test:A; test:a_string ?content. }"
+                                    % (self.instance_single_valued))
+        self.assertEquals(str(result[0][0]), "anything 1")
 
         # Check ontology and instance for the multiple valued property
-        result = self.tracker.query ("SELECT ?indexed WHERE { test:a_n_cardinality tracker:indexed 
?indexed}")
-        self.assertEquals (str(result[0][0]), "false")
+        result = self.tracker.query(
+            "SELECT ?indexed WHERE { test:a_n_cardinality tracker:indexed ?indexed}")
+        self.assertEquals(str(result[0][0]), "false")
+
+        result = self.tracker.query("SELECT ?content WHERE { <%s> a test:A; test:a_n_cardinality ?content. }"
+                                    % (self.instance_n_valued))
+        self.assertEquals(str(result[0][0]), "anything n")
 
-        result = self.tracker.query ("SELECT ?content WHERE { <%s> a test:A; test:a_n_cardinality ?content. 
}"
-                                     % (self.instance_n_valued))
-        self.assertEquals (str(result[0][0]), "anything n")
 
 class OntologyAddClassTest (OntologyChangeTestTemplate):
+
     """
     Add a class in the ontology.
     """
-    def test_ontology_add_class (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_ontology_add_class(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "add-class"
 
-    def insert_data (self):
+    def insert_data(self):
         # No need, adding a class
         pass
 
-    def validate_status (self):
+    def validate_status(self):
         # check the class is there
-        result = self.tracker.query ("SELECT ?k WHERE { ?k a rdfs:Class. }")
-        self.assertInDbusResult (TEST_PREFIX + "D", result)
+        result = self.tracker.query("SELECT ?k WHERE { ?k a rdfs:Class. }")
+        self.assertInDbusResult(TEST_PREFIX + "D", result)
 
-        result = self.tracker.query ("SELECT ?k WHERE { ?k a rdfs:Class. }")
-        self.assertInDbusResult (TEST_PREFIX + "E", result)
+        result = self.tracker.query("SELECT ?k WHERE { ?k a rdfs:Class. }")
+        self.assertInDbusResult(TEST_PREFIX + "E", result)
 
 
 class OntologyRemoveClassTest (OntologyChangeTestTemplate):
+
     """
     Remove a class from the ontology. With and without superclasses.
     """
-    def test_ontology_remove_class (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_ontology_remove_class(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "add-class"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance_e = "test://ontology-change/removal/class/1"
-        self.tracker.update ("INSERT { <%s> a test:E. }" % self.instance_e)
+        self.tracker.update("INSERT { <%s> a test:E. }" % self.instance_e)
 
         self.instance_d = "test://ontology-change/removal/class/2"
-        self.tracker.update ("INSERT { <%s> a test:D. }" % self.instance_d)
+        self.tracker.update("INSERT { <%s> a test:D. }" % self.instance_d)
 
-    def validate_status (self):
+    def validate_status(self):
         #
         # The classes are not actually removed... so this assertions are not valid (yet?)
         #
-        
+
         #result = self.tracker.query ("SELECT ?k WHERE { ?k a rdfs:Class. }")
         #self.assertNotInDbusResult (TEST_PREFIX + "E", result)
         #self.assertNotInDbusResult (TEST_PREFIX + "D", result)
 
         # D is a subclass of A, removing D should keep the A instances
-        result = self.tracker.query ("SELECT ?i WHERE { ?i a test:A. }")
-        self.assertEquals (result[0][0], self.instance_e)
+        result = self.tracker.query("SELECT ?i WHERE { ?i a test:A. }")
+        self.assertEquals(result[0][0], self.instance_e)
+
 
 class OntologyAddPropertyTest (OntologyChangeTestTemplate):
+
     """
     Add new properties in the ontology, with/without super prop and different ranges and cardinalities
     """
-    def test_ontology_add_property (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_ontology_add_property(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "add-prop"
 
-    def insert_data (self):
+    def insert_data(self):
         # No need, adding new properties
         pass
 
-    def validate_status (self):
-        result = self.tracker.query ("SELECT ?k WHERE { ?k a rdf:Property}")
-        self.assertInDbusResult (TEST_PREFIX + "new_prop_int", result)
-        self.assertInDbusResult (TEST_PREFIX + "new_prop_int_n", result)
+    def validate_status(self):
+        result = self.tracker.query("SELECT ?k WHERE { ?k a rdf:Property}")
+        self.assertInDbusResult(TEST_PREFIX + "new_prop_int", result)
+        self.assertInDbusResult(TEST_PREFIX + "new_prop_int_n", result)
 
-        self.assertInDbusResult (TEST_PREFIX + "new_prop_string", result)
-        self.assertInDbusResult (TEST_PREFIX + "new_prop_string_n", result)
+        self.assertInDbusResult(TEST_PREFIX + "new_prop_string", result)
+        self.assertInDbusResult(TEST_PREFIX + "new_prop_string_n", result)
+
+        self.assertInDbusResult(TEST_PREFIX + "new_subprop_string", result)
+        self.assertInDbusResult(TEST_PREFIX + "new_subprop_string_n", result)
 
-        self.assertInDbusResult (TEST_PREFIX + "new_subprop_string", result)
-        self.assertInDbusResult (TEST_PREFIX + "new_subprop_string_n", result)
 
 class OntologyRemovePropertyTest (OntologyChangeTestTemplate):
+
     """
     Remove properties from the ontology, with and without super props and different ranges and cardinalities
     """
-    def test_ontology_remove_property (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_ontology_remove_property(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "add-prop"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance_a = "test://ontology-change/remove/properties/1"
         self.tracker.update ("""
             INSERT { <%s> a   test:A;
@@ -578,14 +637,16 @@ class OntologyRemovePropertyTest (OntologyChangeTestTemplate):
                           test:new_subprop_string_n 'super-prop also keeps this value'.
                    }
         """ % (self.instance_b))
-        self.assertTrue (self.tracker.ask ("ASK { <%s> a test:A}" % (self.instance_a)), "The instance is not there")
+        self.assertTrue(
+            self.tracker.ask("ASK { <%s> a test:A}" % (self.instance_a)), "The instance is not there")
 
-    def validate_status (self):
+    def validate_status(self):
         #
         # Note: on removal basically nothing happens. The property and values are still in the DB
         #
-        # Maybe we should test there forcing a db reconstruction and journal replay
-        
+        # Maybe we should test there forcing a db reconstruction and journal
+        # replay
+
         # First the ontology
         ## result = self.tracker.query ("SELECT ?k WHERE { ?k a rdf:Property}")
         ## self.assertNotInDbusResult (TEST_PREFIX + "new_prop_int", result)
@@ -598,27 +659,34 @@ class OntologyRemovePropertyTest (OntologyChangeTestTemplate):
         ## self.assertNotInDbusResult (TEST_PREFIX + "new_subprop_string_n", result)
 
         # The instances are still there
-        self.assertTrue (self.tracker.ask ("ASK { <%s> a test:A}" % (self.instance_a)))
-        self.assertTrue (self.tracker.ask ("ASK { <%s> a test:B}" % (self.instance_b)))
+        self.assertTrue(
+            self.tracker.ask("ASK { <%s> a test:A}" % (self.instance_a)))
+        self.assertTrue(
+            self.tracker.ask("ASK { <%s> a test:B}" % (self.instance_b)))
+
+        check = self.tracker.ask(
+            "ASK { <%s> test:a_superprop 'super-prop keeps this value' }" % (self.instance_b))
+        self.assertTrue(check, "This property and value should exist")
+
+        check = self.tracker.ask(
+            "ASK { <%s> test:a_superprop_n 'super-prop also keeps this value' }" % (self.instance_b))
+        self.assertTrue(check, "This property and value should exist")
 
-        check = self.tracker.ask ("ASK { <%s> test:a_superprop 'super-prop keeps this value' }" % 
(self.instance_b))
-        self.assertTrue (check, "This property and value should exist")
-        
-        check = self.tracker.ask ("ASK { <%s> test:a_superprop_n 'super-prop also keeps this value' }" % 
(self.instance_b))
-        self.assertTrue (check, "This property and value should exist")
 
 class DomainIndexAddTest (OntologyChangeTestTemplate):
+
     """
     Add tracker:domainIndex to a class and check there is no data loss.
     """
-    def test_domain_index_add (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_domain_index_add(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "add-domainIndex"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance_a = "test://ontology-changes/properties/add-domain-index/a"
         self.tracker.update ("""
             INSERT { <%s> a test:B ;
@@ -631,34 +699,40 @@ class DomainIndexAddTest (OntologyChangeTestTemplate):
                           test:a_string 'test-value' ;
                           test:a_n_cardinality 'another-test-value'. }""" % (self.instance_b))
 
-    def validate_status (self):
+    def validate_status(self):
         # Check the ontology
-        has_domainIndex = self.tracker.ask ("ASK { test:B tracker:domainIndex test:a_string }")
-        self.assertTrue (has_domainIndex)
+        has_domainIndex = self.tracker.ask(
+            "ASK { test:B tracker:domainIndex test:a_string }")
+        self.assertTrue(has_domainIndex)
 
-        has_domainIndex = self.tracker.ask ("ASK { test:C tracker:domainIndex test:a_n_cardinality }")
-        self.assertTrue (has_domainIndex)
+        has_domainIndex = self.tracker.ask(
+            "ASK { test:C tracker:domainIndex test:a_n_cardinality }")
+        self.assertTrue(has_domainIndex)
 
         # Check the data
-        dataok = self.tracker.ask ("ASK { <%s> test:a_string 'test-value' }" % (self.instance_a))
-        self.assertTrue (dataok)
+        dataok = self.tracker.ask(
+            "ASK { <%s> test:a_string 'test-value' }" % (self.instance_a))
+        self.assertTrue(dataok)
 
-        dataok = self.tracker.ask ("ASK { <%s> test:a_n_cardinality 'another-test-value' }" % 
(self.instance_b))
-        self.assertTrue (dataok)
+        dataok = self.tracker.ask(
+            "ASK { <%s> test:a_n_cardinality 'another-test-value' }" % (self.instance_b))
+        self.assertTrue(dataok)
 
 
 class DomainIndexRemoveTest (OntologyChangeTestTemplate):
+
     """
     Remove tracker:domainIndex to a class and check there is no data loss.
     """
-    def test_domain_index_remove (self):
-        self.template_test_ontology_change ()
 
-    def set_ontology_dirs (self):
+    def test_domain_index_remove(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "add-domainIndex"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
 
-    def insert_data (self):
+    def insert_data(self):
         self.instance_a = "test://ontology-changes/properties/add-domain-index/a"
         self.tracker.update ("""
             INSERT { <%s> a test:B ;
@@ -671,38 +745,43 @@ class DomainIndexRemoveTest (OntologyChangeTestTemplate):
                           test:a_string 'test-value' ;
                           test:a_n_cardinality 'another-test-value'. }""" % (self.instance_b))
 
-    def validate_status (self):
+    def validate_status(self):
         # Check the ontology
-        has_domainIndex = self.tracker.ask ("ASK { test:B tracker:domainIndex test:a_string }")
-        self.assertFalse (has_domainIndex)
+        has_domainIndex = self.tracker.ask(
+            "ASK { test:B tracker:domainIndex test:a_string }")
+        self.assertFalse(has_domainIndex)
 
-        has_domainIndex = self.tracker.ask ("ASK { test:C tracker:domainIndex test:a_n_cardinality }")
-        self.assertFalse (has_domainIndex)
+        has_domainIndex = self.tracker.ask(
+            "ASK { test:C tracker:domainIndex test:a_n_cardinality }")
+        self.assertFalse(has_domainIndex)
 
         # Check the data
-        dataok = self.tracker.ask ("ASK { <%s> test:a_string 'test-value' }" % (self.instance_a))
-        self.assertTrue (dataok)
+        dataok = self.tracker.ask(
+            "ASK { <%s> test:a_string 'test-value' }" % (self.instance_a))
+        self.assertTrue(dataok)
 
-        dataok = self.tracker.ask ("ASK { <%s> test:a_n_cardinality 'another-test-value' }" % 
(self.instance_b))
-        self.assertTrue (dataok)
+        dataok = self.tracker.ask(
+            "ASK { <%s> test:a_n_cardinality 'another-test-value' }" % (self.instance_b))
+        self.assertTrue(dataok)
 
 
 class SuperclassRemovalTest (OntologyChangeTestTemplate):
+
     """
     Remove the superclass relation between two classes
     """
     @expectedFailureJournal()
-    def test_superclass_removal (self):
-        self.template_test_ontology_change ()
-        
-    def set_ontology_dirs (self):
+    def test_superclass_removal(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "superclass-remove"
-            
-    def insert_data (self):
-        is_subclass = self.tracker.ask ("ASK {test:B rdfs:subClassOf test:A}")
-        self.assertTrue (is_subclass)
-        
+
+    def insert_data(self):
+        is_subclass = self.tracker.ask("ASK {test:B rdfs:subClassOf test:A}")
+        self.assertTrue(is_subclass)
+
         self.instance_a = "test://ontology-changes/superclasses/remove-superclass/a"
         self.tracker.update ("""
          INSERT { <%s> a test:A . }
@@ -713,38 +792,40 @@ class SuperclassRemovalTest (OntologyChangeTestTemplate):
          INSERT { <%s> a test:B . }
         """ % (self.instance_b))
 
-        result = self.tracker.count_instances ("test:B")
-        self.assertEquals (result, 1)
+        result = self.tracker.count_instances("test:B")
+        self.assertEquals(result, 1)
 
-        result = self.tracker.count_instances ("test:A")
-        self.assertEquals (result, 2)
+        result = self.tracker.count_instances("test:A")
+        self.assertEquals(result, 2)
 
-    def validate_status (self):
-        is_subclass = self.tracker.ask ("ASK {test:B rdfs:subClassOf test:A}")
-        self.assertFalse (is_subclass)
+    def validate_status(self):
+        is_subclass = self.tracker.ask("ASK {test:B rdfs:subClassOf test:A}")
+        self.assertFalse(is_subclass)
 
-        result = self.tracker.count_instances ("test:B")
-        self.assertEquals (result, 1)
+        result = self.tracker.count_instances("test:B")
+        self.assertEquals(result, 1)
+
+        result = self.tracker.count_instances("test:A")
+        self.assertEquals(result, 1)
 
-        result = self.tracker.count_instances ("test:A")
-        self.assertEquals (result, 1)
 
 class SuperclassAdditionTest (OntologyChangeTestTemplate):
+
     """
     Add a superclass to a class with no superclass previously
     """
     @expectedFailureJournal()
-    def test_superclass_addition (self):
-        self.template_test_ontology_change ()
-        
-    def set_ontology_dirs (self):
+    def test_superclass_addition(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "superclass-remove"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
-            
-    def insert_data (self):
-        is_subclass = self.tracker.ask ("ASK {test:B rdfs:subClassOf test:A}")
-        self.assertFalse (is_subclass)
-        
+
+    def insert_data(self):
+        is_subclass = self.tracker.ask("ASK {test:B rdfs:subClassOf test:A}")
+        self.assertFalse(is_subclass)
+
         self.instance_a = "test://ontology-changes/superclasses/remove-superclass/a"
         self.tracker.update ("""
          INSERT { <%s> a test:A . }
@@ -755,71 +836,74 @@ class SuperclassAdditionTest (OntologyChangeTestTemplate):
          INSERT { <%s> a test:B . }
         """ % (self.instance_b))
 
-        result = self.tracker.count_instances ("test:B")
-        self.assertEquals (result, 1)
+        result = self.tracker.count_instances("test:B")
+        self.assertEquals(result, 1)
 
-        result = self.tracker.count_instances ("test:A")
-        self.assertEquals (result, 1)
-        
-    def validate_status (self):
-        is_subclass = self.tracker.ask ("ASK {test:B rdfs:subClassOf test:A}")
-        self.assertTrue (is_subclass)
+        result = self.tracker.count_instances("test:A")
+        self.assertEquals(result, 1)
 
-        result = self.tracker.count_instances ("test:B")
-        self.assertEquals (result, 1)
+    def validate_status(self):
+        is_subclass = self.tracker.ask("ASK {test:B rdfs:subClassOf test:A}")
+        self.assertTrue(is_subclass)
+
+        result = self.tracker.count_instances("test:B")
+        self.assertEquals(result, 1)
+
+        result = self.tracker.count_instances("test:A")
+        self.assertEquals(result, 2)
 
-        result = self.tracker.count_instances ("test:A")
-        self.assertEquals (result, 2)
-        
 
 class PropertyPromotionTest (OntologyChangeTestTemplate):
+
     """
     Move a property to the superclass
     """
     @expectedFailureJournal()
-    def test_property_promotion (self):
-        self.template_test_ontology_change ()
-        
-    def set_ontology_dirs (self):
+    def test_property_promotion(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "basic"
         self.SECOND_ONTOLOGY_DIR = "property-promotion"
-            
-    def insert_data (self):
+
+    def insert_data(self):
         self.instance_b = "test://ontology-change/property/promotion-to-superclass/b"
         self.tracker.update ("""
             INSERT { <%s> a test:B; test:b_property 'content-b-test'; test:b_property_n 'b-test-n'. }
            """ % (self.instance_b))
 
         self.instance_a = "test://ontology-change/property/promotion-to-superclass/a"
-        self.assertRaises (dbus.DBusException,
-                           self.tracker.update,
-                           "INSERT { <%s> a test:A; test:b_property 'content-a-test'.}" % (self.instance_a))
-        
-    def validate_status (self):
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.update,
+                          "INSERT { <%s> a test:A; test:b_property 'content-a-test'.}" % (self.instance_a))
+
+    def validate_status(self):
         # This insertion should work now
         self.tracker.update ("""
            INSERT { <%s> a test:A; test:b_property 'content-a-test'.}
         """ % (self.instance_a))
 
         # No data loss
-        result = self.tracker.query ("SELECT ?v ?w WHERE { <%s> test:b_property ?v ; test:b_property_n ?w }"
-                                     % (self.instance_b))
-        self.assertEquals (result [0][0], "content-b-test")
-        self.assertEquals (result [0][1], "b-test-n")
+        result = self.tracker.query("SELECT ?v ?w WHERE { <%s> test:b_property ?v ; test:b_property_n ?w }"
+                                    % (self.instance_b))
+        self.assertEquals(result[0][0], "content-b-test")
+        self.assertEquals(result[0][1], "b-test-n")
+
 
 class PropertyRelegationTest (OntologyChangeTestTemplate):
+
     """
     Move a property to the subclass
     """
     @expectedFailureJournal()
-    def test_property_relegation (self):
-        self.template_test_ontology_change ()
-        
-    def set_ontology_dirs (self):
+    def test_property_relegation(self):
+        self.template_test_ontology_change()
+
+    def set_ontology_dirs(self):
         self.FIRST_ONTOLOGY_DIR = "property-promotion"
         self.SECOND_ONTOLOGY_DIR = "basic-future"
-            
-    def insert_data (self):
+
+    def insert_data(self):
         self.instance_b = "test://ontology-change/property/promotion-to-superclass/b"
         self.tracker.update ("""
             INSERT { <%s> a test:B; test:b_property 'content-b-test'; test:b_property_n 'b-test-n'. }
@@ -829,21 +913,18 @@ class PropertyRelegationTest (OntologyChangeTestTemplate):
         self.tracker.update ("""
            INSERT { <%s> a test:A; test:b_property 'content-a-test'.}
         """ % (self.instance_a))
-        
-    def validate_status (self):
+
+    def validate_status(self):
         # This insertion should fail now
-        self.assertRaises (dbus.DBusException,
-                           self.tracker.update,
-                           "INSERT { <%s> a test:A; test:b_property 'content-a-test'.}" % (self.instance_a))
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.update,
+                          "INSERT { <%s> a test:A; test:b_property 'content-a-test'.}" % (self.instance_a))
         # No data loss
-        result = self.tracker.query ("SELECT ?v ?w WHERE { <%s> test:b_property ?v; test:b_property_n ?w }"
-                                     % (self.instance_b))
-        self.assertEquals (result [0][0], "content-b-test")
-        self.assertEquals (result [0][1], "b-test-n")
-
+        result = self.tracker.query("SELECT ?v ?w WHERE { <%s> test:b_property ?v; test:b_property_n ?w }"
+                                    % (self.instance_b))
+        self.assertEquals(result[0][0], "content-b-test")
+        self.assertEquals(result[0][1], "b-test-n")
 
 
 if __name__ == "__main__":
-    ut.main ()
-
-    
+    ut.main()
diff --git a/tests/functional-tests/200-backup-restore.py b/tests/functional-tests/200-backup-restore.py
index 20cdabc..e5181b9 100755
--- a/tests/functional-tests/200-backup-restore.py
+++ b/tests/functional-tests/200-backup-restore.py
@@ -17,8 +17,8 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301, USA.
 #
-import os 
-import dbus # For the exception handling
+import os
+import dbus  # For the exception handling
 
 from common.utils.system import TrackerSystemAbstraction
 from common.utils.helpers import StoreHelper
@@ -31,294 +31,299 @@ import unittest2 as ut
 """
 Call backup, restore, force the journal replay and check the data is correct afterwards
 """
+
+
 class BackupRestoreTest (CommonTrackerStoreTest):
+
+    """
+    Backup and restore to/from valid/invalid files
+    """
+
+    def setUp(self):
+        self.TEST_INSTANCE = "test://backup-restore/1"
+        self.BACKUP_FILE = "file://" + \
+            os.path.join(cfg.TEST_TMP_DIR, "tracker-backup-test-1")
+
+        if (os.path.exists(self.BACKUP_FILE)):
+            os.unlink(self.BACKUP_FILE)
+
+    def __insert_test_instance(self):
+        self.tracker.update("INSERT { <%s> a nco:Contact; nco:fullname 'test-backup' } "
+                            % (self.TEST_INSTANCE))
+
+    def __delete_test_instance(self):
+        self.tracker.update(
+            "DELETE { <%s> a rdfs:Resource } " % (self.TEST_INSTANCE))
+
+    def __is_test_instance_there(self):
+        result = self.tracker.query(
+            "SELECT ?u WHERE { ?u a nco:Contact; nco:fullname 'test-backup'}")
+        if (len(result) == 1 and len(result[0]) == 1 and result[0][0] == self.TEST_INSTANCE):
+            return True
+        return False
+
+    def test_backup_01(self):
+        """
+        Inserted data is restored after backup
+
+        1.Insert contact
+        2.Take backup.
+        3.Delete contact. (check it is not there)
+        4.Restore the file.
+        5.Check the contact is back there
+        """
+
+        self.__insert_test_instance()
+        instances_before = self.tracker.count_instances("nco:Contact")
+
+        self.tracker.backup(self.BACKUP_FILE)
+
+        self.__delete_test_instance()
+        instances_now = self.tracker.count_instances("nco:Contact")
+
+        self.assertEquals(instances_before - 1, instances_now)
+
+        self.tracker.restore(self.BACKUP_FILE)
+
+        instances_after = self.tracker.count_instances("nco:Contact")
+
+        self.assertEquals(instances_before, instances_after)
+        self.assertTrue(self.__is_test_instance_there())
+
+        # Clean the DB for the next test
+        self.__delete_test_instance()
+
+    def test_backup_02(self):
+        """
+        Data inserted after backup is lost in restore
+
+        1.Take backup of db.
+        2.Insert a contact.
+        3.Restore the db.
+        4.Search for the contact inserted.
+        """
+
+        # Precondition: test backup contact shouldn't be there
+        self.assertFalse(self.__is_test_instance_there())
+
+        self.tracker.backup(self.BACKUP_FILE)
+
+        self.__insert_test_instance()
+        self.assertTrue(self.__is_test_instance_there())
+
+        self.tracker.restore(self.BACKUP_FILE)
+
+        self.assertFalse(self.__is_test_instance_there())
+
+    def test_backup_03(self):
         """
-        Backup and restore to/from valid/invalid files
+        Restore from a random text file
         """
-        def setUp (self):
-            self.TEST_INSTANCE = "test://backup-restore/1"
-            self.BACKUP_FILE = "file://" + os.path.join (cfg.TEST_TMP_DIR, "tracker-backup-test-1")
-
-            if (os.path.exists (self.BACKUP_FILE)):
-                os.unlink (self.BACKUP_FILE)
-
-        def __insert_test_instance (self):
-            self.tracker.update ("INSERT { <%s> a nco:Contact; nco:fullname 'test-backup' } "
-                                 % (self.TEST_INSTANCE))
-
-        def __delete_test_instance (self):
-            self.tracker.update ("DELETE { <%s> a rdfs:Resource } " % (self.TEST_INSTANCE))
-
-        def __is_test_instance_there (self):
-            result = self.tracker.query ("SELECT ?u WHERE { ?u a nco:Contact; nco:fullname 'test-backup'}")
-            if (len (result) == 1 and len (result[0]) == 1 and result[0][0] == self.TEST_INSTANCE):
-                return True
-            return False
-
-       def test_backup_01(self):
-            """
-            Inserted data is restored after backup
-            
-            1.Insert contact
-            2.Take backup.
-            3.Delete contact. (check it is not there)
-            4.Restore the file.
-            5.Check the contact is back there
-            """
-            
-            self.__insert_test_instance ()
-            instances_before = self.tracker.count_instances ("nco:Contact")
-            
-            self.tracker.backup (self.BACKUP_FILE)
-            
-            self.__delete_test_instance ()
-            instances_now = self.tracker.count_instances ("nco:Contact")
-            
-            self.assertEquals (instances_before-1, instances_now)
-            
-            self.tracker.restore (self.BACKUP_FILE)
-
-            instances_after = self.tracker.count_instances ("nco:Contact")
-
-            self.assertEquals (instances_before, instances_after)
-            self.assertTrue (self.__is_test_instance_there ())
-            
-            # Clean the DB for the next test
-            self.__delete_test_instance ()
-
-
-       def test_backup_02 (self):
-             """
-              Data inserted after backup is lost in restore
-              
-             1.Take backup of db.
-             2.Insert a contact.
-             3.Restore the db.
-             4.Search for the contact inserted. 
-             """
-
-              # Precondition: test backup contact shouldn't be there
-              self.assertFalse (self.__is_test_instance_there ())
-
-              self.tracker.backup (self.BACKUP_FILE)
-
-              self.__insert_test_instance ()
-              self.assertTrue (self.__is_test_instance_there ())
-
-              self.tracker.restore (self.BACKUP_FILE)
-
-              self.assertFalse (self.__is_test_instance_there ())
-
-
-       
-       def test_backup_03 (self):
-             """
-              Restore from a random text file
-             """
-              TEST_FILE = os.path.join (cfg.TEST_TMP_DIR, "trash_file")
-              trashfile = open (TEST_FILE, "w")
-              trashfile.write ("Here some useless text that obviously is NOT a backup")
-              trashfile.close ()
-
-             self.assertRaises (dbus.DBusException,
-                                 self.tracker.restore,
-                                 "file://" + TEST_FILE)
-              os.unlink (TEST_FILE)
-
-        def test_backup_04 (self):
-              """
-              Restore from a random binary file
-              """
-              TEST_FILE = os.path.join (cfg.TEST_TMP_DIR, "trash_file.dat")
-              
-              import struct
-              trashfile = open (TEST_FILE, "wb")
-              for n in range (0, 50):
-                  data = struct.pack ('i', n)
-                  trashfile.write (data)
-              trashfile.close ()
-
-              instances_before = self.tracker.count_instances ("nie:InformationElement")
-             self.assertRaises (dbus.DBusException,
-                                 self.tracker.restore,
-                                 "file://" + TEST_FILE)
-
-              os.unlink (TEST_FILE)
-
-       def test_backup_05(self):
-             """
-             Take backup of db to a invalid path.
-             Expected: Backup should not be taken and tracker should behave normally.  
-             """
-              self.assertRaises (dbus.DBusException,
-                                 self.tracker.backup,
-                                 "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
-              
-
-        def test_backup_06 (self):
-            """
-            Try to restore an invalid path
-            """
-            self.assertRaises (dbus.DBusException,
-                               self.tracker.restore,
-                               "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
-               
-
-       def test_backup_07(self):
-             """
-              Restore after removing the DBs and journal
-              
-             1.Insert a contact.
-             2.Take backup of db.
-             4.Delete the database
-             5.Restore the db.
-             6.Search for the contact inserted. 
-              """
-              self.__insert_test_instance ()
-              instances_before = self.tracker.count_instances ("nco:Contact")
-             self.tracker.backup (self.BACKUP_FILE)
-
-              self.system.tracker_store_stop_nicely ()
-              self.system.tracker_store_remove_dbs ()
-              self.system.tracker_store_remove_journal ()
-              self.system.tracker_store_start ()
-              
-              instances_before_restore = self.tracker.count_instances ("nco:Contact")
-              self.assertNotEqual (instances_before_restore, instances_before)
-              
-              self.tracker.restore (self.BACKUP_FILE)
-             self.assertTrue (self.__is_test_instance_there ())
-
-              self.__delete_test_instance ()
-
-
-       def test_backup_08 (self):
-             """
-              Restore after corrupting DB
-              
-             1.Insert a contact.
-             2.Take backup of db.
-             5.Restore the db.
-             6.Search for the contact inserted. 
-              """
-              self.__insert_test_instance ()
-              instances_before = self.tracker.count_instances ("nco:Contact")
-             self.tracker.backup (self.BACKUP_FILE)
-
-              self.system.tracker_store_stop_brutally ()
-              self.system.tracker_store_corrupt_dbs ()
-              self.system.tracker_store_remove_journal ()
-              self.system.tracker_store_start ()
-              
-              instances_before_restore = self.tracker.count_instances ("nco:Contact")
-              self.assertNotEqual (instances_before_restore, instances_before)
-              
-              self.tracker.restore (self.BACKUP_FILE)
-             self.assertTrue (self.__is_test_instance_there ())
-
-              # DB to the original state
-              self.__delete_test_instance ()
-              
-        def test_backup_11(self):
-             """
-              Backup ignores the file extension
-              
-             1.Insert a contact.
-             2.Take backup of db in .jpg format.
-             3.Restore the db.
-             4.Search for the contact inserted. 
-             """
-              BACKUP_JPG_EXT = "file://%s/tracker-test-backup.jpg" % (cfg.TEST_TMP_DIR)
-              
-              self.__insert_test_instance ()
-
-              instances_before = self.tracker.count_instances ("nco:Contact")
-
-             self.tracker.backup (BACKUP_JPG_EXT)
-
-              self.__delete_test_instance ()
-              instances_now = self.tracker.count_instances ("nco:Contact")
-              self.assertEquals (instances_before, instances_now+1)
-
-              self.tracker.restore (BACKUP_JPG_EXT)
-              instances_after = self.tracker.count_instances ("nco:Contact")
-              self.assertEquals (instances_before, instances_after)
-
-              # Restore the DB to the original state
-              self.__delete_test_instance ()
+        TEST_FILE = os.path.join(cfg.TEST_TMP_DIR, "trash_file")
+        trashfile = open(TEST_FILE, "w")
+        trashfile.write(
+            "Here some useless text that obviously is NOT a backup")
+        trashfile.close()
+
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.restore,
+                          "file://" + TEST_FILE)
+        os.unlink(TEST_FILE)
+
+    def test_backup_04(self):
+        """
+        Restore from a random binary file
+        """
+        TEST_FILE = os.path.join(cfg.TEST_TMP_DIR, "trash_file.dat")
+
+        import struct
+        trashfile = open(TEST_FILE, "wb")
+        for n in range(0, 50):
+            data = struct.pack('i', n)
+            trashfile.write(data)
+        trashfile.close()
+
+        instances_before = self.tracker.count_instances(
+            "nie:InformationElement")
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.restore,
+                          "file://" + TEST_FILE)
+
+        os.unlink(TEST_FILE)
 
+    def test_backup_05(self):
+        """
+        Take backup of db to a invalid path.
+        Expected: Backup should not be taken and tracker should behave normally.
+        """
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.backup,
+                          "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
+
+    def test_backup_06(self):
+        """
+        Try to restore an invalid path
+        """
+        self.assertRaises(dbus.DBusException,
+                          self.tracker.restore,
+                          "file://%s/this/is/a/non-existant/folder/backup" % (cfg.TEST_TMP_DIR))
+
+    def test_backup_07(self):
+        """
+        Restore after removing the DBs and journal
+
+        1.Insert a contact.
+        2.Take backup of db.
+        4.Delete the database
+        5.Restore the db.
+        6.Search for the contact inserted.
+        """
+        self.__insert_test_instance()
+        instances_before = self.tracker.count_instances("nco:Contact")
+        self.tracker.backup(self.BACKUP_FILE)
+
+        self.system.tracker_store_stop_nicely()
+        self.system.tracker_store_remove_dbs()
+        self.system.tracker_store_remove_journal()
+        self.system.tracker_store_start()
+
+        instances_before_restore = self.tracker.count_instances("nco:Contact")
+        self.assertNotEqual(instances_before_restore, instances_before)
+
+        self.tracker.restore(self.BACKUP_FILE)
+        self.assertTrue(self.__is_test_instance_there())
+
+        self.__delete_test_instance()
+
+    def test_backup_08(self):
+        """
+        Restore after corrupting DB
+
+        1.Insert a contact.
+        2.Take backup of db.
+        5.Restore the db.
+        6.Search for the contact inserted.
+        """
+        self.__insert_test_instance()
+        instances_before = self.tracker.count_instances("nco:Contact")
+        self.tracker.backup(self.BACKUP_FILE)
+
+        self.system.tracker_store_stop_brutally()
+        self.system.tracker_store_corrupt_dbs()
+        self.system.tracker_store_remove_journal()
+        self.system.tracker_store_start()
+
+        instances_before_restore = self.tracker.count_instances("nco:Contact")
+        self.assertNotEqual(instances_before_restore, instances_before)
+
+        self.tracker.restore(self.BACKUP_FILE)
+        self.assertTrue(self.__is_test_instance_there())
+
+        # DB to the original state
+        self.__delete_test_instance()
+
+    def test_backup_11(self):
+        """
+        Backup ignores the file extension
+
+        1.Insert a contact.
+        2.Take backup of db in .jpg format.
+        3.Restore the db.
+        4.Search for the contact inserted.
+        """
+        BACKUP_JPG_EXT = "file://%s/tracker-test-backup.jpg" % (
+            cfg.TEST_TMP_DIR)
+
+        self.__insert_test_instance()
+
+        instances_before = self.tracker.count_instances("nco:Contact")
+
+        self.tracker.backup(BACKUP_JPG_EXT)
+
+        self.__delete_test_instance()
+        instances_now = self.tracker.count_instances("nco:Contact")
+        self.assertEquals(instances_before, instances_now + 1)
+
+        self.tracker.restore(BACKUP_JPG_EXT)
+        instances_after = self.tracker.count_instances("nco:Contact")
+        self.assertEquals(instances_before, instances_after)
+
+        # Restore the DB to the original state
+        self.__delete_test_instance()
 
 
 class JournalReplayTest (CommonTrackerStoreTest):
+
+    """
+    Force journal replaying and check that the DB is correct aftewards
+    """
+    @expectedFailureJournal()
+    def test_journal_01(self):
         """
-        Force journal replaying and check that the DB is correct aftewards
+        Journal replaying when the DB is corrupted
+
+        Insert few data (to have more than the pre-defined instances)
+        Check instances of different classes
+        Kill the store (SIGKILL)
+        Replace the DB with a random file
+        Start the store
+        Check instances of different classes
         """
-       @expectedFailureJournal()
-       def test_journal_01 (self) :
-            """
-            Journal replaying when the DB is corrupted
-            
-            Insert few data (to have more than the pre-defined instances)
-            Check instances of different classes
-            Kill the store (SIGKILL)
-            Replace the DB with a random file
-            Start the store
-            Check instances of different classes
-            """
-            self.tracker.update ("INSERT { <test://journal-replay/01> a nco:Contact. }")
-            
-            emails = self.tracker.count_instances ("nmo:Email")
-            ie = self.tracker.count_instances ("nie:InformationElement")
-            contacts = self.tracker.count_instances ("nco:Contact")
-
-            self.system.tracker_store_stop_brutally ()
-            self.system.tracker_store_corrupt_dbs ()
-            self.system.tracker_store_start ()
-
-            emails_now = self.tracker.count_instances ("nmo:Email")
-            ie_now = self.tracker.count_instances ("nie:InformationElement")
-            contacts_now = self.tracker.count_instances ("nco:Contact")
-
-            self.assertEquals (emails, emails_now)
-            self.assertEquals (ie, ie_now)
-            self.assertEquals (contacts, contacts_now)
-
-            self.tracker.update ("DELETE { <test://journal-replay/01> a rdfs:Resource. }")
-
-       @expectedFailureJournal()
-       def test_journal_02 (self) :
-            """
-            Journal replaying when the DB disappears
-            
-            Insert few data (to have more than the pre-defined instances)
-            Check instances of different classes
-            KILL the store
-            Force a journal replay (DB missing, .meta.isrunning exists, journal OK)
-            Start the store
-            Check instances of different classes
-            """
-            self.tracker.update ("INSERT { <test://journal-replay/02> a nco:Contact. }")
-            
-            emails = self.tracker.count_instances ("nmo:Email")
-            ie = self.tracker.count_instances ("nie:InformationElement")
-            contacts = self.tracker.count_instances ("nco:Contact")
-
-            self.system.tracker_store_stop_brutally ()
-            self.system.tracker_store_prepare_journal_replay ()
-            self.system.tracker_store_start ()
-
-            emails_now = self.tracker.count_instances ("nmo:Email")
-            ie_now = self.tracker.count_instances ("nie:InformationElement")
-            contacts_now = self.tracker.count_instances ("nco:Contact")
-
-            self.assertEquals (emails, emails_now)
-            self.assertEquals (ie, ie_now)
-            self.assertEquals (contacts, contacts_now)
-
-            self.tracker.update ("DELETE { <test://journal-replay/02> a rdfs:Resource. }")
+        self.tracker.update(
+            "INSERT { <test://journal-replay/01> a nco:Contact. }")
 
-if __name__ == "__main__":
-    ut.main()                      
+        emails = self.tracker.count_instances("nmo:Email")
+        ie = self.tracker.count_instances("nie:InformationElement")
+        contacts = self.tracker.count_instances("nco:Contact")
+
+        self.system.tracker_store_stop_brutally()
+        self.system.tracker_store_corrupt_dbs()
+        self.system.tracker_store_start()
+
+        emails_now = self.tracker.count_instances("nmo:Email")
+        ie_now = self.tracker.count_instances("nie:InformationElement")
+        contacts_now = self.tracker.count_instances("nco:Contact")
 
+        self.assertEquals(emails, emails_now)
+        self.assertEquals(ie, ie_now)
+        self.assertEquals(contacts, contacts_now)
 
+        self.tracker.update(
+            "DELETE { <test://journal-replay/01> a rdfs:Resource. }")
 
+    @expectedFailureJournal()
+    def test_journal_02(self):
+        """
+        Journal replaying when the DB disappears
+
+        Insert few data (to have more than the pre-defined instances)
+        Check instances of different classes
+        KILL the store
+        Force a journal replay (DB missing, .meta.isrunning exists, journal OK)
+        Start the store
+        Check instances of different classes
+        """
+        self.tracker.update(
+            "INSERT { <test://journal-replay/02> a nco:Contact. }")
+
+        emails = self.tracker.count_instances("nmo:Email")
+        ie = self.tracker.count_instances("nie:InformationElement")
+        contacts = self.tracker.count_instances("nco:Contact")
+
+        self.system.tracker_store_stop_brutally()
+        self.system.tracker_store_prepare_journal_replay()
+        self.system.tracker_store_start()
+
+        emails_now = self.tracker.count_instances("nmo:Email")
+        ie_now = self.tracker.count_instances("nie:InformationElement")
+        contacts_now = self.tracker.count_instances("nco:Contact")
+
+        self.assertEquals(emails, emails_now)
+        self.assertEquals(ie, ie_now)
+        self.assertEquals(contacts, contacts_now)
+
+        self.tracker.update(
+            "DELETE { <test://journal-replay/02> a rdfs:Resource. }")
+
+if __name__ == "__main__":
+    ut.main()
diff --git a/tests/functional-tests/300-miner-basic-ops.py b/tests/functional-tests/300-miner-basic-ops.py
index cf4fb47..24ed8a0 100755
--- a/tests/functional-tests/300-miner-basic-ops.py
+++ b/tests/functional-tests/300-miner-basic-ops.py
@@ -33,11 +33,14 @@ import unittest2 as ut
 from common.utils.helpers import log
 from common.utils.minertest import CommonTrackerMinerTest, MINER_TMP_DIR, uri, path
 
+
 class MinerCrawlTest (CommonTrackerMinerTest):
+
     """
     Test cases to check if miner is able to monitor files that are created, deleted or moved
     """
-    def __get_text_documents (self):
+
+    def __get_text_documents(self):
         return self.tracker.query ("""
           SELECT ?url WHERE {
               ?u a nfo:TextDocument ;
@@ -45,171 +48,187 @@ class MinerCrawlTest (CommonTrackerMinerTest):
           }
           """)
 
-    def __get_parent_urn (self, filepath):
+    def __get_parent_urn(self, filepath):
         result = self.tracker.query ("""
           SELECT nfo:belongsToContainer(?u) WHERE {
               ?u a nfo:FileDataObject ;
                  nie:url \"%s\" .
           }
           """ % (uri (filepath)))
-        self.assertEquals (len (result), 1)
+        self.assertEquals(len(result), 1)
         return result[0][0]
 
-    def __get_file_urn (self, filepath):
+    def __get_file_urn(self, filepath):
         result = self.tracker.query ("""
           SELECT ?u WHERE {
               ?u a nfo:FileDataObject ;
                  nie:url \"%s\" .
           }
           """ % (uri (filepath)))
-        self.assertEquals (len (result), 1)
+        self.assertEquals(len(result), 1)
         return result[0][0]
 
     """
     Boot the miner with the correct configuration and check everything is fine
     """
-    def test_01_initial_crawling (self):
+
+    def test_01_initial_crawling(self):
         """
         The precreated files and folders should be there
         """
         # Maybe the information hasn't been committed yet
-        time.sleep (1)
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 3)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        time.sleep(1)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 3)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
 
-        # We don't check (yet) folders, because Applications module is injecting results
+        # We don't check (yet) folders, because Applications module is
+        # injecting results
 
 
-## class copy(TestUpdate):
-## FIXME all tests in one class because the miner-fs restarting takes some time (~5 sec)
-##       Maybe we can move the miner-fs initialization to setUpModule and then move these
-##       tests to different classes
+# class copy(TestUpdate):
+# FIXME all tests in one class because the miner-fs restarting takes some time (~5 sec)
+# Maybe we can move the miner-fs initialization to setUpModule and then move these
+# tests to different classes
 
-    def test_02_copy_from_unmonitored_to_monitored (self):
+    def test_02_copy_from_unmonitored_to_monitored(self):
         """
         Copy an file from unmonitored directory to monitored directory
         and verify if data base is updated accordingly
         """
-        source = os.path.join (MINER_TMP_DIR, "test-no-monitored", "file0.txt")
-        dest = os.path.join (MINER_TMP_DIR, "test-monitored", "file0.txt")
-        shutil.copyfile (source, dest)
+        source = os.path.join(MINER_TMP_DIR, "test-no-monitored", "file0.txt")
+        dest = os.path.join(MINER_TMP_DIR, "test-monitored", "file0.txt")
+        shutil.copyfile(source, dest)
 
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nfo:TextDocument', uri(dest))
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nfo:TextDocument', uri(dest))
 
         # verify if miner indexed this file.
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 4)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/file0.txt"), unpacked_result)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 4)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/file0.txt"), unpacked_result)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest)
-        self.system.store.await_resource_deleted (dest_id)
+        log("Remove and wait")
+        os.remove(dest)
+        self.system.store.await_resource_deleted(dest_id)
 
-    def test_03_copy_from_monitored_to_unmonitored (self):
+    def test_03_copy_from_monitored_to_unmonitored(self):
         """
         Copy an file from a monitored location to an unmonitored location
         Nothing should change
         """
 
         # Copy from monitored to unmonitored
-        source = os.path.join (MINER_TMP_DIR, "test-monitored", "file1.txt")
-        dest = os.path.join (MINER_TMP_DIR, "test-no-monitored", "file1.txt")
-        shutil.copyfile (source, dest)
+        source = os.path.join(MINER_TMP_DIR, "test-monitored", "file1.txt")
+        dest = os.path.join(MINER_TMP_DIR, "test-no-monitored", "file1.txt")
+        shutil.copyfile(source, dest)
 
-        time.sleep (1)
+        time.sleep(1)
         # Nothing changed
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 3, "Results:" + str(result))
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 3, "Results:" + str(result))
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
 
         # Clean the file
-        os.remove (dest)
+        os.remove(dest)
 
-    def test_04_copy_from_monitored_to_monitored (self):
+    def test_04_copy_from_monitored_to_monitored(self):
         """
         Copy a file between monitored directories
         """
-        source = os.path.join (MINER_TMP_DIR, "test-monitored", "file1.txt")
-        dest = os.path.join (MINER_TMP_DIR, "test-monitored", "dir1", "dir2", "file-test04.txt")
-        shutil.copyfile (source, dest)
-
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nfo:TextDocument', uri(dest))
-
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 4)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file-test04.txt"), unpacked_result)
+        source = os.path.join(MINER_TMP_DIR, "test-monitored", "file1.txt")
+        dest = os.path.join(
+            MINER_TMP_DIR, "test-monitored", "dir1", "dir2", "file-test04.txt")
+        shutil.copyfile(source, dest)
+
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nfo:TextDocument', uri(dest))
+
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 4)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file-test04.txt"), unpacked_result)
 
         # Clean the file
-        os.remove (dest)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
-
+        os.remove(dest)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(
+            3, self.tracker.count_instances("nfo:TextDocument"))
 
-    def test_05_move_from_unmonitored_to_monitored (self):
+    def test_05_move_from_unmonitored_to_monitored(self):
         """
         Move a file from unmonitored to monitored directory
         """
-        source = os.path.join (MINER_TMP_DIR, "test-no-monitored", "file0.txt")
-        dest = os.path.join (MINER_TMP_DIR, "test-monitored", "dir1", "file-test05.txt")
-        shutil.move (source, dest)
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nfo:TextDocument', uri(dest))
-
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 4)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/file-test05.txt"), unpacked_result)
+        source = os.path.join(MINER_TMP_DIR, "test-no-monitored", "file0.txt")
+        dest = os.path.join(
+            MINER_TMP_DIR, "test-monitored", "dir1", "file-test05.txt")
+        shutil.move(source, dest)
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nfo:TextDocument', uri(dest))
+
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 4)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/file-test05.txt"), unpacked_result)
 
         # Clean the file
-        os.remove (dest)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
+        os.remove(dest)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(
+            3, self.tracker.count_instances("nfo:TextDocument"))
 
 ## """ move operation and tracker-miner response test cases """
-## class move(TestUpdate):
-
+# class move(TestUpdate):
 
-    def test_06_move_from_monitored_to_unmonitored (self):
+    def test_06_move_from_monitored_to_unmonitored(self):
         """
         Move a file from monitored to unmonitored directory
         """
         source = path("test-monitored/dir1/file2.txt")
         dest = path("test-no-monitored/file2.txt")
-        source_id = self.system.store.get_resource_id (uri(source))
-        shutil.move (source, dest)
-        self.system.store.await_resource_deleted (source_id)
+        source_id = self.system.store.get_resource_id(uri(source))
+        shutil.move(source, dest)
+        self.system.store.await_resource_deleted(source_id)
 
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 2)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 2)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
 
         # Restore the file
-        shutil.move (dest, source)
-        self.system.store.await_resource_inserted ('nfo:TextDocument', uri(source))
-        self.assertEquals (3, self.tracker.count_instances ("nfo:TextDocument"))
+        shutil.move(dest, source)
+        self.system.store.await_resource_inserted(
+            'nfo:TextDocument', uri(source))
+        self.assertEquals(
+            3, self.tracker.count_instances("nfo:TextDocument"))
 
-
-    def test_07_move_from_monitored_to_monitored (self):
+    def test_07_move_from_monitored_to_monitored(self):
         """
         Move a file between monitored directories
         """
@@ -219,89 +238,93 @@ class MinerCrawlTest (CommonTrackerMinerTest):
 
         resource_id = self.tracker.get_resource_id(url=uri(source))
 
-        source_dir_urn = self.__get_file_urn (os.path.dirname(source))
-        parent_before = self.__get_parent_urn (source)
-        self.assertEquals (source_dir_urn, parent_before)
+        source_dir_urn = self.__get_file_urn(os.path.dirname(source))
+        parent_before = self.__get_parent_urn(source)
+        self.assertEquals(source_dir_urn, parent_before)
 
-        shutil.move (source, dest)
+        shutil.move(source, dest)
         self.tracker.await_property_changed(resource_id, 'nie:url')
 
         # Checking fix for NB#214413: After a move operation, nfo:belongsToContainer
         # should be changed to the new one
-        dest_dir_urn = self.__get_file_urn (os.path.dirname(dest))
-        parent_after = self.__get_parent_urn (dest)
-        self.assertNotEquals (parent_before, parent_after)
-        self.assertEquals (dest_dir_urn, parent_after)
-
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 3)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/file2.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        dest_dir_urn = self.__get_file_urn(os.path.dirname(dest))
+        parent_after = self.__get_parent_urn(dest)
+        self.assertNotEquals(parent_before, parent_after)
+        self.assertEquals(dest_dir_urn, parent_after)
+
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 3)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(uri("test-monitored/file2.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
 
         # Restore the file
-        shutil.move (dest, source)
+        shutil.move(dest, source)
         self.tracker.await_property_changed(resource_id, 'nie:url')
 
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 3)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), unpacked_result)
-
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 3)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), unpacked_result)
 
-    def test_08_deletion_single_file (self):
+    def test_08_deletion_single_file(self):
         """
         Delete one of the files
         """
         victim = path("test-monitored/dir1/file2.txt")
-        victim_id = self.system.store.get_resource_id (uri(victim))
-        os.remove (victim)
-        self.system.store.await_resource_deleted (victim_id)
+        victim_id = self.system.store.get_resource_id(uri(victim))
+        os.remove(victim)
+        self.system.store.await_resource_deleted(victim_id)
 
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 2)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 2)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
+        self.assertIn(
+            uri("test-monitored/dir1/dir2/file3.txt"), unpacked_result)
 
         # Restore the file
-        f = open (victim, "w")
-        f.write ("Don't panic, everything is fine")
-        f.close ()
-        self.system.store.await_resource_inserted ('nfo:TextDocument', uri(victim))
+        f = open(victim, "w")
+        f.write("Don't panic, everything is fine")
+        f.close()
+        self.system.store.await_resource_inserted(
+            'nfo:TextDocument', uri(victim))
 
-    def test_09_deletion_directory (self):
+    def test_09_deletion_directory(self):
         """
         Delete a directory
         """
         victim = path("test-monitored/dir1")
-        victim_id = self.system.store.get_resource_id (uri(victim))
-        shutil.rmtree (victim)
+        victim_id = self.system.store.get_resource_id(uri(victim))
+        shutil.rmtree(victim)
 
-        file_inside_victim_url = uri (os.path.join (victim, "file2.txt"))
-        file_inside_victim_id = self.system.store.get_resource_id (file_inside_victim_url)
-        self.system.store.await_resource_deleted (file_inside_victim_id)
+        file_inside_victim_url = uri(os.path.join(victim, "file2.txt"))
+        file_inside_victim_id = self.system.store.get_resource_id(
+            file_inside_victim_url)
+        self.system.store.await_resource_deleted(file_inside_victim_id)
 
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 1)
-        unpacked_result = [ r[0] for r in result]
-        self.assertIn ( uri ("test-monitored/file1.txt"), unpacked_result)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 1)
+        unpacked_result = [r[0] for r in result]
+        self.assertIn(uri("test-monitored/file1.txt"), unpacked_result)
 
         # Restore the dirs
-        os.makedirs (path("test-monitored/dir1"))
-        os.makedirs (path("test-monitored/dir1/dir2"))
+        os.makedirs(path("test-monitored/dir1"))
+        os.makedirs(path("test-monitored/dir1/dir2"))
         for f in ["test-monitored/dir1/file2.txt",
                   "test-monitored/dir1/dir2/file3.txt"]:
             filename = path(f)
-            writer = open (filename, "w")
-            writer.write ("Don't panic, everything is fine")
-            writer.close ()
-            self.system.store.await_resource_inserted ('nfo:TextDocument', uri(f))
+            writer = open(filename, "w")
+            writer.write("Don't panic, everything is fine")
+            writer.close()
+            self.system.store.await_resource_inserted(
+                'nfo:TextDocument', uri(f))
 
         # Check everything is fine
-        result = self.__get_text_documents ()
-        self.assertEquals (len (result), 3)
+        result = self.__get_text_documents()
+        self.assertEquals(len(result), 3)
 
 if __name__ == "__main__":
     print """
diff --git a/tests/functional-tests/301-miner-resource-removal.py b/tests/functional-tests/301-miner-resource-removal.py
index 557cadc..b432612 100755
--- a/tests/functional-tests/301-miner-resource-removal.py
+++ b/tests/functional-tests/301-miner-resource-removal.py
@@ -52,67 +52,73 @@ CONF_OPTIONS = {
 
 REASONABLE_TIMEOUT = 30
 
+
 class MinerResourceRemovalTest (CommonTrackerMinerTest):
 
-    def prepare_directories (self):
+    def prepare_directories(self):
         # Override content from the base class
         pass
 
-    def create_test_content (self, file_urn, title):
+    def create_test_content(self, file_urn, title):
         sparql = "INSERT { \
                     _:ie a nmm:MusicPiece ; \
                          nie:title \"%s\" ; \
                          nie:isStoredAs <%s> \
                   } " % (title, file_urn)
 
-        self.tracker.update (sparql)
+        self.tracker.update(sparql)
 
-        return self.tracker.await_resource_inserted (rdf_class = 'nmm:MusicPiece',
-                                                     title = title)
+        return self.tracker.await_resource_inserted(
+            rdf_class='nmm:MusicPiece',
+            title=title)
 
-    def create_test_file (self, file_name):
+    def create_test_file(self, file_name):
         file_path = path(file_name)
 
-        file = open (file_path, 'w')
-        file.write ("Test")
-        file.close ()
-
-        return self.tracker.await_resource_inserted (rdf_class = 'nfo:Document',
-                                                     url = uri(file_name))
+        file = open(file_path, 'w')
+        file.write("Test")
+        file.close()
 
-    def assertResourceExists (self, urn):
-        if self.tracker.ask ("ASK { <%s> a rdfs:Resource }" % urn) == False:
-            self.fail ("Resource <%s> does not exist" % urn)
+        return self.tracker.await_resource_inserted(
+            rdf_class='nfo:Document',
+            url=uri(file_name))
 
-    def assertResourceMissing (self, urn):
-        if self.tracker.ask ("ASK { <%s> a rdfs:Resource }" % urn) == True:
-            self.fail ("Resource <%s> should not exist" % urn)
+    def assertResourceExists(self, urn):
+        if self.tracker.ask("ASK { <%s> a rdfs:Resource }" % urn) == False:
+            self.fail("Resource <%s> does not exist" % urn)
 
+    def assertResourceMissing(self, urn):
+        if self.tracker.ask("ASK { <%s> a rdfs:Resource }" % urn) == True:
+            self.fail("Resource <%s> should not exist" % urn)
 
-    def test_01_file_deletion (self):
+    def test_01_file_deletion(self):
         """
         Ensure every logical resource (nie:InformationElement) contained with
         in a file is deleted when the file is deleted.
         """
 
-        (file_1_id, file_1_urn) = self.create_test_file ("test-monitored/test_1.txt")
-        (file_2_id, file_2_urn) = self.create_test_file ("test-monitored/test_2.txt")
-        (ie_1_id, ie_1_urn) = self.create_test_content (file_1_urn, "Test resource 1")
-        (ie_2_id, ie_2_urn) = self.create_test_content (file_2_urn, "Test resource 2")
+        (file_1_id, file_1_urn) = self.create_test_file(
+            "test-monitored/test_1.txt")
+        (file_2_id, file_2_urn) = self.create_test_file(
+            "test-monitored/test_2.txt")
+        (ie_1_id, ie_1_urn) = self.create_test_content(
+            file_1_urn, "Test resource 1")
+        (ie_2_id, ie_2_urn) = self.create_test_content(
+            file_2_urn, "Test resource 2")
 
-        os.unlink (path ("test-monitored/test_1.txt"))
+        os.unlink(path("test-monitored/test_1.txt"))
 
-        self.tracker.await_resource_deleted (file_1_id)
-        self.tracker.await_resource_deleted (ie_1_id,
-                                             "Associated logical resource failed to be deleted " \
-                                             "when its containing file was removed.")
+        self.tracker.await_resource_deleted(file_1_id)
+        self.tracker.await_resource_deleted(ie_1_id,
+                                            "Associated logical resource failed to be deleted "
+                                            "when its containing file was removed.")
 
-        self.assertResourceMissing (file_1_urn)
-        self.assertResourceMissing (ie_1_urn)
-        self.assertResourceExists (file_2_urn)
-        self.assertResourceExists (ie_2_urn)
+        self.assertResourceMissing(file_1_urn)
+        self.assertResourceMissing(ie_1_urn)
+        self.assertResourceExists(file_2_urn)
+        self.assertResourceExists(ie_2_urn)
 
-    #def test_02_removable_device_data (self):
+    # def test_02_removable_device_data (self):
     #    """
     #    Tracker does periodic cleanups of data on removable volumes that haven't
     #    been seen since 'removable-days-threshold', and will also remove all data
diff --git a/tests/functional-tests/310-fts-indexing.py b/tests/functional-tests/310-fts-indexing.py
index 4673da6..54cc47a 100755
--- a/tests/functional-tests/310-fts-indexing.py
+++ b/tests/functional-tests/310-fts-indexing.py
@@ -36,27 +36,30 @@ from common.utils.helpers import log
 from common.utils.minertest import CommonTrackerMinerTest, MINER_TMP_DIR, uri, path, DEFAULT_TEXT
 from common.utils import configuration as cfg
 
+
 class CommonMinerFTS (CommonTrackerMinerTest):
+
     """
     Superclass to share methods. Shouldn't be run by itself.
     """
-    def prepare_directories (self):
+
+    def prepare_directories(self):
         # Override content from the base class
         pass
 
-    def setUp (self):
+    def setUp(self):
         self.testfile = "test-monitored/miner-fts-test.txt"
-        if os.path.exists (path (self.testfile)):
-            os.remove (path (self.testfile))
+        if os.path.exists(path(self.testfile)):
+            os.remove(path(self.testfile))
 
         super(CommonMinerFTS, self).setUp()
 
-    def set_text (self, text):
+    def set_text(self, text):
         exists = os.path.exists(path(self.testfile))
 
-        f = open (path (self.testfile), "w")
-        f.write (text)
-        f.close ()
+        f = open(path(self.testfile), "w")
+        f.write(text)
+        f.close()
 
         if exists:
             subject_id = self.tracker.get_resource_id(uri(self.testfile))
@@ -69,11 +72,11 @@ class CommonMinerFTS (CommonTrackerMinerTest):
 
         self.tracker.reset_graph_updates_tracking()
 
-    def search_word (self, word):
+    def search_word(self, word):
         """
         Return list of URIs with the word in them
         """
-        log ("Search for: %s" % word)
+        log("Search for: %s" % word)
         results = self.tracker.query ("""
                 SELECT ?url WHERE {
                   ?u a nfo:TextDocument ;
@@ -82,8 +85,8 @@ class CommonMinerFTS (CommonTrackerMinerTest):
                  }
                  """ % (word))
         return [r[0] for r in results]
-   
-    def basic_test (self, text, word):
+
+    def basic_test(self, text, word):
         """
         Save the text on the testfile, search the word
         and assert the testfile is only result.
@@ -91,172 +94,173 @@ class CommonMinerFTS (CommonTrackerMinerTest):
         Be careful with the default contents of the text files
         ( see common/utils/minertest.py DEFAULT_TEXT )
         """
-        self.set_text (text)
-        results = self.search_word (word)
-        self.assertEquals (len (results), 1)
-        self.assertIn ( uri (self.testfile), results)
+        self.set_text(text)
+        results = self.search_word(word)
+        self.assertEquals(len(results), 1)
+        self.assertIn(uri(self.testfile), results)
 
-    def _query_id (self, uri):
+    def _query_id(self, uri):
         query = "SELECT tracker:id(?urn) WHERE { ?urn nie:url \"%s\". }" % uri
-        result = self.tracker.query (query)
-        assert len (result) == 1
-        return int (result[0][0])
+        result = self.tracker.query(query)
+        assert len(result) == 1
+        return int(result[0][0])
 
 
 class MinerFTSBasicTest (CommonMinerFTS):
+
     """
     Tests different contents in a single file
     """
 
-    def test_01_single_word (self):
+    def test_01_single_word(self):
         TEXT = "automobile"
-        self.basic_test (TEXT, TEXT)
+        self.basic_test(TEXT, TEXT)
 
-    def test_02_multiple_words (self):
+    def test_02_multiple_words(self):
         TEXT = "automobile with unlimited power"
-        self.set_text (TEXT)
-        
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 1)
-        self.assertIn (uri (self.testfile), results)
+        self.set_text(TEXT)
 
-        results = self.search_word ("unlimited")
-        self.assertEquals (len (results), 1)
-        self.assertIn (uri (self.testfile), results)
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 1)
+        self.assertIn(uri(self.testfile), results)
 
+        results = self.search_word("unlimited")
+        self.assertEquals(len(results), 1)
+        self.assertIn(uri(self.testfile), results)
 
-    def test_03_long_word (self):
-        # TEXT is longer than the 20 characters specified in the fts configuration
+    def test_03_long_word(self):
+        # TEXT is longer than the 20 characters specified in the fts
+        # configuration
         TEXT = "fsfsfsdfskfweeqrewqkmnbbvkdasdjefjewriqjfnc"
-        self.set_text (TEXT)
+        self.set_text(TEXT)
 
-        results = self.search_word (TEXT)
-        self.assertEquals (len (results), 0)
+        results = self.search_word(TEXT)
+        self.assertEquals(len(results), 0)
 
-    def test_04_non_existent_word (self):
+    def test_04_non_existent_word(self):
         TEXT = "This a trick"
-        self.set_text (TEXT)
-        results = self.search_word ("trikc")
-        self.assertEquals (len (results), 0)
-
+        self.set_text(TEXT)
+        results = self.search_word("trikc")
+        self.assertEquals(len(results), 0)
 
-    def test_05_word_in_multiple_files (self):
+    def test_05_word_in_multiple_files(self):
         # Safeguard, in the case we modify the DEFAULT_TEXT later...
         assert "content" in DEFAULT_TEXT
 
-        self.set_text (DEFAULT_TEXT)
-        results = self.search_word ("content")
-        self.assertEquals (len (results), 4)
-        self.assertIn ( uri (self.testfile), results)
-        self.assertIn ( uri ("test-monitored/file1.txt"), results)
-        self.assertIn ( uri ("test-monitored/dir1/file2.txt"), results)
-        self.assertIn ( uri ("test-monitored/dir1/dir2/file3.txt"), results)
+        self.set_text(DEFAULT_TEXT)
+        results = self.search_word("content")
+        self.assertEquals(len(results), 4)
+        self.assertIn(uri(self.testfile), results)
+        self.assertIn(uri("test-monitored/file1.txt"), results)
+        self.assertIn(uri("test-monitored/dir1/file2.txt"), results)
+        self.assertIn(uri("test-monitored/dir1/dir2/file3.txt"), results)
 
-    def test_06_word_multiple_times_in_file (self):
+    def test_06_word_multiple_times_in_file(self):
         TEXT = "automobile is red. automobile is big. automobile is great!"
-        self.basic_test (TEXT, "automobile")
+        self.basic_test(TEXT, "automobile")
 
-    def test_07_sentence (self):
+    def test_07_sentence(self):
         TEXT = "plastic is fantastic"
-        self.basic_test (TEXT, TEXT)
+        self.basic_test(TEXT, TEXT)
 
-    def test_08_partial_sentence (self):
+    def test_08_partial_sentence(self):
         TEXT = "plastic is fantastic"
-        self.basic_test (TEXT, "is fantastic")
+        self.basic_test(TEXT, "is fantastic")
 
-    def test_09_strange_word (self):
+    def test_09_strange_word(self):
         # FIXME Not sure what are we testing here
         TEXT = "'summer.time'"
-        self.basic_test (TEXT, "summer.time")
+        self.basic_test(TEXT, "summer.time")
 
     # Skip the test 'search for .'
 
-    def test_10_mixed_letters_and_numbers (self):
+    def test_10_mixed_letters_and_numbers(self):
         TEXT = "abc123"
-        self.basic_test (TEXT, "abc123")
+        self.basic_test(TEXT, "abc123")
 
-    def test_11_ignore_numbers (self):
+    def test_11_ignore_numbers(self):
         TEXT = "palabra 123123"
-        self.set_text (TEXT)
-        results = self.search_word ("123123")
-        self.assertEquals (len (results), 0)
+        self.set_text(TEXT)
+        results = self.search_word("123123")
+        self.assertEquals(len(results), 0)
 
 
 class MinerFTSFileOperationsTest (CommonMinerFTS):
+
     """
     Move, update, delete the files and check the text indexes are updated accordingly.
     """
 
-    def test_01_removal_of_file (self):
+    def test_01_removal_of_file(self):
         """
         When removing the file, its text contents disappear from the index
         """
         TEXT = "automobile is red and big and whatnot"
-        self.basic_test (TEXT, "automobile")
+        self.basic_test(TEXT, "automobile")
 
-        id = self._query_id (uri (self.testfile))
-        os.remove ( path (self.testfile))
-        self.tracker.await_resource_deleted (id)
+        id = self._query_id(uri(self.testfile))
+        os.remove(path(self.testfile))
+        self.tracker.await_resource_deleted(id)
 
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 0)
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 0)
 
-    def test_02_empty_the_file (self):
+    def test_02_empty_the_file(self):
         """
         Emptying the file, the indexed words are also removed
 
         FIXME: this test currently fails!
         """
         TEXT = "automobile is red and big and whatnot"
-        self.basic_test (TEXT, "automobile")
+        self.basic_test(TEXT, "automobile")
 
-        self.set_text ("")
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 0)
+        self.set_text("")
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 0)
 
-    def test_03_update_the_file (self):
+    def test_03_update_the_file(self):
         """
         Changing the contents of the file, updates the index
 
         FIXME: this test fails!
         """
         TEXT = "automobile is red and big and whatnot"
-        self.basic_test (TEXT, "automobile")
+        self.basic_test(TEXT, "automobile")
 
-        self.set_text ("airplane is blue and small and wonderful")
+        self.set_text("airplane is blue and small and wonderful")
 
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 0)
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 0)
 
-        results = self.search_word ("airplane")
-        self.assertEquals (len (results), 1)
+        results = self.search_word("airplane")
+        self.assertEquals(len(results), 1)
 
-    # Skip the test_text_13... feel, feet, fee in three diff files and search feet
+    # Skip the test_text_13... feel, feet, fee in three diff files and search
+    # feet
 
-    def __recreate_file (self, filename, content):
-        if os.path.exists (filename):
-            os.remove (filename)
+    def __recreate_file(self, filename, content):
+        if os.path.exists(filename):
+            os.remove(filename)
 
-        f = open (filename, "w")
-        f.write (content)
-        f.close ()
-        
+        f = open(filename, "w")
+        f.write(content)
+        f.close()
 
-    def test_04_on_unmonitored_file (self):
+    def test_04_on_unmonitored_file(self):
         """
         Set text in an unmonitored file. There should be no results.
         """
         TEXT = "automobile is red"
 
         TEST_15_FILE = "test-no-monitored/fts-indexing-test-15.txt"
-        self.__recreate_file (path (TEST_15_FILE), TEXT)
+        self.__recreate_file(path(TEST_15_FILE), TEXT)
 
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 0)
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 0)
 
-        os.remove (path (TEST_15_FILE))
+        os.remove(path(TEST_15_FILE))
 
-    def test_05_move_file_unmonitored_monitored (self):
+    def test_05_move_file_unmonitored_monitored(self):
         """
         Move file from unmonitored location to monitored location and index should be updated
         """
@@ -264,71 +268,77 @@ class MinerFTSFileOperationsTest (CommonMinerFTS):
         TEXT = "airplane is beautiful"
         TEST_16_SOURCE = "test-no-monitored/fts-indexing-text-16.txt"
         TEST_16_DEST = "test-monitored/fts-indexing-text-16.txt"
-        
-        self.__recreate_file (path (TEST_16_SOURCE), TEXT)
-        # the file is supposed to be ignored by tracker, so there is no notification..
-        time.sleep (5)
 
-        results = self.search_word ("airplane")
-        self.assertEquals (len (results), 0)
+        self.__recreate_file(path(TEST_16_SOURCE), TEXT)
+        # the file is supposed to be ignored by tracker, so there is no
+        # notification..
+        time.sleep(5)
+
+        results = self.search_word("airplane")
+        self.assertEquals(len(results), 0)
 
-        shutil.copyfile ( path (TEST_16_SOURCE), path (TEST_16_DEST))
-        self.tracker.await_resource_inserted (rdf_class = 'nfo:Document',
-                                              url = uri(TEST_16_DEST),
-                                              required_property = 'nie:plainTextContent')
+        shutil.copyfile(path(TEST_16_SOURCE), path(TEST_16_DEST))
+        self.tracker.await_resource_inserted(rdf_class='nfo:Document',
+                                             url=uri(TEST_16_DEST),
+                                             required_property='nie:plainTextContent')
 
-        results = self.search_word ("airplane")
-        self.assertEquals (len (results), 1)
+        results = self.search_word("airplane")
+        self.assertEquals(len(results), 1)
 
-        os.remove ( path (TEST_16_SOURCE))
-        os.remove ( path (TEST_16_DEST))
+        os.remove(path(TEST_16_SOURCE))
+        os.remove(path(TEST_16_DEST))
 
     # skip test for a file in a hidden directory
 
+
 class MinerFTSStopwordsTest (CommonMinerFTS):
+
     """
     Search for stopwords in a file 
     """
 
-    def __get_some_stopwords (self):
+    def __get_some_stopwords(self):
 
-        langcode, encoding = locale.getdefaultlocale ()
+        langcode, encoding = locale.getdefaultlocale()
         if "_" in langcode:
-            langcode = langcode.split ("_")[0]
+            langcode = langcode.split("_")[0]
 
-        stopwordsfile = os.path.join (cfg.DATADIR, "tracker", "stop-words", "stopwords." + langcode)
+        stopwordsfile = os.path.join(
+            cfg.DATADIR, "tracker", "stop-words", "stopwords." + langcode)
 
-        if not os.path.exists (stopwordsfile):
-            self.skipTest ("No stopwords for the current locale ('%s' doesn't exist)" % (stopwordsfile))
+        if not os.path.exists(stopwordsfile):
+            self.skipTest(
+                "No stopwords for the current locale ('%s' doesn't exist)" % (stopwordsfile))
             return []
-        
+
         stopwords = []
         counter = 0
-        for line in open (stopwordsfile, "r"):
-            if len (line) > 4:
-                stopwords.append (line[:-1])
+        for line in open(stopwordsfile, "r"):
+            if len(line) > 4:
+                stopwords.append(line[:-1])
                 counter += 1
 
             if counter > 5:
                 break
-            
+
         return stopwords
-    
-    def test_01_stopwords (self):
-        stopwords = self.__get_some_stopwords ()
-        TEXT = " ".join (["this a completely normal text automobile"] + stopwords)
-        
-        self.set_text (TEXT)
-        results = self.search_word ("automobile")
-        self.assertEquals (len (results), 1)
-        log ("Stopwords: %s" % stopwords)
-        for i in range (0, len (stopwords)):
-            results = self.search_word (stopwords[i])
-            self.assertEquals (len (results), 0)
-
-    ## FIXME add all the special character tests!
-    ##  http://git.gnome.org/browse/tracker/commit/?id=81c0d3bd754a6b20ac72323481767dc5b4a6217b
-    
+
+    def test_01_stopwords(self):
+        stopwords = self.__get_some_stopwords()
+        TEXT = " ".join(
+            ["this a completely normal text automobile"] + stopwords)
+
+        self.set_text(TEXT)
+        results = self.search_word("automobile")
+        self.assertEquals(len(results), 1)
+        log("Stopwords: %s" % stopwords)
+        for i in range(0, len(stopwords)):
+            results = self.search_word(stopwords[i])
+            self.assertEquals(len(results), 0)
+
+    # FIXME add all the special character tests!
+    # http://git.gnome.org/browse/tracker/commit/?id=81c0d3bd754a6b20ac72323481767dc5b4a6217b
+
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/400-extractor.py b/tests/functional-tests/400-extractor.py
index 140b5d5..d7a8029 100755
--- a/tests/functional-tests/400-extractor.py
+++ b/tests/functional-tests/400-extractor.py
@@ -33,228 +33,240 @@ import ConfigParser
 
 
 class ExtractionTestCase (ut.TestCase):
+
     """
     Test checks if the tracker extractor is able to retrieve metadata
     """
-    def __init__ (self, methodName='runTest', descfile=None):
+
+    def __init__(self, methodName='runTest', descfile=None):
         """
         Descfile is the description file in a relative path
         """
-        ut.TestCase.__init__ (self, methodName)
+        ut.TestCase.__init__(self, methodName)
 
         # Load the description file
         assert descfile
         self.rel_description = descfile
-        self.configParser = self.__load_description_file (self.rel_description)
+        self.configParser = self.__load_description_file(self.rel_description)
 
         # Add a method to the class called after the description file
-        methodName = self.rel_description.lower()[:-len(".expected")].replace (" ", "_")[-60:]
+        methodName = self.rel_description.lower()[
+            :-len(".expected")].replace(" ", "_")[-60:]
 
-        if (self.__is_expected_failure ()):
-            setattr (self,
-                     methodName,
-                     self.expected_failure_test_extraction)
+        if (self.__is_expected_failure()):
+            setattr(self,
+                    methodName,
+                    self.expected_failure_test_extraction)
         else:
-            setattr (self,
-                     methodName,
-                     self.generic_test_extraction)
+            setattr(self,
+                    methodName,
+                    self.generic_test_extraction)
 
         # unittest framework will run the test called "self._testMethodName"
         # So we set that variable to our new name
         self._testMethodName = methodName
 
-    def runTest (self):
+    def runTest(self):
         """
         Empty function pointer, that should NEVER be called. It is required to exist by unittest.
         """
         assert False
 
-    def __load_description_file (self, descfile):
-        configParser = ConfigParser.RawConfigParser ()
+    def __load_description_file(self, descfile):
+        configParser = ConfigParser.RawConfigParser()
         # Make it case sensitive:
         configParser.optionxform = str
 
-        abs_description = os.path.abspath (descfile)
-        loaded_files = configParser.read (abs_description)
+        abs_description = os.path.abspath(descfile)
+        loaded_files = configParser.read(abs_description)
         if not abs_description in loaded_files:
             raise Exception("Unable to load %s" % (abs_description))
 
         return configParser
 
-    def __is_expected_failure (self):
+    def __is_expected_failure(self):
         assert self.configParser
-        return self.configParser.has_option ("TestFile", "ExpectedFailure")
+        return self.configParser.has_option("TestFile", "ExpectedFailure")
 
-    def __get_bugnumber (self):
+    def __get_bugnumber(self):
         assert self.configParser
-        if self.configParser.has_option ("TestFile", "Bugzilla"):
-            return "'" + self.configParser.get ("TestFile", "Bugzilla") + "'"
+        if self.configParser.has_option("TestFile", "Bugzilla"):
+            return "'" + self.configParser.get("TestFile", "Bugzilla") + "'"
         else:
             return None
 
-    def expected_failure_test_extraction (self):
+    def expected_failure_test_extraction(self):
         try:
-            self.generic_test_extraction ()
+            self.generic_test_extraction()
         except Exception:
             raise ut.case._ExpectedFailure(sys.exc_info())
 
-        if self.__get_bugnumber ():
-            raise Exception ("Unexpected success. Maybe bug: " + self.__get_bugnumber () + " has been fixed?")
+        if self.__get_bugnumber():
+            raise Exception(
+                "Unexpected success. Maybe bug: " + self.__get_bugnumber() + " has been fixed?")
         else:
-            raise Exception ("Unexpected success. Check " + self.rel_description)
+            raise Exception(
+                "Unexpected success. Check " + self.rel_description)
 
-    def generic_test_extraction (self):
-        abs_description = os.path.abspath (self.rel_description)
+    def generic_test_extraction(self):
+        abs_description = os.path.abspath(self.rel_description)
 
-        # Filename contains the file to extract, in a relative path to the description file
-        desc_root, desc_file = os.path.split (abs_description)
+        # Filename contains the file to extract, in a relative path to the
+        # description file
+        desc_root, desc_file = os.path.split(abs_description)
 
-        filename_to_extract = self.configParser.get ("TestFile", "Filename")
-        self.file_to_extract = os.path.join (desc_root, filename_to_extract)
+        filename_to_extract = self.configParser.get("TestFile", "Filename")
+        self.file_to_extract = os.path.join(desc_root, filename_to_extract)
 
         result = get_tracker_extract_output(self.file_to_extract)
-        self.__assert_extraction_ok (result)
+        self.__assert_extraction_ok(result)
 
-    def assertDictHasKey (self, d, key, msg=None):
-        if not d.has_key (key):
+    def assertDictHasKey(self, d, key, msg=None):
+        if not d.has_key(key):
             standardMsg = "Missing: %s\n" % (key)
-            self.fail (self._formatMessage (msg, standardMsg))
+            self.fail(self._formatMessage(msg, standardMsg))
         else:
             return
 
-    def assertIsURN (self, supposed_uuid, msg=None):
+    def assertIsURN(self, supposed_uuid, msg=None):
         import uuid
 
         try:
-            if (supposed_uuid.startswith ("<") and supposed_uuid.endswith (">")):
+            if (supposed_uuid.startswith("<") and supposed_uuid.endswith(">")):
                 supposed_uuid = supposed_uuid[1:-1]
 
-            uuid.UUID (supposed_uuid)
+            uuid.UUID(supposed_uuid)
         except ValueError:
             standardMsg = "'%s' is not a valid UUID" % (supposed_uuid)
-            self.fail (self._formatMessage (msg, standardMsg))
-
-    def __assert_extraction_ok (self, result):
-        self.__check_section ("Metadata", result)
+            self.fail(self._formatMessage(msg, standardMsg))
 
-        if (cfg.haveMaemo and self.configParser.has_section ("Meego")):
-            self.__check_section ("Meego", result)
+    def __assert_extraction_ok(self, result):
+        self.__check_section("Metadata", result)
 
+        if (cfg.haveMaemo and self.configParser.has_section("Meego")):
+            self.__check_section("Meego", result)
 
-    def __check_section (self, section, result):
+    def __check_section(self, section, result):
         error_missing_prop = "Property '%s' hasn't been extracted from file \n'%s'\n (requested on '%s' [%s])"
         error_wrong_value = "on property '%s' from file %s\n (requested on: '%s' [%s])"
         error_extra_prop = "Property '%s' was explicitely banned for file \n'%s'\n (requested on '%s' [%s])"
         error_extra_prop_v = "Property '%s' with value '%s' was explicitely banned for file \n'%s'\n (requested on %s' [%s])"
 
-        expected_pairs = [] # List of expected (key, value)
+        expected_pairs = []  # List of expected (key, value)
         unexpected_pairs = []  # List of unexpected (key, value)
-        expected_keys = []  # List of expected keys (the key must be there, value doesnt matter)
-
-        for k, v in self.configParser.items (section):
-            if k.startswith ("!"):
-                unexpected_pairs.append ( (k[1:].replace ("_", ":"), v) )
-            elif k.startswith ("@"):
-                expected_keys.append ( k[1:].replace ("_", ":") )
+        expected_keys = []
+        # List of expected keys (the key must be there, value doesnt
+        # matter)
+
+        for k, v in self.configParser.items(section):
+            if k.startswith("!"):
+                unexpected_pairs.append((k[1:].replace("_", ":"), v))
+            elif k.startswith("@"):
+                expected_keys.append(k[1:].replace("_", ":"))
             else:
-                expected_pairs.append ( (k.replace ("_", ":"), v) )
-
+                expected_pairs.append((k.replace("_", ":"), v))
 
         for (prop, value) in expected_pairs:
-            self.assertDictHasKey (result, prop,
-                                   error_missing_prop % (prop,
-                                                         self.file_to_extract,
-                                                         self.rel_description,
-                                                         section))
+            self.assertDictHasKey(result, prop,
+                                  error_missing_prop % (prop,
+                                                        self.file_to_extract,
+                                                        self.rel_description,
+                                                        section))
             if value == "@URNUUID@":
-                # Watch out! We take only the FIRST element. Incompatible with multiple-valued props.
-                self.assertIsURN (result [prop][0],
-                                  error_wrong_value % (prop,
-                                                       self.file_to_extract,
-                                                       self.rel_description,
-                                                       section))
+                # Watch out! We take only the FIRST element. Incompatible with
+                # multiple-valued props.
+                self.assertIsURN(result[prop][0],
+                                 error_wrong_value % (prop,
+                                                      self.file_to_extract,
+                                                      self.rel_description,
+                                                      section))
             else:
-                self.assertIn (value, result [prop],
-                               error_wrong_value % (prop,
-                                                    self.file_to_extract,
-                                                    self.rel_description,
-                                                    section))
+                self.assertIn(value, result[prop],
+                              error_wrong_value % (prop,
+                                                   self.file_to_extract,
+                                                   self.rel_description,
+                                                   section))
 
         for (prop, value) in unexpected_pairs:
             # There is no prop, or it is but not with that value
             if (value == ""):
-                self.assertFalse (result.has_key (prop), error_extra_prop % (prop,
-                                                                             self.file_to_extract,
-                                                                             self.rel_description,
-                                                                             section))
+                self.assertFalse(
+                    result.has_key(prop), error_extra_prop % (prop,
+                                                              self.file_to_extract,
+                                                              self.rel_description,
+                                                              section))
             else:
                 if (value == "@URNUUID@"):
-                    self.assertIsURN (result [prop][0], error_extra_prop % (prop,
-                                                                            self.file_to_extract,
-                                                                            self.rel_description,
-                                                                            section))
+                    self.assertIsURN(
+                        result[prop][0], error_extra_prop % (prop,
+                                                             self.file_to_extract,
+                                                             self.rel_description,
+                                                             section))
                 else:
-                    self.assertNotIn (value, result [prop], error_extra_prop_v % (prop,
-                                                                                  value,
-                                                                                  self.file_to_extract,
-                                                                                  self.rel_description,
-                                                                                  section))
+                    self.assertNotIn(
+                        value, result[prop], error_extra_prop_v % (prop,
+                                                                   value,
+                                                                   self.file_to_extract,
+                                                                   self.rel_description,
+                                                                   section))
 
         for prop in expected_keys:
-             self.assertDictHasKey (result, prop,
-                                    error_missing_prop % (prop,
-                                                          self.file_to_extract,
-                                                          self.rel_description,
-                                                          section))
+            self.assertDictHasKey(result, prop,
+                                  error_missing_prop % (prop,
+                                                        self.file_to_extract,
+                                                        self.rel_description,
+                                                        section))
 
 
-def run_all ():
-    ##
+def run_all():
+    #
     # Traverse the TEST_DATA_PATH directory looking for .description files
     # Add a new TestCase to the suite per .description file and run the suite.
     #
     # Is we do this inside a single TestCase an error in one test would stop the whole
     # testing.
-    ##
-    if (os.path.exists (os.getcwd() + "/test-extraction-data")):
+    #
+    if (os.path.exists(os.getcwd() + "/test-extraction-data")):
         # Use local directory if available
         TEST_DATA_PATH = os.getcwd() + "/test-extraction-data"
     else:
-        TEST_DATA_PATH = os.path.join (cfg.DATADIR, "tracker-tests",
-                                       "test-extraction-data")
+        TEST_DATA_PATH = os.path.join(cfg.DATADIR, "tracker-tests",
+                                      "test-extraction-data")
     print "Loading test descriptions from", TEST_DATA_PATH
-    extractionTestSuite = ut.TestSuite ()
-    for root, dirs, files in os.walk (TEST_DATA_PATH):
-         descriptions = [os.path.join (root, f) for f in files if f.endswith ("expected")]
-         for descfile in descriptions:
-             tc = ExtractionTestCase(descfile=descfile)
-             extractionTestSuite.addTest(tc)
-    result = ut.TextTestRunner (verbosity=1).run (extractionTestSuite)
+    extractionTestSuite = ut.TestSuite()
+    for root, dirs, files in os.walk(TEST_DATA_PATH):
+        descriptions = [os.path.join(root, f)
+                        for f in files if f.endswith("expected")]
+        for descfile in descriptions:
+            tc = ExtractionTestCase(descfile=descfile)
+            extractionTestSuite.addTest(tc)
+    result = ut.TextTestRunner(verbosity=1).run(extractionTestSuite)
     sys.exit(not result.wasSuccessful())
 
-def run_one (filename):
-    ##
+
+def run_one(filename):
+    #
     # Run just one .description file
-    ##
-    description = os.path.join (os.getcwd (), filename) 
+    #
+    description = os.path.join(os.getcwd(), filename)
 
-    extractionTestSuite = ut.TestSuite ()
+    extractionTestSuite = ut.TestSuite()
     tc = ExtractionTestCase(descfile=description)
     extractionTestSuite.addTest(tc)
 
-    result = ut.TextTestRunner (verbosity=2).run (extractionTestSuite)
+    result = ut.TextTestRunner(verbosity=2).run(extractionTestSuite)
     sys.exit(not result.wasSuccessful())
 
 
 if __name__ == "__main__":
-    if (len (sys.argv) == 1):
-        run_all ()
+    if (len(sys.argv) == 1):
+        run_all()
     else:
-        if os.path.exists (sys.argv[1]) and sys.argv[1].endswith (".expected"):
-            run_one (sys.argv[1])
-        # FIXME: for the case when invoked by testrunner (see create-tests-xml.py)
+        if os.path.exists(sys.argv[1]) and sys.argv[1].endswith(".expected"):
+            run_one(sys.argv[1])
+        # FIXME: for the case when invoked by testrunner (see
+        # create-tests-xml.py)
         elif sys.argv[1] == "ExtractionTestCase":
-            run_all ()
+            run_all()
         else:
             print "Usage: %s [FILE.expected]" % (sys.argv[0])
-        
diff --git a/tests/functional-tests/500-writeback.py b/tests/functional-tests/500-writeback.py
index cdd2b06..1fc52aa 100755
--- a/tests/functional-tests/500-writeback.py
+++ b/tests/functional-tests/500-writeback.py
@@ -21,7 +21,8 @@
 Write values in tracker and check the actual values are written
 on the files. Note that these tests are highly platform dependant.
 """
-import os, dbus
+import os
+import dbus
 import time
 
 from common.utils.extractor import get_tracker_extract_output
@@ -29,18 +30,21 @@ from common.utils.writebacktest import CommonTrackerWritebackTest as CommonTrack
 import unittest2 as ut
 from common.utils.expectedFailure import expectedFailureBug
 
-REASONABLE_TIMEOUT = 5 # Seconds we wait for tracker-writeback to do the work
+REASONABLE_TIMEOUT = 5  # Seconds we wait for tracker-writeback to do the work
+
 
 class WritebackBasicDataTest (CommonTrackerWritebackTest):
+
     """
     Write in tracker store the properties witih writeback support and check
     that the new values are actually in the file
     """
-    def setUp (self):
+
+    def setUp(self):
         self.tracker = self.system.store
         self.extractor = self.system.extractor
 
-    def __clean_property (self, property_name, fileuri, expectFailure=True):
+    def __clean_property(self, property_name, fileuri, expectFailure=True):
         """
         Remove the property for the fileuri (file://...)
         """
@@ -53,13 +57,13 @@ class WritebackBasicDataTest (CommonTrackerWritebackTest):
            }
         """
         try:
-            self.tracker.update (CLEAN % (property_name, fileuri, property_name))
+            self.tracker.update(
+                CLEAN % (property_name, fileuri, property_name))
         except Exception, e:
             print e
             assert expectFailure
-                                
 
-    def __writeback_test (self, filename, mimetype, prop, expectedKey=None):
+    def __writeback_test(self, filename, mimetype, prop, expectedKey=None):
         """
         Set a value in @prop for the @filename. Then ask tracker-extractor
         for metadata and check in the results dictionary if the property is there.
@@ -74,23 +78,22 @@ class WritebackBasicDataTest (CommonTrackerWritebackTest):
         filename_real = filename[len('file://'):]
         initial_mtime = os.stat(filename_real).st_mtime
 
-        TEST_VALUE = prop.replace (":","") + "test"
+        TEST_VALUE = prop.replace(":", "") + "test"
         SPARQL_TMPL = """
            INSERT { ?u %s '%s' }
            WHERE  { ?u nie:url '%s' }
-        """ 
-        self.__clean_property (prop, filename)
-        self.tracker.update (SPARQL_TMPL % (prop, TEST_VALUE, filename))
+        """
+        self.__clean_property(prop, filename)
+        self.tracker.update(SPARQL_TMPL % (prop, TEST_VALUE, filename))
 
         self.wait_for_file_change(filename_real, initial_mtime)
 
-        results = get_tracker_extract_output (filename, mimetype)
+        results = get_tracker_extract_output(filename, mimetype)
         keyDict = expectedKey or prop
-        self.assertIn (TEST_VALUE, results[keyDict])
-        self.__clean_property (prop, filename, False)
-
+        self.assertIn(TEST_VALUE, results[keyDict])
+        self.__clean_property(prop, filename, False)
 
-    def __writeback_hasTag_test (self, filename, mimetype):
+    def __writeback_hasTag_test(self, filename, mimetype):
 
         SPARQL_TMPL = """
             INSERT {
@@ -112,73 +115,79 @@ class WritebackBasicDataTest (CommonTrackerWritebackTest):
            }
         """
 
-        self.tracker.update (SPARQL_TMPL % (filename))
-
-        time.sleep (REASONABLE_TIMEOUT)
+        self.tracker.update(SPARQL_TMPL % (filename))
 
-        results = get_tracker_extract_output (filename, mimetype)
-        self.assertIn ("testTag", results ["nao:hasTag"])
+        time.sleep(REASONABLE_TIMEOUT)
 
+        results = get_tracker_extract_output(filename, mimetype)
+        self.assertIn("testTag", results["nao:hasTag"])
 
     # JPEG test
-    def test_001_jpeg_title (self):
+    def test_001_jpeg_title(self):
         #FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
-        self.__writeback_test (self.get_test_filename_jpeg (), "image/jpeg", "nie:title")
+        self.__writeback_test(
+            self.get_test_filename_jpeg(), "image/jpeg", "nie:title")
 
-    def test_002_jpeg_description (self):
+    def test_002_jpeg_description(self):
         #FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
-        self.__writeback_test (self.get_test_filename_jpeg (), "image/jpeg", "nie:description")
+        self.__writeback_test(
+            self.get_test_filename_jpeg(), "image/jpeg", "nie:description")
 
-    def test_003_jpeg_keyword (self):
+    def test_003_jpeg_keyword(self):
         #FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
-        self.__writeback_test (self.get_test_filename_jpeg (), "image/jpeg",
-                               "nie:keyword", "nao:hasTag")
+        self.__writeback_test(self.get_test_filename_jpeg(), "image/jpeg",
+                              "nie:keyword", "nao:hasTag")
 
-    def test_004_jpeg_hasTag (self):
+    def test_004_jpeg_hasTag(self):
         #FILENAME = "test-writeback-monitored/writeback-test-1.jpeg"
-        self.__writeback_hasTag_test (self.get_test_filename_jpeg (), "image/jpeg")
+        self.__writeback_hasTag_test(
+            self.get_test_filename_jpeg(), "image/jpeg")
 
-        
     # TIFF tests
-    def test_011_tiff_title (self):
+    def test_011_tiff_title(self):
         #FILANAME = "test-writeback-monitored/writeback-test-2.tif"
-        self.__writeback_test (self.get_test_filename_tiff (), "image/tiff", "nie:title")
+        self.__writeback_test(
+            self.get_test_filename_tiff(), "image/tiff", "nie:title")
 
-    def test_012_tiff_description (self):
+    def test_012_tiff_description(self):
         FILENAME = "test-writeback-monitored/writeback-test-2.tif"
-        self.__writeback_test (self.get_test_filename_tiff (), "image/tiff", "nie:description")
-        
-    def test_013_tiff_keyword (self):
+        self.__writeback_test(
+            self.get_test_filename_tiff(), "image/tiff", "nie:description")
+
+    def test_013_tiff_keyword(self):
         FILENAME = "test-writeback-monitored/writeback-test-2.tif"
-        self.__writeback_test (self.get_test_filename_tiff (), "image/tiff",
-                               "nie:keyword", "nao:hasTag")
+        self.__writeback_test(self.get_test_filename_tiff(), "image/tiff",
+                              "nie:keyword", "nao:hasTag")
 
-    def test_014_tiff_hasTag (self):
+    def test_014_tiff_hasTag(self):
         FILENAME = "test-writeback-monitored/writeback-test-2.tif"
-        self.__writeback_hasTag_test (self.get_test_filename_tiff (), "image/tiff")
-      
-        
+        self.__writeback_hasTag_test(
+            self.get_test_filename_tiff(), "image/tiff")
 
     # PNG tests
-    @expectedFailureBug ("NB#185070")
-    def test_021_png_title (self):
+    @expectedFailureBug("NB#185070")
+    def test_021_png_title(self):
         FILENAME = "test-writeback-monitored/writeback-test-4.png"
-        self.__writeback_test (self.get_test_filaname_png (), "image/png", "nie:title")
+        self.__writeback_test(
+            self.get_test_filaname_png(), "image/png", "nie:title")
 
-    @expectedFailureBug ("NB#185070")
-    def test_022_png_description (self):
+    @expectedFailureBug("NB#185070")
+    def test_022_png_description(self):
         FILENAME = "test-writeback-monitored/writeback-test-4.png"
-        self.__writeback_test (self.get_test_filaname_png (), "image/png", "nie:description")
-        
-    @expectedFailureBug ("NB#185070")
-    def test_023_png_keyword (self):
+        self.__writeback_test(
+            self.get_test_filaname_png(), "image/png", "nie:description")
+
+    @expectedFailureBug("NB#185070")
+    def test_023_png_keyword(self):
         FILENAME = "test-writeback-monitored/writeback-test-4.png"
-        self.__writeback_test (self.get_test_filaname_png (), "image/png", "nie:keyword", "nao:hasTag:prefLabel")
+        self.__writeback_test(
+            self.get_test_filaname_png(), "image/png", "nie:keyword", "nao:hasTag:prefLabel")
 
     @expectedFailureBug("NB#185070")
-    def test_024_png_hasTag (self):
+    def test_024_png_hasTag(self):
         FILENAME = "test-writeback-monitored/writeback-test-4.png"
-        self.__writeback_hasTag_test (self.get_test_filaname_png (), "image/png")
+        self.__writeback_hasTag_test(
+            self.get_test_filaname_png(), "image/png")
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/501-writeback-details.py b/tests/functional-tests/501-writeback-details.py
index c7adceb..6bc4d6d 100755
--- a/tests/functional-tests/501-writeback-details.py
+++ b/tests/functional-tests/501-writeback-details.py
@@ -25,22 +25,23 @@ from common.utils.expectedFailure import expectedFailureBug
 import os
 import time
 
-REASONABLE_TIMEOUT = 5 # Seconds we wait for tracker-writeback to do the work
+REASONABLE_TIMEOUT = 5  # Seconds we wait for tracker-writeback to do the work
 
 
 class WritebackKeepDateTest (CommonTrackerWritebackTest):
 
-    def setUp (self):
+    def setUp(self):
         self.tracker = self.system.store
         self.extractor = self.system.extractor
-        self.favorite = self.__prepare_favorite_tag ()
+        self.favorite = self.__prepare_favorite_tag()
 
-    def __prepare_favorite_tag (self):
-        # Check here if favorite has tag... to make sure writeback is actually writing
+    def __prepare_favorite_tag(self):
+        # Check here if favorite has tag... to make sure writeback is actually
+        # writing
         results = self.tracker.query ("""
              SELECT ?label WHERE { nao:predefined-tag-favorite nao:prefLabel ?label }""")
 
-        if len (results) == 0:
+        if len(results) == 0:
             self.tracker.update ("""
              INSERT { nao:predefined-tag-favorite nao:prefLabel 'favorite'}
              WHERE { nao:predefined-tag-favorite a nao:Tag }
@@ -48,9 +49,8 @@ class WritebackKeepDateTest (CommonTrackerWritebackTest):
             return "favorite"
         else:
             return str(results[0][0])
-                       
 
-    def test_01_NB217627_content_created_date (self):
+    def test_01_NB217627_content_created_date(self):
         """
         NB#217627 - Order if results is different when an image is marked as favorite.
         """
@@ -60,13 +60,13 @@ class WritebackKeepDateTest (CommonTrackerWritebackTest):
               nfo:fileLastModified ?contentCreated
           } ORDER BY ?contentCreated
           """
-        results = self.tracker.query (query_images)
-        self.assertEquals (len (results), 3, results)
+        results = self.tracker.query(query_images)
+        self.assertEquals(len(results), 3, results)
 
-        log ("Waiting 2 seconds to ensure there is a noticiable difference in the timestamp")
-        time.sleep (2)
+        log("Waiting 2 seconds to ensure there is a noticiable difference in the timestamp")
+        time.sleep(2)
 
-        url = self.get_test_filename_jpeg ()
+        url = self.get_test_filename_jpeg()
 
         filename = url[len('file://'):]
         initial_mtime = os.stat(filename).st_mtime
@@ -79,29 +79,32 @@ class WritebackKeepDateTest (CommonTrackerWritebackTest):
            ?u nie:url <%s> .
          }
         """ % url
-        self.tracker.update (mark_as_favorite)
-        log ("Setting favorite in <%s>" % url)
+        self.tracker.update(mark_as_favorite)
+        log("Setting favorite in <%s>" % url)
 
-        self.wait_for_file_change (filename, initial_mtime)
+        self.wait_for_file_change(filename, initial_mtime)
 
         # Check the value is written in the file
-        metadata = get_tracker_extract_output (filename, "")
-        self.assertIn (self.favorite, metadata ["nao:hasTag"],
-                       "Tag hasn't been written in the file")
-        
-        # Now check the modification date of the files and it should be the same :)
-        new_results = self.tracker.query (query_images)
-        ## for (uri, date) in new_results:
-        ##     print "Checking dates of <%s>" % uri
+        metadata = get_tracker_extract_output(filename, "")
+        self.assertIn(self.favorite, metadata["nao:hasTag"],
+                      "Tag hasn't been written in the file")
+
+        # Now check the modification date of the files and it should be the
+        # same :)
+        new_results = self.tracker.query(query_images)
+        # for (uri, date) in new_results:
+        # print "Checking dates of <%s>" % uri
         ##     previous_date = convenience_dict[uri]
-        ##     print "Before: %s \nAfter : %s" % (previous_date, date)
+        # print "Before: %s \nAfter : %s" % (previous_date, date)
         ##     self.assertEquals (date, previous_date, "File <%s> has change its contentCreated date!" % uri)
 
         # Indeed the order of the results should be the same
-        for i in range (0, len (results)):
-            self.assertEquals (results[i][0], new_results[i][0], "Order of the files is different")
-            self.assertEquals (results[i][1], new_results[i][1], "Date has change in file <%s>" % results[i][0])
-        
+        for i in range(0, len(results)):
+            self.assertEquals(results[i][0], new_results[
+                i][0], "Order of the files is different")
+            self.assertEquals(results[i][1], new_results[i][
+                1], "Date has change in file <%s>" % results[i][0])
+
 
 if __name__ == "__main__":
-    ut.main ()
+    ut.main()
diff --git a/tests/functional-tests/600-applications-camera.py b/tests/functional-tests/600-applications-camera.py
index 31a7a56..a38b29c 100755
--- a/tests/functional-tests/600-applications-camera.py
+++ b/tests/functional-tests/600-applications-camera.py
@@ -31,11 +31,12 @@ from common.utils.helpers import log
 
 
 class TrackerCameraTestSuite (CommonTrackerApplicationTest):
+
     """
     Common functionality for camera tests.
     """
 
-    def insert_photo_resource_info (self, urn, file_url):
+    def insert_photo_resource_info(self, urn, file_url):
         """
         Insert new photo resource in the store, including nie:mimeType and nie:url
         """
@@ -66,10 +67,10 @@ class TrackerCameraTestSuite (CommonTrackerApplicationTest):
             nie:isStoredAs <%(urn)s>
         }
         """ % locals()
-        self.tracker.update (insert)
-        self.assertEquals (self.get_urn_count_by_url (file_url), 1)
+        self.tracker.update(insert)
+        self.assertEquals(self.get_urn_count_by_url(file_url), 1)
 
-    def insert_video_resource_info (self, urn, file_url):
+    def insert_video_resource_info(self, urn, file_url):
         """
         Insert new video resource in the store, including nie:mimeType and nie:url
         """
@@ -100,10 +101,10 @@ class TrackerCameraTestSuite (CommonTrackerApplicationTest):
             nie:isStoredAs <%(urn)s>
         }
         """ % locals()
-        self.tracker.update (insert)
-        self.assertEquals (self.get_urn_count_by_url (file_url), 1)
+        self.tracker.update(insert)
+        self.assertEquals(self.get_urn_count_by_url(file_url), 1)
 
-    def insert_dummy_location_info (self, fileurn, geolocationurn, postaladdressurn):
+    def insert_dummy_location_info(self, fileurn, geolocationurn, postaladdressurn):
         """
         Insert placeholder location info for a file
         """
@@ -121,12 +122,12 @@ class TrackerCameraTestSuite (CommonTrackerApplicationTest):
                       slo:location <%s>
         }
         """ % (postaladdressurn, geolocationurn, postaladdressurn, fileurn, geolocationurn)
-        self.tracker.update (location_insert)
+        self.tracker.update(location_insert)
 
 
 class TrackerCameraPicturesApplicationTests (TrackerCameraTestSuite):
 
-    def test_01_camera_picture (self):
+    def test_01_camera_picture(self):
         """
         Camera simulation:
 
@@ -136,26 +137,30 @@ class TrackerCameraPicturesApplicationTests (TrackerCameraTestSuite):
         4. Ensure no duplicates are found
         """
 
-        fileurn = "tracker://test_camera_picture_01/" + str(random.randint (0,100))
-        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_image ())
-        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_image ())
+        fileurn = "tracker://test_camera_picture_01/" + \
+            str(random.randint(0, 100))
+        origin_filepath = os.path.join(
+            self.get_data_dir(), self.get_test_image())
+        dest_filepath = os.path.join(
+            self.get_dest_dir(), self.get_test_image())
         dest_fileuri = "file://" + dest_filepath
 
-        self.insert_photo_resource_info (fileurn, dest_fileuri)
+        self.insert_photo_resource_info(fileurn, dest_fileuri)
 
         # Copy the image to the dest path
-        self.slowcopy_file (origin_filepath, dest_filepath)
-        assert os.path.exists (dest_filepath)
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Photo', dest_fileuri)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        self.slowcopy_file(origin_filepath, dest_filepath)
+        assert os.path.exists(dest_filepath)
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nmm:Photo', dest_fileuri)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest_filepath)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
+        log("Remove and wait")
+        os.remove(dest_filepath)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
 
-    def test_02_camera_picture_geolocation (self):
+    def test_02_camera_picture_geolocation(self):
         """
         Camera simulation:
 
@@ -166,45 +171,52 @@ class TrackerCameraPicturesApplicationTests (TrackerCameraTestSuite):
         4. Ensure no duplicates are found
         """
 
-        fileurn = "tracker://test_camera_picture_02/" + str(random.randint (0,100))
-        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_image ())
+        fileurn = "tracker://test_camera_picture_02/" + \
+            str(random.randint(0, 100))
+        dest_filepath = os.path.join(
+            self.get_dest_dir(), self.get_test_image())
         dest_fileuri = "file://" + dest_filepath
 
-        geolocationurn = "tracker://test_camera_picture_02_geolocation/" + str(random.randint (0,100))
-        postaladdressurn = "tracker://test_camera_picture_02_postaladdress/" + str(random.randint (0,100))
+        geolocationurn = "tracker://test_camera_picture_02_geolocation/" + \
+            str(random.randint(0, 100))
+        postaladdressurn = "tracker://test_camera_picture_02_postaladdress/" + \
+            str(random.randint(0, 100))
 
-        self.insert_photo_resource_info (fileurn, dest_fileuri)
+        self.insert_photo_resource_info(fileurn, dest_fileuri)
 
         # FIRST, open the file for writing, and just write some garbage, to simulate that
         # we already started recording the video...
-        fdest = open (dest_filepath, 'wb')
-        fdest.write ("some garbage written here")
-        fdest.write ("to simulate we're recording something...")
-        fdest.seek (0)
+        fdest = open(dest_filepath, 'wb')
+        fdest.write("some garbage written here")
+        fdest.write("to simulate we're recording something...")
+        fdest.seek(0)
 
         # SECOND, set slo:location
-        self.insert_dummy_location_info (fileurn, geolocationurn, postaladdressurn)
+        self.insert_dummy_location_info(
+            fileurn, geolocationurn, postaladdressurn)
 
-        #THIRD, start copying the image to the dest path
-        original_file = os.path.join (self.get_data_dir (),self.get_test_image ())
-        self.slowcopy_file_fd (original_file, fdest)
-        fdest.close ()
-        assert os.path.exists (dest_filepath)
+        # THIRD, start copying the image to the dest path
+        original_file = os.path.join(
+            self.get_data_dir(), self.get_test_image())
+        self.slowcopy_file_fd(original_file, fdest)
+        fdest.close()
+        assert os.path.exists(dest_filepath)
 
         # FOURTH, ensure we have only 1 resource
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Photo', dest_fileuri)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nmm:Photo', dest_fileuri)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest_filepath)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
+        log("Remove and wait")
+        os.remove(dest_filepath)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
 
 
 class TrackerCameraVideosApplicationTests (TrackerCameraTestSuite):
 
-    def test_01_camera_video (self):
+    def test_01_camera_video(self):
         """
         Camera video recording simulation:
 
@@ -214,27 +226,30 @@ class TrackerCameraVideosApplicationTests (TrackerCameraTestSuite):
         4. Ensure no duplicates are found
         """
 
-        fileurn = "tracker://test_camera_video_01/" + str(random.randint (0,100))
-        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_video ())
-        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_video ())
+        fileurn = "tracker://test_camera_video_01/" + \
+            str(random.randint(0, 100))
+        origin_filepath = os.path.join(
+            self.get_data_dir(), self.get_test_video())
+        dest_filepath = os.path.join(
+            self.get_dest_dir(), self.get_test_video())
         dest_fileuri = "file://" + dest_filepath
 
         self.insert_video_resource_info(fileurn, dest_fileuri)
 
         # Copy the image to the dest path
-        self.slowcopy_file (origin_filepath, dest_filepath)
-        assert os.path.exists (dest_filepath)
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Video', dest_fileuri)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        self.slowcopy_file(origin_filepath, dest_filepath)
+        assert os.path.exists(dest_filepath)
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nmm:Video', dest_fileuri)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest_filepath)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
-
+        log("Remove and wait")
+        os.remove(dest_filepath)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
 
-    def test_02_camera_video_geolocation (self):
+    def test_02_camera_video_geolocation(self):
         """
         Camera simulation:
 
@@ -245,42 +260,47 @@ class TrackerCameraVideosApplicationTests (TrackerCameraTestSuite):
         4. Ensure no duplicates are found
         """
 
-        fileurn = "tracker://test_camera_video_02/" + str(random.randint (0,100))
-        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_video ())
-        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_video ())
+        fileurn = "tracker://test_camera_video_02/" + \
+            str(random.randint(0, 100))
+        origin_filepath = os.path.join(
+            self.get_data_dir(), self.get_test_video())
+        dest_filepath = os.path.join(
+            self.get_dest_dir(), self.get_test_video())
         dest_fileuri = "file://" + dest_filepath
 
-        geolocationurn = "tracker://test_camera_video_02_geolocation/" + str(random.randint (0,100))
-        postaladdressurn = "tracker://test_camera_video_02_postaladdress/" + str(random.randint (0,100))
+        geolocationurn = "tracker://test_camera_video_02_geolocation/" + \
+            str(random.randint(0, 100))
+        postaladdressurn = "tracker://test_camera_video_02_postaladdress/" + \
+            str(random.randint(0, 100))
 
-        self.insert_video_resource_info (fileurn, dest_fileuri)
+        self.insert_video_resource_info(fileurn, dest_fileuri)
 
         # FIRST, open the file for writing, and just write some garbage, to simulate that
         # we already started recording the video...
-        fdest = open (dest_filepath, 'wb')
-        fdest.write ("some garbage written here")
-        fdest.write ("to simulate we're recording something...")
-        fdest.seek (0)
+        fdest = open(dest_filepath, 'wb')
+        fdest.write("some garbage written here")
+        fdest.write("to simulate we're recording something...")
+        fdest.seek(0)
 
         # SECOND, set slo:location
-        self.insert_dummy_location_info (fileurn, geolocationurn, postaladdressurn)
+        self.insert_dummy_location_info(
+            fileurn, geolocationurn, postaladdressurn)
 
-        #THIRD, start copying the image to the dest path
-        self.slowcopy_file_fd (origin_filepath, fdest)
-        fdest.close ()
-        assert os.path.exists (dest_filepath)
+        # THIRD, start copying the image to the dest path
+        self.slowcopy_file_fd(origin_filepath, fdest)
+        fdest.close()
+        assert os.path.exists(dest_filepath)
 
         # FOURTH, ensure we have only 1 resource
-        dest_id, dest_urn = self.system.store.await_resource_inserted ('nmm:Video', dest_fileuri)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        dest_id, dest_urn = self.system.store.await_resource_inserted(
+            'nmm:Video', dest_fileuri)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest_filepath)
-        self.system.store.await_resource_deleted (dest_id)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
+        log("Remove and wait")
+        os.remove(dest_filepath)
+        self.system.store.await_resource_deleted(dest_id)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
 
 if __name__ == "__main__":
-       ut.main()
-
-
+    ut.main()
diff --git a/tests/functional-tests/601-applications-sync.py b/tests/functional-tests/601-applications-sync.py
index c3747c6..b2b0561 100755
--- a/tests/functional-tests/601-applications-sync.py
+++ b/tests/functional-tests/601-applications-sync.py
@@ -22,7 +22,9 @@
 Tests trying to simulate the behaviour of applications working with tracker
 """
 
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -39,7 +41,7 @@ from common.utils.helpers import log
 
 class TrackerSyncApplicationTests (CommonTrackerApplicationTest):
 
-    def test_01_sync_audio_nb219946 (self):
+    def test_01_sync_audio_nb219946(self):
         """
         Sync simulation (after fix for NB#219946):
 
@@ -62,11 +64,13 @@ class TrackerSyncApplicationTests (CommonTrackerApplicationTest):
         This is because the test already inserted the resource in the store.
         """
 
-        origin_filepath = os.path.join (self.get_data_dir (), self.get_test_music ())
-        dest_filepath = os.path.join (self.get_dest_dir (), self.get_test_music ())
+        origin_filepath = os.path.join(
+            self.get_data_dir(), self.get_test_music())
+        dest_filepath = os.path.join(
+            self.get_dest_dir(), self.get_test_music())
         dest_fileuri = "file://" + dest_filepath
 
-        log ("Synchronizing audio file in '%s'..." % (dest_filepath))
+        log("Synchronizing audio file in '%s'..." % (dest_filepath))
 
         # Insert new resource in the store
         insert = """
@@ -98,25 +102,24 @@ class TrackerSyncApplicationTests (CommonTrackerApplicationTest):
                                      nmm:artistName 'AbBaby'
         }
         """ % (dest_fileuri, dest_fileuri)
-        self.tracker.update (insert)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        self.tracker.update(insert)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         resource_id = self.tracker.get_resource_id(dest_fileuri)
 
         # Copy the image to the dest path
-        self.slowcopy_file (origin_filepath, dest_filepath)
-        assert os.path.exists (dest_filepath)
-        self.tracker.await_resource_inserted ('nmm:MusicPiece', url=dest_fileuri)
+        self.slowcopy_file(origin_filepath, dest_filepath)
+        assert os.path.exists(dest_filepath)
+        self.tracker.await_resource_inserted(
+            'nmm:MusicPiece', url=dest_fileuri)
 
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 1)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 1)
 
         # Clean the new file so the test directory is as before
-        log ("Remove and wait")
-        os.remove (dest_filepath)
-        self.tracker.await_resource_deleted (resource_id)
-        self.assertEquals (self.get_urn_count_by_url (dest_fileuri), 0)
+        log("Remove and wait")
+        os.remove(dest_filepath)
+        self.tracker.await_resource_deleted(resource_id)
+        self.assertEquals(self.get_urn_count_by_url(dest_fileuri), 0)
 
 if __name__ == "__main__":
-       ut.main()
-
-
+    ut.main()
diff --git a/tests/functional-tests/common/utils/applicationstest.py b/tests/functional-tests/common/utils/applicationstest.py
index 72a8b84..ad8e51c 100644
--- a/tests/functional-tests/common/utils/applicationstest.py
+++ b/tests/functional-tests/common/utils/applicationstest.py
@@ -28,7 +28,8 @@ import shutil
 import os
 import time
 
-APPLICATIONS_TMP_DIR = os.path.join (cfg.TEST_MONITORED_TMP_DIR, "test-applications-monitored")
+APPLICATIONS_TMP_DIR = os.path.join(
+    cfg.TEST_MONITORED_TMP_DIR, "test-applications-monitored")
 
 index_dirs = [APPLICATIONS_TMP_DIR]
 CONF_OPTIONS = {
@@ -43,86 +44,84 @@ CONF_OPTIONS = {
 # Copy rate, 10KBps (1024b/100ms)
 SLOWCOPY_RATE = 1024
 
+
 class CommonTrackerApplicationTest (ut.TestCase):
 
-    def get_urn_count_by_url (self, url):
+    def get_urn_count_by_url(self, url):
         select = """
         SELECT ?u WHERE { ?u nie:url \"%s\" }
         """ % (url)
-        return len (self.tracker.query (select))
-
+        return len(self.tracker.query(select))
 
-    def get_test_image (self):
+    def get_test_image(self):
         TEST_IMAGE = "test-image-1.jpg"
         return TEST_IMAGE
 
-    def get_test_video (self):
+    def get_test_video(self):
         TEST_VIDEO = "test-video-1.mp4"
         return TEST_VIDEO
 
-    def get_test_music (self):
-        TEST_AUDIO =  "test-music-1.mp3"
+    def get_test_music(self):
+        TEST_AUDIO = "test-music-1.mp3"
         return TEST_AUDIO
 
-    def get_data_dir (self):
+    def get_data_dir(self):
         return self.datadir
 
-    def get_dest_dir (self):
+    def get_dest_dir(self):
         return APPLICATIONS_TMP_DIR
 
-    def slowcopy_file_fd (self, src, fdest, rate=SLOWCOPY_RATE):
+    def slowcopy_file_fd(self, src, fdest, rate=SLOWCOPY_RATE):
         """
         @rate: bytes per 100ms
         """
-        log ("Copying slowly\n '%s' to\n '%s'" % (src, fdest.name))
-        fsrc = open (src, 'rb')
-        buffer_ = fsrc.read (rate)
+        log("Copying slowly\n '%s' to\n '%s'" % (src, fdest.name))
+        fsrc = open(src, 'rb')
+        buffer_ = fsrc.read(rate)
         while (buffer_ != ""):
-            fdest.write (buffer_)
-            time.sleep (0.1)
-            buffer_ = fsrc.read (rate)
-        fsrc.close ()
-        
+            fdest.write(buffer_)
+            time.sleep(0.1)
+            buffer_ = fsrc.read(rate)
+        fsrc.close()
 
-    def slowcopy_file (self, src, dst, rate=SLOWCOPY_RATE):
+    def slowcopy_file(self, src, dst, rate=SLOWCOPY_RATE):
         """
         @rate: bytes per 100ms
         """
-        fdest = open (dst, 'wb')
-        self.slowcopy_file_fd (src, fdest, rate)
-        fdest.close ()
+        fdest = open(dst, 'wb')
+        self.slowcopy_file_fd(src, fdest, rate)
+        fdest.close()
 
     @classmethod
-    def setUp (self):
+    def setUp(self):
         # Create temp directory to monitor
-        if (os.path.exists (APPLICATIONS_TMP_DIR)):
-            shutil.rmtree (APPLICATIONS_TMP_DIR)
-        os.makedirs (APPLICATIONS_TMP_DIR)
+        if (os.path.exists(APPLICATIONS_TMP_DIR)):
+            shutil.rmtree(APPLICATIONS_TMP_DIR)
+        os.makedirs(APPLICATIONS_TMP_DIR)
 
         # Use local directory if available. Installation otherwise.
-        if os.path.exists (os.path.join (os.getcwd (),
-                                         "test-apps-data")):
-            self.datadir = os.path.join (os.getcwd (),
-                                         "test-apps-data")
+        if os.path.exists(os.path.join(os.getcwd(),
+                                       "test-apps-data")):
+            self.datadir = os.path.join(os.getcwd(),
+                                        "test-apps-data")
         else:
-            self.datadir = os.path.join (cfg.DATADIR,
-                                         "tracker-tests",
-                                         "test-apps-data")
-
+            self.datadir = os.path.join(cfg.DATADIR,
+                                        "tracker-tests",
+                                        "test-apps-data")
 
-        self.system = TrackerSystemAbstraction ()
-        self.system.tracker_all_testing_start (CONF_OPTIONS)
+        self.system = TrackerSystemAbstraction()
+        self.system.tracker_all_testing_start(CONF_OPTIONS)
 
         # Returns when ready
         self.tracker = self.system.store
 
-        log ("Ready to go!")
+        log("Ready to go!")
 
     @classmethod
-    def tearDown (self):
-        #print "Stopping the daemon in test mode (Doing nothing now)"
-        self.system.tracker_all_testing_stop ()
+    def tearDown(self):
+        # print "Stopping the daemon in test mode (Doing nothing now)"
+        self.system.tracker_all_testing_stop()
 
         # Remove monitored directory
-        if (os.path.exists (APPLICATIONS_TMP_DIR)):
-            shutil.rmtree (APPLICATIONS_TMP_DIR)
+        if (os.path.exists(APPLICATIONS_TMP_DIR)):
+            shutil.rmtree(APPLICATIONS_TMP_DIR)
diff --git a/tests/functional-tests/common/utils/dconf.py b/tests/functional-tests/common/utils/dconf.py
index 0af94ce..986aeee 100644
--- a/tests/functional-tests/common/utils/dconf.py
+++ b/tests/functional-tests/common/utils/dconf.py
@@ -5,7 +5,9 @@ import os
 
 from helpers import log
 
+
 class DConfClient(object):
+
     """
     Allow changing Tracker configuration in DConf.
 
@@ -20,7 +22,7 @@ class DConfClient(object):
     break.
     """
 
-    def __init__ (self, schema):
+    def __init__(self, schema):
         self._settings = Gio.Settings.new(schema)
 
         backend = self._settings.get_property('backend')
@@ -69,10 +71,10 @@ class DConfClient(object):
         # XDG_CONFIG_HOME is useless, so we use HOME. This code should not be
         # needed unless for some reason the test is not being run via the
         # 'test-runner.sh' script.
-        dconf_db = os.path.join (os.environ ["HOME"],
-                                 ".config",
-                                 "dconf",
-                                 "trackertest")
-        if os.path.exists (dconf_db):
-            log ("[Conf] Removing dconf database: " + dconf_db)
-            os.remove (dconf_db)
+        dconf_db = os.path.join(os.environ["HOME"],
+                                ".config",
+                                "dconf",
+                                "trackertest")
+        if os.path.exists(dconf_db):
+            log("[Conf] Removing dconf database: " + dconf_db)
+            os.remove(dconf_db)
diff --git a/tests/functional-tests/common/utils/expectedFailure.py b/tests/functional-tests/common/utils/expectedFailure.py
index a496ee2..bdc762a 100644
--- a/tests/functional-tests/common/utils/expectedFailure.py
+++ b/tests/functional-tests/common/utils/expectedFailure.py
@@ -1,25 +1,25 @@
 #!/usr/bin/python
 
-## Code taken and modified from unittest2 framework (case.py)
+# Code taken and modified from unittest2 framework (case.py)
 
-## Copyright (c) 1999-2003 Steve Purcell
-## Copyright (c) 2003-2010 Python Software Foundation
-## Copyright (c) 2010, Nokia (ivan frade nokia com)
+# Copyright (c) 1999-2003 Steve Purcell
+# Copyright (c) 2003-2010 Python Software Foundation
+# Copyright (c) 2010, Nokia (ivan frade nokia com)
 
-## This module is free software, and you may redistribute it and/or modify
-## it under the same terms as Python itself, so long as this copyright message
-## and disclaimer are retained in their original form.
+# This module is free software, and you may redistribute it and/or modify
+# it under the same terms as Python itself, so long as this copyright message
+# and disclaimer are retained in their original form.
 
-## IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
-## SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
-## THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-## DAMAGE.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+# THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+# DAMAGE.
 
-## THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
-## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-## PARTICULAR PURPOSE.  THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
-## AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
-## SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+# THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE.  THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+# AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 
 """
 Write values in tracker and check the actual values are written
@@ -30,6 +30,7 @@ import unittest2 as ut
 from unittest2.compatibility import wraps
 import configuration as cfg
 
+
 def expectedFailureBug(bugnumber):
     """
     Decorator to mark bugs with ExpectedFailure. In case that a expected failure PASS
@@ -37,22 +38,24 @@ def expectedFailureBug(bugnumber):
 
     Keep your bugs and tests in sync!
     """
-    def decorator (func):
+    def decorator(func):
         @wraps(func)
         def wrapper(*args, **kwargs):
             try:
                 func(*args, **kwargs)
             except Exception:
                 raise ut.case._ExpectedFailure(sys.exc_info())
-            raise Exception ("Unexpected success. This should fail because of bug " +str(bugnumber))
+            raise Exception(
+                "Unexpected success. This should fail because of bug " + str(bugnumber))
         return wrapper
     return decorator
 
+
 def expectedFailureJournal():
     """
     Decorator to handle tests that are expected to fail when journal is disabled.
     """
-    def decorator (func):
+    def decorator(func):
         # no wrapping if journal is enabled, test is expected to pass
         if not cfg.disableJournal:
             return func
@@ -63,6 +66,7 @@ def expectedFailureJournal():
                 func(*args, **kwargs)
             except Exception:
                 raise ut.case._ExpectedFailure(sys.exc_info())
-            raise Exception ("Unexpected success. This should fail because journal is disabled")
+            raise Exception(
+                "Unexpected success. This should fail because journal is disabled")
         return wrapper
     return decorator
diff --git a/tests/functional-tests/common/utils/extractor.py b/tests/functional-tests/common/utils/extractor.py
index 8dd0560..36ed0b7 100644
--- a/tests/functional-tests/common/utils/extractor.py
+++ b/tests/functional-tests/common/utils/extractor.py
@@ -26,6 +26,7 @@ import subprocess
 
 
 class ExtractorParser(object):
+
     def parse_tracker_extract_output(self, text):
         """
         Parse stdout of `tracker-extract --file` to get SPARQL statements.
@@ -47,9 +48,9 @@ class ExtractorParser(object):
                 value = extras[value]
 
             if metadata.has_key(att):
-                metadata [att].append(value)
+                metadata[att].append(value)
             else:
-                metadata [att] = [value]
+                metadata[att] = [value]
 
         return metadata
 
@@ -104,7 +105,7 @@ class ExtractorParser(object):
         grouped_lines = []
         current_line = ""
         anon_node_open = False
-        for l in embedded.split ("\n\t"):
+        for l in embedded.split("\n\t"):
             if "[" in l:
                 current_line = current_line + l
                 anon_node_open = True
@@ -113,7 +114,7 @@ class ExtractorParser(object):
             if "]" in l:
                 anon_node_open = False
                 current_line += l
-                final_lines = self.__handle_anon_nodes (current_line.strip ())
+                final_lines = self.__handle_anon_nodes(current_line.strip())
                 grouped_lines = grouped_lines + final_lines
                 current_line = ""
                 continue
@@ -121,23 +122,24 @@ class ExtractorParser(object):
             if anon_node_open:
                 current_line += l
             else:
-                if (len (l.strip ()) == 0):
+                if (len(l.strip()) == 0):
                     continue
-                    
-                final_lines = self.__handle_multivalues (l.strip ())
+
+                final_lines = self.__handle_multivalues(l.strip())
                 grouped_lines = grouped_lines + final_lines
 
-        return map (self.__clean_value, grouped_lines)
+        return map(self.__clean_value, grouped_lines)
 
     def __process_where_part(self, where):
-        gettags = re.compile ("(\?\w+)\ a\ nao:Tag\ ;\ nao:prefLabel\ \"([\w\ -]+)\"")
+        gettags = re.compile(
+            "(\?\w+)\ a\ nao:Tag\ ;\ nao:prefLabel\ \"([\w\ -]+)\"")
         tags = {}
-        for l in where.split ("\n"):
-            if len (l) == 0:
+        for l in where.split("\n"):
+            if len(l) == 0:
                 continue
-            match = gettags.search (l)
+            match = gettags.search(l)
             if (match):
-                tags [match.group(1)] = match.group (2)
+                tags[match.group(1)] = match.group(2)
             else:
                 print "This line is not a tag:", l
 
@@ -150,17 +152,17 @@ class ExtractorParser(object):
            -> a nfo:Image ;
            -> a nmm:Photo ;
         """
-        hasEscapedComma = re.compile ("\".+,.+\"")
+        hasEscapedComma = re.compile("\".+,.+\"")
 
-        if "," in line and not hasEscapedComma.search (line):
-            prop, multival = line.split (" ", 1)
+        if "," in line and not hasEscapedComma.search(line):
+            prop, multival = line.split(" ", 1)
             results = []
-            for value in multival.split (","):
-                results.append ("%s %s" % (prop, value.strip ()))
+            for value in multival.split(","):
+                results.append("%s %s" % (prop, value.strip()))
             return results
         else:
             return [line]
-       
+
     def __handle_anon_nodes(self, line):
         """
         Traslates anonymous nodes in 'flat' properties:
@@ -175,11 +177,11 @@ class ExtractorParser(object):
                 -> nfo:hasMediaFileListEntry:entryUrl "file://x.mp3"
 
         """
-        
+
         # hasTag case
-        if line.startswith ("nao:hasTag"):
-            getlabel = re.compile ("nao:prefLabel\ \"([\w\ -]+)\"")
-            match = getlabel.search (line)
+        if line.startswith("nao:hasTag"):
+            getlabel = re.compile("nao:prefLabel\ \"([\w\ -]+)\"")
+            match = getlabel.search(line)
             if (match):
                 line = 'nao:hasTag:prefLabel "%s" ;' % (match.group(1))
                 return [line]
@@ -188,32 +190,34 @@ class ExtractorParser(object):
                 return [line]
 
         # location case
-        elif line.startswith ("slo:location"):
+        elif line.startswith("slo:location"):
             results = []
 
             # Can have country AND/OR city
-            getpa = re.compile ("slo:postalAddress\ \<([\w:-]+)\>")
-            pa_match = getpa.search (line)
-            
+            getpa = re.compile("slo:postalAddress\ \<([\w:-]+)\>")
+            pa_match = getpa.search(line)
+
             if (pa_match):
-                results.append ('slo:location:postalAddress "%s" ;' % (pa_match.group(1)))
+                results.append(
+                    'slo:location:postalAddress "%s" ;' % (pa_match.group(1)))
             else:
                 print "FIXME another location subproperty in ", line
 
             return results
-        elif line.startswith ("nco:creator"):
-            getcreator = re.compile ("nco:fullname\ \"([\w\ ]+)\"")
-            creator_match = getcreator.search (line)
+        elif line.startswith("nco:creator"):
+            getcreator = re.compile("nco:fullname\ \"([\w\ ]+)\"")
+            creator_match = getcreator.search(line)
 
             if (creator_match):
-                new_line = 'nco:creator:fullname "%s" ;' % (creator_match.group (1))
+                new_line = 'nco:creator:fullname "%s" ;' % (
+                    creator_match.group(1))
                 return [new_line]
             else:
                 print "Something special in this line '%s'" % (line)
 
-        elif line.startswith ("nfo:hasMediaFileListEntry"):
-            return self.__handle_playlist_entries (line)
-        
+        elif line.startswith("nfo:hasMediaFileListEntry"):
+            return self.__handle_playlist_entries(line)
+
         else:
             return [line]
 
@@ -225,14 +229,15 @@ class ExtractorParser(object):
           -> nfo:hMFLE:entryUrl '...'
           ...
         """
-        geturl = re.compile ("nfo:entryUrl \"([\w\.\:\/]+)\"")
-        entries = line.strip () [len ("nfo:hasMediaFileListEntry"):]
+        geturl = re.compile("nfo:entryUrl \"([\w\.\:\/]+)\"")
+        entries = line.strip()[len("nfo:hasMediaFileListEntry"):]
         results = []
-        for entry in entries.split (","):
-            url_match = geturl.search (entry)
+        for entry in entries.split(","):
+            url_match = geturl.search(entry)
             if (url_match):
-                new_line = 'nfo:hasMediaFileListEntry:entryUrl "%s" ;' % (url_match.group (1))
-                results.append (new_line)
+                new_line = 'nfo:hasMediaFileListEntry:entryUrl "%s" ;' % (
+                    url_match.group(1))
+                results.append(new_line)
             else:
                 print " *** Something special in this line '%s'" % (entry)
         return results
@@ -241,16 +246,16 @@ class ExtractorParser(object):
         """
         the value comes with a ';' or a '.' at the end
         """
-        if (len (value) < 2):
-            return value.strip ()
-        
-        clean = value.strip ()
+        if (len(value) < 2):
+            return value.strip()
+
+        clean = value.strip()
         if value[-1] in [';', '.']:
-            clean = value [:-1]
+            clean = value[:-1]
+
+        clean = clean.replace("\"", "")
 
-        clean = clean.replace ("\"", "")
-            
-        return clean.strip ()
+        return clean.strip()
 
 
 def get_tracker_extract_output(filename, mime_type=None):
@@ -258,14 +263,14 @@ def get_tracker_extract_output(filename, mime_type=None):
     Runs `tracker-extract --file` to extract metadata from a file.
     """
 
-    tracker_extract = os.path.join (cfg.EXEC_PREFIX, 'tracker-extract')
+    tracker_extract = os.path.join(cfg.EXEC_PREFIX, 'tracker-extract')
     command = [tracker_extract, '--file', filename]
     if mime_type is not None:
         command.extend(['--mime', mime_type])
 
     try:
-        log ('Running: %s' % ' '.join(command))
-        output = subprocess.check_output (command)
+        log('Running: %s' % ' '.join(command))
+        output = subprocess.check_output(command)
     except subprocess.CalledProcessError as e:
         raise Exception("Error %i from tracker-extract, output: %s" %
                         (e.returncode, e.output))
diff --git a/tests/functional-tests/common/utils/helpers.py b/tests/functional-tests/common/utils/helpers.py
index b34c3d7..cf0d3ee 100644
--- a/tests/functional-tests/common/utils/helpers.py
+++ b/tests/functional-tests/common/utils/helpers.py
@@ -30,16 +30,20 @@ import re
 import configuration as cfg
 import options
 
+
 class NoMetadataException (Exception):
     pass
 
 REASONABLE_TIMEOUT = 30
 
-def log (message):
-    if options.is_verbose ():
+
+def log(message):
+    if options.is_verbose():
         print (message)
 
+
 class Helper:
+
     """
     Abstract helper for Tracker processes. Launches the process manually
     and waits for it to appear on the session bus.
@@ -58,7 +62,7 @@ class Helper:
     BUS_NAME = None
     PROCESS_NAME = None
 
-    def __init__ (self):
+    def __init__(self):
         self.loop = None
         self.bus = None
         self.bus_admin = None
@@ -68,63 +72,66 @@ class Helper:
         Handler to abort test if an exception occurs inside the GLib main loop.
         """
         old_hook = sys.excepthook
+
         def new_hook(etype, evalue, etb):
             old_hook(etype, evalue, etb)
             GLib.MainLoop.quit(loop)
             sys.exit()
         sys.excepthook = new_hook
 
-    def _get_bus (self):
+    def _get_bus(self):
         if self.bus is not None:
             return
 
-        self.loop = GObject.MainLoop ()
+        self.loop = GObject.MainLoop()
 
         self.install_glib_excepthook(self.loop)
 
-        dbus_loop = DBusGMainLoop (set_as_default=True)
-        self.bus = dbus.SessionBus (dbus_loop)
+        dbus_loop = DBusGMainLoop(set_as_default=True)
+        self.bus = dbus.SessionBus(dbus_loop)
 
-        obj = self.bus.get_object ("org.freedesktop.DBus",
-                                   "/org/freedesktop/DBus")
-        self.bus_admin = dbus.Interface (obj, dbus_interface = "org.freedesktop.DBus")
+        obj = self.bus.get_object("org.freedesktop.DBus",
+                                  "/org/freedesktop/DBus")
+        self.bus_admin = dbus.Interface(
+            obj, dbus_interface="org.freedesktop.DBus")
 
-    def _start_process (self):
-        path = getattr (self,
-                        "PROCESS_PATH",
-                        os.path.join (cfg.EXEC_PREFIX, self.PROCESS_NAME))
-        flags = getattr (self,
-                         "FLAGS",
-                         [])
+    def _start_process(self):
+        path = getattr(self,
+                       "PROCESS_PATH",
+                       os.path.join(cfg.EXEC_PREFIX, self.PROCESS_NAME))
+        flags = getattr(self,
+                        "FLAGS",
+                        [])
 
-        if options.is_manual_start ():
+        if options.is_manual_start():
             print ("Start %s manually" % self.PROCESS_NAME)
         else:
             kws = {}
 
-            if not options.is_verbose ():
-                FNULL = open ('/dev/null', 'w')
-                kws = { 'stdout': FNULL, 'stderr': FNULL }
+            if not options.is_verbose():
+                FNULL = open('/dev/null', 'w')
+                kws = {'stdout': FNULL, 'stderr': FNULL}
 
             command = [path] + flags
-            log ("Starting %s" % ' '.join(command))
-            return subprocess.Popen ([path] + flags, **kws)
+            log("Starting %s" % ' '.join(command))
+            return subprocess.Popen([path] + flags, **kws)
 
-    def _name_owner_changed_cb (self, name, old_owner, new_owner):
+    def _name_owner_changed_cb(self, name, old_owner, new_owner):
         if name == self.BUS_NAME:
             if old_owner == '' and new_owner != '':
-                log ("[%s] appeared in the bus" % self.PROCESS_NAME)
+                log("[%s] appeared in the bus" % self.PROCESS_NAME)
                 self.available = True
-            elif old_owner != ''  and new_owner == '':
-                log ("[%s] disappeared from the bus" % self.PROCESS_NAME)
+            elif old_owner != '' and new_owner == '':
+                log("[%s] disappeared from the bus" % self.PROCESS_NAME)
                 self.available = False
             else:
-                log ("[%s] name change %s -> %s" % (self.PROCESS_NAME, old_owner, new_owner))
+                log("[%s] name change %s -> %s" %
+                    (self.PROCESS_NAME, old_owner, new_owner))
 
-            self.loop.quit ()
+            self.loop.quit()
 
-    def _process_watch_cb (self):
-        status = self.process.poll ()
+    def _process_watch_cb(self):
+        status = self.process.poll()
 
         if status is None:
             return True
@@ -132,46 +139,50 @@ class Helper:
         if status == 0 and not self.abort_if_process_exits_with_status_0:
             return True
 
-        raise Exception("%s exited with status: %i" % (self.PROCESS_NAME, status))
+        raise Exception("%s exited with status: %i" %
+                        (self.PROCESS_NAME, status))
 
-    def _timeout_on_idle_cb (self):
-        log ("[%s] Timeout waiting... asumming idle." % self.PROCESS_NAME)
-        self.loop.quit ()
+    def _timeout_on_idle_cb(self):
+        log("[%s] Timeout waiting... asumming idle." % self.PROCESS_NAME)
+        self.loop.quit()
         self.timeout_id = None
         return False
 
-
-    def start (self):
+    def start(self):
         """
         Start an instance of process and wait for it to appear on the bus.
         """
 
-        self._get_bus ()
+        self._get_bus()
 
-        if (self.bus_admin.NameHasOwner (self.BUS_NAME)):
-            raise Exception ("Unable to start test instance of %s: already running" % self.PROCESS_NAME)
+        if (self.bus_admin.NameHasOwner(self.BUS_NAME)):
+            raise Exception(
+                "Unable to start test instance of %s: already running" % self.PROCESS_NAME)
 
-        self.name_owner_match = self.bus.add_signal_receiver (self._name_owner_changed_cb,
+        self.name_owner_match = self.bus.add_signal_receiver(
+            self._name_owner_changed_cb,
                                                               signal_name="NameOwnerChanged",
                                                               path="/org/freedesktop/DBus",
                                                               dbus_interface="org.freedesktop.DBus")
 
-        self.process = self._start_process ()
-        log ('[%s] Started process %i' % (self.PROCESS_NAME, self.process.pid))
+        self.process = self._start_process()
+        log('[%s] Started process %i' % (self.PROCESS_NAME, self.process.pid))
 
-        self.process_watch_timeout = GLib.timeout_add (200, self._process_watch_cb)
+        self.process_watch_timeout = GLib.timeout_add(
+            200, self._process_watch_cb)
 
         self.abort_if_process_exits_with_status_0 = True
 
         # Run the loop until the bus name appears, or the process dies.
-        self.loop.run ()
+        self.loop.run()
 
         self.abort_if_process_exits_with_status_0 = False
 
-    def stop (self):
+    def stop(self):
         start = time.time()
         if self.process.poll() == None:
-            # It should step out of this loop when the miner disappear from the bus
+            # It should step out of this loop when the miner disappear from the
+            # bus
             GLib.source_remove(self.process_watch_timeout)
 
             self.process.terminate()
@@ -180,25 +191,27 @@ class Helper:
                 time.sleep(0.1)
 
                 if time.time() > (start + REASONABLE_TIMEOUT):
-                    log ("[%s] Failed to terminate, sending kill!" % self.PROCESS_NAME)
+                    log("[%s] Failed to terminate, sending kill!" %
+                        self.PROCESS_NAME)
                     self.process.kill()
                     self.process.wait()
 
-        log ("[%s] stopped." % self.PROCESS_NAME)
+        log("[%s] stopped." % self.PROCESS_NAME)
         # Disconnect the signals of the next start we get duplicated messages
-        self.bus._clean_up_signal_match (self.name_owner_match)
+        self.bus._clean_up_signal_match(self.name_owner_match)
 
-    def kill (self):
-        self.process.kill ()
+    def kill(self):
+        self.process.kill()
 
         # Name owner changed callback should take us out from this loop
-        self.loop.run ()
+        self.loop.run()
 
-        log ("[%s] killed." % self.PROCESS_NAME)
-        self.bus._clean_up_signal_match (self.name_owner_match)
+        log("[%s] killed." % self.PROCESS_NAME)
+        self.bus._clean_up_signal_match(self.name_owner_match)
 
 
 class StoreHelper (Helper):
+
     """
     Wrapper for the Store API
 
@@ -211,40 +224,46 @@ class StoreHelper (Helper):
 
     graph_updated_handler_id = 0
 
-    def start (self):
-        Helper.start (self)
+    def start(self):
+        Helper.start(self)
 
-        tracker = self.bus.get_object (cfg.TRACKER_BUSNAME,
-                                       cfg.TRACKER_OBJ_PATH)
+        tracker = self.bus.get_object(cfg.TRACKER_BUSNAME,
+                                      cfg.TRACKER_OBJ_PATH)
 
-        self.resources = dbus.Interface (tracker,
-                                         dbus_interface=cfg.RESOURCES_IFACE)
+        self.resources = dbus.Interface(tracker,
+                                        dbus_interface=cfg.RESOURCES_IFACE)
 
-        tracker_backup = self.bus.get_object (cfg.TRACKER_BUSNAME, cfg.TRACKER_BACKUP_OBJ_PATH)
-        self.backup_iface = dbus.Interface (tracker_backup, dbus_interface=cfg.BACKUP_IFACE)
+        tracker_backup = self.bus.get_object(
+            cfg.TRACKER_BUSNAME, cfg.TRACKER_BACKUP_OBJ_PATH)
+        self.backup_iface = dbus.Interface(
+            tracker_backup, dbus_interface=cfg.BACKUP_IFACE)
 
-        tracker_stats = self.bus.get_object (cfg.TRACKER_BUSNAME, cfg.TRACKER_STATS_OBJ_PATH)
+        tracker_stats = self.bus.get_object(
+            cfg.TRACKER_BUSNAME, cfg.TRACKER_STATS_OBJ_PATH)
 
-        self.stats_iface = dbus.Interface (tracker_stats, dbus_interface=cfg.STATS_IFACE)
+        self.stats_iface = dbus.Interface(
+            tracker_stats, dbus_interface=cfg.STATS_IFACE)
 
-        tracker_status = self.bus.get_object (cfg.TRACKER_BUSNAME,
-                                              cfg.TRACKER_STATUS_OBJ_PATH)
-        self.status_iface = dbus.Interface (tracker_status, dbus_interface=cfg.STATUS_IFACE)
+        tracker_status = self.bus.get_object(cfg.TRACKER_BUSNAME,
+                                             cfg.TRACKER_STATUS_OBJ_PATH)
+        self.status_iface = dbus.Interface(
+            tracker_status, dbus_interface=cfg.STATUS_IFACE)
 
-        log ("[%s] booting..." % self.PROCESS_NAME)
-        self.status_iface.Wait ()
-        log ("[%s] ready." % self.PROCESS_NAME)
+        log("[%s] booting..." % self.PROCESS_NAME)
+        self.status_iface.Wait()
+        log("[%s] ready." % self.PROCESS_NAME)
 
-        self.reset_graph_updates_tracking ()
-        self.graph_updated_handler_id = self.bus.add_signal_receiver (self._graph_updated_cb,
-                                                                      signal_name = "GraphUpdated",
-                                                                      path = cfg.TRACKER_OBJ_PATH,
-                                                                      dbus_interface = cfg.RESOURCES_IFACE)
+        self.reset_graph_updates_tracking()
+        self.graph_updated_handler_id = self.bus.add_signal_receiver(
+            self._graph_updated_cb,
+                                                                      signal_name="GraphUpdated",
+                                                                      path=cfg.TRACKER_OBJ_PATH,
+                                                                      dbus_interface=cfg.RESOURCES_IFACE)
 
-    def stop (self):
-        Helper.stop (self)
+    def stop(self):
+        Helper.stop(self)
 
-        self.bus._clean_up_signal_match (self.graph_updated_handler_id)
+        self.bus._clean_up_signal_match(self.graph_updated_handler_id)
 
     # A system to follow GraphUpdated and make sure all changes are tracked.
     # This code saves every change notification received, and exposes methods
@@ -252,20 +271,20 @@ class StoreHelper (Helper):
     # the list of events already received and wait for more if the event has
     # not yet happened.
 
-    def reset_graph_updates_tracking (self):
+    def reset_graph_updates_tracking(self):
         self.inserts_list = []
         self.deletes_list = []
         self.inserts_match_function = None
         self.deletes_match_function = None
         self.graph_updated_timed_out = False
 
-    def _graph_updated_timeout_cb (self):
+    def _graph_updated_timeout_cb(self):
         # Don't fail here, exceptions don't get propagated correctly
         # from the GMainLoop
         self.graph_updated_timed_out = True
-        self.loop.quit ()
+        self.loop.quit()
 
-    def _graph_updated_cb (self, class_name, deletes_list, inserts_list):
+    def _graph_updated_cb(self, class_name, deletes_list, inserts_list):
         """
         Process notifications from tracker-store on resource changes.
         """
@@ -274,24 +293,27 @@ class StoreHelper (Helper):
         if inserts_list is not None:
             if self.inserts_match_function is not None:
                 # The match function will remove matched entries from the list
-                (exit_loop, inserts_list) = self.inserts_match_function (inserts_list)
+                (exit_loop, inserts_list) = self.inserts_match_function(
+                    inserts_list)
             self.inserts_list += inserts_list
 
         if deletes_list is not None:
             if self.deletes_match_function is not None:
-                (exit_loop, deletes_list) = self.deletes_match_function (deletes_list)
+                (exit_loop, deletes_list) = self.deletes_match_function(
+                    deletes_list)
             self.deletes_list += deletes_list
 
         if exit_loop:
             GLib.source_remove(self.graph_updated_timeout_id)
             self.graph_updated_timeout_id = 0
-            self.loop.quit ()
+            self.loop.quit()
 
-    def _enable_await_timeout (self):
-        self.graph_updated_timeout_id = GLib.timeout_add_seconds (REASONABLE_TIMEOUT,
+    def _enable_await_timeout(self):
+        self.graph_updated_timeout_id = GLib.timeout_add_seconds(
+            REASONABLE_TIMEOUT,
                                                                   self._graph_updated_timeout_cb)
 
-    def await_resource_inserted (self, rdf_class, url = None, title = None, required_property = None):
+    def await_resource_inserted(self, rdf_class, url=None, title=None, required_property=None):
         """
         Block until a resource matching the parameters becomes available
         """
@@ -300,14 +322,18 @@ class StoreHelper (Helper):
         self.matched_resource_urn = None
         self.matched_resource_id = None
 
-        log ("Await new %s (%i existing inserts)" % (rdf_class, len (self.inserts_list)))
+        log("Await new %s (%i existing inserts)" %
+            (rdf_class, len(self.inserts_list)))
 
         if required_property is not None:
-            required_property_id = self.get_resource_id_by_uri(required_property)
-            log ("Required property %s id %i" % (required_property, required_property_id))
+            required_property_id = self.get_resource_id_by_uri(
+                required_property)
+            log("Required property %s id %i" %
+                (required_property, required_property_id))
+
+        known_subjects = set()
 
-        known_subjects = set ()
-        def find_resource_insertion (inserts_list):
+        def find_resource_insertion(inserts_list):
             matched_creation = (self.matched_resource_id is not None)
             matched_required_property = False
             remaining_events = []
@@ -319,7 +345,7 @@ class StoreHelper (Helper):
                 id = insert[1]
 
                 if not matched_creation and id not in known_subjects:
-                    known_subjects.add (id)
+                    known_subjects.add(id)
 
                     where = "  ?urn a %s " % rdf_class
 
@@ -329,24 +355,26 @@ class StoreHelper (Helper):
                     if title is not None:
                         where += "; nie:title \"%s\"" % title
 
-                    query = "SELECT ?urn WHERE { %s FILTER (tracker:id(?urn) = %s)}" % (where, insert[1])
-                    result_set = self.query (query)
+                    query = "SELECT ?urn WHERE { %s FILTER (tracker:id(?urn) = %s)}" % (
+                        where, insert[1])
+                    result_set = self.query(query)
 
-                    if len (result_set) > 0:
+                    if len(result_set) > 0:
                         matched_creation = True
                         self.matched_resource_urn = result_set[0][0]
                         self.matched_resource_id = insert[1]
-                        log ("Matched creation of resource %s (%i)" %
-                             (self.matched_resource_urn,
-                              self.matched_resource_id))
+                        log("Matched creation of resource %s (%i)" %
+                            (self.matched_resource_urn,
+                             self.matched_resource_id))
                         if required_property is not None:
-                            log ("Waiting for property %s (%i) to be set" %
-                                 (required_property, required_property_id))
+                            log("Waiting for property %s (%i) to be set" %
+                                (required_property, required_property_id))
 
                 if required_property is not None and matched_creation and not matched_required_property:
                     if id == self.matched_resource_id and insert[2] == required_property_id:
                         matched_required_property = True
-                        log ("Matched %s %s" % (self.matched_resource_urn, required_property))
+                        log("Matched %s %s" %
+                            (self.matched_resource_urn, required_property))
 
                 if not matched_creation or id != self.matched_resource_id:
                     remaining_events += [insert]
@@ -354,34 +382,37 @@ class StoreHelper (Helper):
             matched = matched_creation if required_property is None else matched_required_property
             return matched, remaining_events
 
-        def match_cb (inserts_list):
-            matched, remaining_events = find_resource_insertion (inserts_list)
+        def match_cb(inserts_list):
+            matched, remaining_events = find_resource_insertion(inserts_list)
             exit_loop = matched
             return exit_loop, remaining_events
 
         # Check the list of previously received events for matches
-        (existing_match, self.inserts_list) = find_resource_insertion (self.inserts_list)
+        (existing_match, self.inserts_list) = find_resource_insertion(
+            self.inserts_list)
 
         if not existing_match:
-            self._enable_await_timeout ()
+            self._enable_await_timeout()
             self.inserts_match_function = match_cb
             # Run the event loop until the correct notification arrives
-            self.loop.run ()
+            self.loop.run()
             self.inserts_match_function = None
 
         if self.graph_updated_timed_out:
-            raise Exception ("Timeout waiting for resource: class %s, URL %s, title %s" % (rdf_class, url, 
title))
+            raise Exception(
+                "Timeout waiting for resource: class %s, URL %s, title %s" % (rdf_class, url, title))
 
         return (self.matched_resource_id, self.matched_resource_urn)
 
-    def await_resource_deleted (self, id, fail_message = None):
+    def await_resource_deleted(self, id, fail_message=None):
         """
         Block until we are notified of a resources deletion
         """
         assert (self.deletes_match_function == None)
 
-        def find_resource_deletion (deletes_list):
-            log ("find_resource_deletion: looking for %i in %s" % (id, deletes_list))
+        def find_resource_deletion(deletes_list):
+            log("find_resource_deletion: looking for %i in %s" %
+                (id, deletes_list))
 
             matched = False
             remaining_events = []
@@ -394,31 +425,33 @@ class StoreHelper (Helper):
 
             return matched, remaining_events
 
-        def match_cb (deletes_list):
+        def match_cb(deletes_list):
             matched, remaining_events = find_resource_deletion(deletes_list)
             exit_loop = matched
             return exit_loop, remaining_events
 
-        log ("Await deletion of %i (%i existing)" % (id, len (self.deletes_list)))
+        log("Await deletion of %i (%i existing)" %
+            (id, len(self.deletes_list)))
 
-        (existing_match, self.deletes_list) = find_resource_deletion (self.deletes_list)
+        (existing_match, self.deletes_list) = find_resource_deletion(
+            self.deletes_list)
 
         if not existing_match:
-            self._enable_await_timeout ()
+            self._enable_await_timeout()
             self.deletes_match_function = match_cb
             # Run the event loop until the correct notification arrives
-            self.loop.run ()
+            self.loop.run()
             self.deletes_match_function = None
 
         if self.graph_updated_timed_out:
             if fail_message is not None:
-                raise Exception (fail_message)
+                raise Exception(fail_message)
             else:
-                raise Exception ("Resource %i has not been deleted." % id)
+                raise Exception("Resource %i has not been deleted." % id)
 
         return
 
-    def await_property_changed (self, subject_id, property_uri):
+    def await_property_changed(self, subject_id, property_uri):
         """
         Block until a property of a resource is updated or inserted.
         """
@@ -426,7 +459,7 @@ class StoreHelper (Helper):
 
         property_id = self.get_resource_id_by_uri(property_uri)
 
-        def find_property_change (inserts_list):
+        def find_property_change(inserts_list):
             matched = False
             remaining_events = []
 
@@ -439,103 +472,103 @@ class StoreHelper (Helper):
 
             return matched, remaining_events
 
-        def match_cb (inserts_list):
-            matched, remaining_events = find_property_change (inserts_list)
+        def match_cb(inserts_list):
+            matched, remaining_events = find_property_change(inserts_list)
             exit_loop = matched
             return exit_loop, remaining_events
 
         # Check the list of previously received events for matches
-        (existing_match, self.inserts_list) = find_property_change (self.inserts_list)
+        (existing_match, self.inserts_list) = find_property_change(
+            self.inserts_list)
 
         if not existing_match:
-            self._enable_await_timeout ()
+            self._enable_await_timeout()
             self.inserts_match_function = match_cb
             # Run the event loop until the correct notification arrives
-            self.loop.run ()
+            self.loop.run()
             self.inserts_match_function = None
 
         if self.graph_updated_timed_out:
-            raise Exception ("Timeout waiting for property change, subject %i "
-                             "property %s" % (subject_id, property_uri))
+            raise Exception("Timeout waiting for property change, subject %i "
+                            "property %s" % (subject_id, property_uri))
 
-    def query (self, query, timeout=5000):
+    def query(self, query, timeout=5000):
         try:
-            return self.resources.SparqlQuery (query, timeout=timeout)
+            return self.resources.SparqlQuery(query, timeout=timeout)
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.resources.SparqlQuery (query, timeout=timeout)
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.resources.SparqlQuery(query, timeout=timeout)
             raise (e)
 
-    def update (self, update_sparql, timeout=5000):
+    def update(self, update_sparql, timeout=5000):
         try:
-            return self.resources.SparqlUpdate (update_sparql, timeout=timeout)
+            return self.resources.SparqlUpdate(update_sparql, timeout=timeout)
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.resources.SparqlUpdate (update_sparql, timeout=timeout)
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.resources.SparqlUpdate(update_sparql, timeout=timeout)
             raise (e)
 
-    def batch_update (self, update_sparql):
+    def batch_update(self, update_sparql):
         try:
-            return self.resources.BatchSparqlUpdate (update_sparql)
+            return self.resources.BatchSparqlUpdate(update_sparql)
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.resources.BatchSparqlUpdate (update_sparql)
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.resources.BatchSparqlUpdate(update_sparql)
             raise (e)
 
-    def batch_commit (self):
-        return self.resources.BatchCommit ()
+    def batch_commit(self):
+        return self.resources.BatchCommit()
 
-    def backup (self, backup_file):
+    def backup(self, backup_file):
         try:
-            self.backup_iface.Save (backup_file)
+            self.backup_iface.Save(backup_file)
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.backup_iface.Save (backup_file)
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.backup_iface.Save(backup_file)
             raise (e)
-            
-    def restore (self, backup_file):
+
+    def restore(self, backup_file):
         try:
-            return self.backup_iface.Restore (backup_file)
+            return self.backup_iface.Restore(backup_file)
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.backup_iface.Restore (backup_file)
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.backup_iface.Restore(backup_file)
             raise (e)
 
-    def get_stats (self):
+    def get_stats(self):
         try:
-            return self.stats_iface.Get ()
+            return self.stats_iface.Get()
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                return self.stats_iface.Get ()
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                return self.stats_iface.Get()
             raise (e)
 
-
-    def get_tracker_iface (self):
+    def get_tracker_iface(self):
         return self.resources
 
-    def count_instances (self, ontology_class):
+    def count_instances(self, ontology_class):
         QUERY = """
         SELECT COUNT(?u) WHERE {
             ?u a %s .
         }
         """
         try:
-            result = self.resources.SparqlQuery (QUERY % (ontology_class))
+            result = self.resources.SparqlQuery(QUERY % (ontology_class))
         except dbus.DBusException as (e):
-            if (e.get_dbus_name().startswith ("org.freedesktop.DBus")):
-                self.start ()
-                result = self.resources.SparqlQuery (QUERY % (ontology_class))
+            if (e.get_dbus_name().startswith("org.freedesktop.DBus")):
+                self.start()
+                result = self.resources.SparqlQuery(QUERY % (ontology_class))
             else:
                 raise (e)
-            
-        if (len (result) == 1):
-            return int (result [0][0])
+
+        if (len(result) == 1):
+            return int(result[0][0])
         else:
             return -1
 
@@ -546,11 +579,11 @@ class StoreHelper (Helper):
         result = self.query(
             'SELECT tracker:id(%s) WHERE { }' % uri)
         if len(result) == 1:
-            return int (result [0][0])
+            return int(result[0][0])
         elif len(result) == 0:
-            raise Exception ("No entry for resource %s" % uri)
+            raise Exception("No entry for resource %s" % uri)
         else:
-            raise Exception ("Multiple entries for resource %s" % uri)
+            raise Exception("Multiple entries for resource %s" % uri)
 
     # FIXME: rename to get_resource_id_by_nepomuk_url !!
     def get_resource_id(self, url):
@@ -560,44 +593,44 @@ class StoreHelper (Helper):
         result = self.query(
             'SELECT tracker:id(?r) WHERE { ?r nie:url "%s" }' % url)
         if len(result) == 1:
-            return int (result [0][0])
+            return int(result[0][0])
         elif len(result) == 0:
-            raise Exception ("No entry for resource %s" % url)
+            raise Exception("No entry for resource %s" % url)
         else:
-            raise Exception ("Multiple entries for resource %s" % url)
+            raise Exception("Multiple entries for resource %s" % url)
 
-    def ask (self, ask_query):
-        assert ask_query.strip ().startswith ("ASK")
-        result = self.query (ask_query)
-        assert len (result) == 1
+    def ask(self, ask_query):
+        assert ask_query.strip().startswith("ASK")
+        result = self.query(ask_query)
+        assert len(result) == 1
         if result[0][0] == "true":
             return True
         elif result[0][0] == "false":
             return False
         else:
-            raise Exception ("Something fishy is going on")
+            raise Exception("Something fishy is going on")
 
 
 class MinerFsHelper (Helper):
 
     PROCESS_NAME = 'tracker-miner-fs'
-    PROCESS_PATH = os.path.join (cfg.EXEC_PREFIX, "tracker-miner-fs")
+    PROCESS_PATH = os.path.join(cfg.EXEC_PREFIX, "tracker-miner-fs")
     BUS_NAME = cfg.MINERFS_BUSNAME
 
     FLAGS = ['--initial-sleep=0']
     if cfg.haveMaemo:
-        FLAGS.append ('--disable-miner=userguides')
+        FLAGS.append('--disable-miner=userguides')
 
-    def start (self):
-        Helper.start (self)
+    def start(self):
+        Helper.start(self)
 
-        bus_object = self.bus.get_object (cfg.MINERFS_BUSNAME,
-                                          cfg.MINERFS_OBJ_PATH)
-        self.miner_fs = dbus.Interface (bus_object,
-                                        dbus_interface = cfg.MINER_IFACE)
+        bus_object = self.bus.get_object(cfg.MINERFS_BUSNAME,
+                                         cfg.MINERFS_OBJ_PATH)
+        self.miner_fs = dbus.Interface(bus_object,
+                                       dbus_interface=cfg.MINER_IFACE)
 
-    def stop (self):
-        Helper.stop (self)
+    def stop(self):
+        Helper.stop(self)
 
 
 class ExtractorHelper (Helper):
@@ -605,8 +638,9 @@ class ExtractorHelper (Helper):
     PROCESS_NAME = 'tracker-extract'
     BUS_NAME = cfg.TRACKER_EXTRACT_BUSNAME
 
+
 class WritebackHelper (Helper):
 
     PROCESS_NAME = 'tracker-writeback'
-    PROCESS_PATH = os.path.join (cfg.EXEC_PREFIX, 'tracker-writeback')
+    PROCESS_PATH = os.path.join(cfg.EXEC_PREFIX, 'tracker-writeback')
     BUS_NAME = cfg.WRITEBACK_BUSNAME
diff --git a/tests/functional-tests/common/utils/html.py b/tests/functional-tests/common/utils/html.py
index ec296fe..d13e6d3 100644
--- a/tests/functional-tests/common/utils/html.py
+++ b/tests/functional-tests/common/utils/html.py
@@ -2,63 +2,63 @@
 import unittest
 import os
 
+
 class html:
-       
-       def top(self):
 
-               os.remove('indexing-performance')       
-               self.file = 'indexing-performance' 
-               self.f = open(self.file, "a")
-               self.f.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd";>' + "\n" +
-               '<html>' + "\n" +
-               ' <head>' + "\n" +
-               ' <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">' + "\n" +
-               ' <title>Tracker Indexing Performance</title>' + "\n" +
-               ' <H1 align="center"><font color="#CC0000" face="verdana" size="6">Tracker Indexing Performance</font></H1>' + "\n" +
-               ' <body>' + "\n" +
-               ' <table border="1", align="center">' + "\n" + 
-               '<th><font color="#8000FF" face="verdana" size="4">Test data</font></th>' + "\n" +
-               '<th><font color="#8000FF" face="verdana" size="4">Minimum</font></th>' + "\n" +
-               '<th><font color="#8000FF" face="verdana" size="4">Maximum</font></th>' + "\n" +
-               '<th><font color="#8000FF" face="verdana" size="4">Average</font></th>' + "\n" +
-               '<th><font color="#8000FF" face="verdana" size="4">Median</font></th>' + "\n" 
-               )
-               self.f.close() 
-               
-       
-       def mid(self,title,min,max,avg,median):
+    def top(self):
 
-               self.file = 'indexing-performance' 
-               self.f = open(self.file, "a")
-               self.f.write( '<tr>' + "\n" +
-               '<td>' + title + '</td>' + "\n" +
-               '<td>' + str(min) + '</td>' + "\n" +
-               '<td>' + str(max) + '</td>' + "\n" +
-               '<td>' + str(avg) + '</td>' + "\n" +
-               '<td>' + str(median) + '</td>' + "\n" +
-               '</tr>' + "\n" 
-               )
-               self.f.close() 
+        os.remove('indexing-performance')
+        self.file = 'indexing-performance'
+        self.f = open(self.file, "a")
+        self.f.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd";>' + "\n" +
+                     '<html>' + "\n" +
+                     ' <head>' + "\n" +
+                     ' <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">' + "\n" +
+                     ' <title>Tracker Indexing Performance</title>' + "\n" +
+                     ' <H1 align="center"><font color="#CC0000" face="verdana" size="6">Tracker Indexing Performance</font></H1>' + "\n" +
+                     ' <body>' + "\n" +
+                     ' <table border="1", align="center">' + "\n" +
+                     '<th><font color="#8000FF" face="verdana" size="4">Test data</font></th>' + "\n" +
+                     '<th><font color="#8000FF" face="verdana" size="4">Minimum</font></th>' + "\n" +
+                     '<th><font color="#8000FF" face="verdana" size="4">Maximum</font></th>' + "\n" +
+                     '<th><font color="#8000FF" face="verdana" size="4">Average</font></th>' + "\n" +
+                     '<th><font color="#8000FF" face="verdana" size="4">Median</font></th>' +
+                     "\n"
+                     )
+        self.f.close()
 
-       def bottom(self):
+    def mid(self, title, min, max, avg, median):
 
-               self.file = 'indexing-performance' 
-               self.f = open(self.file, "a")
-               self.f.write( '</table>' + "\n" +
-               ' </body>' + "\n" +
-               ' </head>' + "\n" +
-               ' </html>' + "\n" 
-               )
-               self.f.close() 
+        self.file = 'indexing-performance'
+        self.f = open(self.file, "a")
+        self.f.write('<tr>' + "\n" +
+                     '<td>' + title + '</td>' + "\n" +
+                     '<td>' + str(min) + '</td>' + "\n" +
+                     '<td>' + str(max) + '</td>' + "\n" +
+                     '<td>' + str(avg) + '</td>' + "\n" +
+                     '<td>' + str(median) + '</td>' + "\n" +
+                     '</tr>' + "\n"
+                     )
+        self.f.close()
 
-class report(unittest.TestCase):
+    def bottom(self):
+
+        self.file = 'indexing-performance'
+        self.f = open(self.file, "a")
+        self.f.write('</table>' + "\n" +
+                     ' </body>' + "\n" +
+                     ' </head>' + "\n" +
+                     ' </html>' + "\n"
+                     )
+        self.f.close()
 
-        def first(self):                      
-                self.file = html()
-                self.file.top()                                    
-                                                             
-        def last(self):                                        
-                self.file = html()
-                self.file.bottom()
 
+class report(unittest.TestCase):
+
+    def first(self):
+        self.file = html()
+        self.file.top()
 
+    def last(self):
+        self.file = html()
+        self.file.bottom()
diff --git a/tests/functional-tests/common/utils/minertest.py b/tests/functional-tests/common/utils/minertest.py
index c84270e..fc927b8 100644
--- a/tests/functional-tests/common/utils/minertest.py
+++ b/tests/functional-tests/common/utils/minertest.py
@@ -30,16 +30,18 @@ from itertools import chain
 
 MINER_TMP_DIR = cfg.TEST_MONITORED_TMP_DIR
 
-def path (filename):
-    return os.path.join (MINER_TMP_DIR, filename)
 
-def uri (filename):
-    return "file://" + os.path.join (MINER_TMP_DIR, filename)
+def path(filename):
+    return os.path.join(MINER_TMP_DIR, filename)
+
+
+def uri(filename):
+    return "file://" + os.path.join(MINER_TMP_DIR, filename)
 
 
 DEFAULT_TEXT = "Some stupid content, to have a test file"
 
-index_dirs = [os.path.join (MINER_TMP_DIR, "test-monitored")]
+index_dirs = [os.path.join(MINER_TMP_DIR, "test-monitored")]
 CONF_OPTIONS = {
     cfg.DCONF_MINER_SCHEMA: {
         'index-recursive-directories': GLib.Variant.new_strv(index_dirs),
@@ -53,7 +55,7 @@ CONF_OPTIONS = {
 
 class CommonTrackerMinerTest (ut.TestCase):
 
-    def prepare_directories (self):
+    def prepare_directories(self):
         #
         #     ~/test-monitored/
         #                     /file1.txt
@@ -84,31 +86,31 @@ class CommonTrackerMinerTest (ut.TestCase):
         for tf in chain(monitored_files, unmonitored_files):
             testfile = path(tf)
             ensure_dir_exists(os.path.dirname(testfile))
-            with open (testfile, 'w') as f:
-                f.write (DEFAULT_TEXT)
+            with open(testfile, 'w') as f:
+                f.write(DEFAULT_TEXT)
 
         for tf in monitored_files:
             self.tracker.await_resource_inserted(
                 'nfo:TextDocument', url=uri(tf))
 
-    def setUp (self):
+    def setUp(self):
         for d in ['test-monitored', 'test-no-monitored']:
             dirname = path(d)
-            if os.path.exists (dirname):
+            if os.path.exists(dirname):
                 shutil.rmtree(dirname)
             os.makedirs(dirname)
 
-        self.system = TrackerSystemAbstraction ()
+        self.system = TrackerSystemAbstraction()
 
-        self.system.tracker_miner_fs_testing_start (CONF_OPTIONS)
+        self.system.tracker_miner_fs_testing_start(CONF_OPTIONS)
         self.tracker = self.system.store
 
         try:
-            self.prepare_directories ()
-            self.tracker.reset_graph_updates_tracking ()
+            self.prepare_directories()
+            self.tracker.reset_graph_updates_tracking()
         except Exception as e:
-            self.tearDown ()
+            self.tearDown()
             raise
 
-    def tearDown (self):
-        self.system.tracker_miner_fs_testing_stop ()
+    def tearDown(self):
+        self.system.tracker_miner_fs_testing_stop()
diff --git a/tests/functional-tests/common/utils/options.py b/tests/functional-tests/common/utils/options.py
index 6bc8379..7bbfad9 100644
--- a/tests/functional-tests/common/utils/options.py
+++ b/tests/functional-tests/common/utils/options.py
@@ -21,17 +21,19 @@ parser.add_option("-v", "--verbose", dest="verbose",
 # have their own simple commandline parsers will complain
 for option in ["--startmanually", "-m", "--verbose", "-v"]:
     try:
-        sys.argv.remove (option)
+        sys.argv.remove(option)
     except ValueError:
         pass
 
-def is_verbose ():
+
+def is_verbose():
     """
     True to log process status information to stdout
     """
     return options.verbose
 
-def is_manual_start ():
+
+def is_manual_start():
     """
     False to start the processes automatically
     """
diff --git a/tests/functional-tests/common/utils/storetest.py b/tests/functional-tests/common/utils/storetest.py
index be16b6c..3c35e49 100644
--- a/tests/functional-tests/common/utils/storetest.py
+++ b/tests/functional-tests/common/utils/storetest.py
@@ -28,18 +28,20 @@ from common.utils import configuration as cfg
 import unittest2 as ut
 #import unittest as ut
 
+
 class CommonTrackerStoreTest (ut.TestCase):
-        """
-        Common superclass for tests that just require a fresh store running
-        """
-        @classmethod 
-       def setUpClass (self):
-            #print "Starting the daemon in test mode"
-            self.system = TrackerSystemAbstraction ()
-            self.system.tracker_store_testing_start ()
-            self.tracker = self.system.store
 
-        @classmethod
-        def tearDownClass (self):
-            #print "Stopping the daemon in test mode (Doing nothing now)"
-            self.system.tracker_store_testing_stop ()
+    """
+    Common superclass for tests that just require a fresh store running
+    """
+    @classmethod
+    def setUpClass(self):
+        # print "Starting the daemon in test mode"
+        self.system = TrackerSystemAbstraction()
+        self.system.tracker_store_testing_start()
+        self.tracker = self.system.store
+
+    @classmethod
+    def tearDownClass(self):
+        # print "Stopping the daemon in test mode (Doing nothing now)"
+        self.system.tracker_store_testing_stop()
diff --git a/tests/functional-tests/common/utils/system.py b/tests/functional-tests/common/utils/system.py
index e305aa5..539d5de 100644
--- a/tests/functional-tests/common/utils/system.py
+++ b/tests/functional-tests/common/utils/system.py
@@ -18,26 +18,27 @@ import helpers
 # Add this after fixing the backup/restore and ontology changes tests
 #"G_DEBUG" : "fatal_criticals",
 
-TEST_ENV_DIRS =  { "XDG_DATA_HOME" : os.path.join (cfg.TEST_TMP_DIR, "data"),
-                   "XDG_CACHE_HOME": os.path.join (cfg.TEST_TMP_DIR, "cache")}
+TEST_ENV_DIRS = {"XDG_DATA_HOME": os.path.join(cfg.TEST_TMP_DIR, "data"),
+                 "XDG_CACHE_HOME": os.path.join(cfg.TEST_TMP_DIR, "cache")}
 
-TEST_ENV_VARS = {  "TRACKER_DISABLE_MEEGOTOUCH_LOCALE": "",
-                   "LC_COLLATE": "en_GB.utf8",
-                   "DCONF_PROFILE": os.path.join (cfg.DATADIR, "tracker-tests",
-                                                  "trackertest") }
+TEST_ENV_VARS = {"TRACKER_DISABLE_MEEGOTOUCH_LOCALE": "",
+                 "LC_COLLATE": "en_GB.utf8",
+                 "DCONF_PROFILE": os.path.join(cfg.DATADIR, "tracker-tests",
+                                               "trackertest")}
 
-EXTRA_DIRS = [os.path.join (cfg.TEST_TMP_DIR, "data", "tracker"),
-              os.path.join (cfg.TEST_TMP_DIR, "cache", "tracker")]
+EXTRA_DIRS = [os.path.join(cfg.TEST_TMP_DIR, "data", "tracker"),
+              os.path.join(cfg.TEST_TMP_DIR, "cache", "tracker")]
 
 REASONABLE_TIMEOUT = 30
 
+
 class UnableToBootException (Exception):
     pass
 
 
 class TrackerSystemAbstraction:
 
-    def set_up_environment (self, settings, ontodir):
+    def set_up_environment(self, settings, ontodir):
         """
         Sets up the XDG_*_HOME variables and make sure the directories exist
 
@@ -47,29 +48,30 @@ class TrackerSystemAbstraction:
         GLib.Variant instance.
         """
 
-        helpers.log ("[Conf] Setting test environment...")
+        helpers.log("[Conf] Setting test environment...")
 
-        for var, directory in TEST_ENV_DIRS.iteritems ():
-            helpers.log ("export %s=%s" %(var, directory))
-            self.__recreate_directory (directory)
-            os.environ [var] = directory
+        for var, directory in TEST_ENV_DIRS.iteritems():
+            helpers.log("export %s=%s" % (var, directory))
+            self.__recreate_directory(directory)
+            os.environ[var] = directory
 
         for directory in EXTRA_DIRS:
-            self.__recreate_directory (directory)
+            self.__recreate_directory(directory)
 
         if ontodir:
-            helpers.log ("export %s=%s" % ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
-            os.environ ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
+            helpers.log("export %s=%s" %
+                        ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
+            os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
 
-        for var, value in TEST_ENV_VARS.iteritems ():
-            helpers.log ("export %s=%s" %(var, value))
-            os.environ [var] = value
+        for var, value in TEST_ENV_VARS.iteritems():
+            helpers.log("export %s=%s" % (var, value))
+            os.environ[var] = value
 
         # Previous loop should have set DCONF_PROFILE to the test location
         if settings is not None:
             self._apply_settings(settings)
 
-        helpers.log ("[Conf] environment ready")
+        helpers.log("[Conf] environment ready")
 
     def _apply_settings(self, settings):
         for schema_name, contents in settings.iteritems():
@@ -78,139 +80,148 @@ class TrackerSystemAbstraction:
             for key, value in contents.iteritems():
                 dconf.write(key, value)
 
-    def tracker_store_testing_start (self, confdir=None, ontodir=None):
+    def tracker_store_testing_start(self, confdir=None, ontodir=None):
         """
         Stops any previous instance of the store, calls set_up_environment,
         and starts a new instances of the store
         """
-        self.set_up_environment (confdir, ontodir)
+        self.set_up_environment(confdir, ontodir)
 
-        self.store = helpers.StoreHelper ()
-        self.store.start ()
+        self.store = helpers.StoreHelper()
+        self.store.start()
 
-    def tracker_store_start (self):
-        self.store.start ()
+    def tracker_store_start(self):
+        self.store.start()
 
-    def tracker_store_stop_nicely (self):
-        self.store.stop ()
+    def tracker_store_stop_nicely(self):
+        self.store.stop()
 
-    def tracker_store_stop_brutally (self):
-        self.store.kill ()
+    def tracker_store_stop_brutally(self):
+        self.store.kill()
 
-    def tracker_store_restart_with_new_ontologies (self, ontodir):
-        self.store.stop ()
+    def tracker_store_restart_with_new_ontologies(self, ontodir):
+        self.store.stop()
         if ontodir:
-            helpers.log ("[Conf] Setting %s - %s" % ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
-            os.environ ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
+            helpers.log("[Conf] Setting %s - %s" %
+                        ("TRACKER_DB_ONTOLOGIES_DIR", ontodir))
+            os.environ["TRACKER_DB_ONTOLOGIES_DIR"] = ontodir
         try:
-            self.store.start ()
+            self.store.start()
         except dbus.DBusException, e:
-            raise UnableToBootException ("Unable to boot the store \n(" + str(e) + ")")
+            raise UnableToBootException(
+                "Unable to boot the store \n(" + str(e) + ")")
 
-    def tracker_store_prepare_journal_replay (self):
-        db_location = os.path.join (TEST_ENV_DIRS ['XDG_CACHE_HOME'], "tracker", "meta.db")
-        os.unlink (db_location)
+    def tracker_store_prepare_journal_replay(self):
+        db_location = os.path.join(
+            TEST_ENV_DIRS['XDG_CACHE_HOME'], "tracker", "meta.db")
+        os.unlink(db_location)
 
-        lockfile = os.path.join (TEST_ENV_DIRS ['XDG_DATA_HOME'], "tracker", "data", ".ismeta.running")
-        f = open (lockfile, 'w')
-        f.write (" ")
-        f.close ()
+        lockfile = os.path.join(
+            TEST_ENV_DIRS['XDG_DATA_HOME'], "tracker", "data", ".ismeta.running")
+        f = open(lockfile, 'w')
+        f.write(" ")
+        f.close()
 
-    def tracker_store_corrupt_dbs (self):
+    def tracker_store_corrupt_dbs(self):
         for filename in ["meta.db", "meta.db-wal"]:
-            db_path = os.path.join (TEST_ENV_DIRS ['XDG_CACHE_HOME'], "tracker", filename)
-            f = open (db_path, "w")
-            for i in range (0, 100):
-                f.write ("Some stupid content... hohohoho, not a sqlite file anymore!\n")
-            f.close ()
-
-    def tracker_store_remove_journal (self):
-        db_location = os.path.join (TEST_ENV_DIRS ['XDG_DATA_HOME'], "tracker", "data")
-        shutil.rmtree (db_location)
-        os.mkdir (db_location)
-
-    def tracker_store_remove_dbs (self):
-        db_location = os.path.join (TEST_ENV_DIRS ['XDG_CACHE_HOME'], "tracker")
-        shutil.rmtree (db_location)
-        os.mkdir (db_location)
-
-    def tracker_store_testing_stop (self):
+            db_path = os.path.join(
+                TEST_ENV_DIRS['XDG_CACHE_HOME'], "tracker", filename)
+            f = open(db_path, "w")
+            for i in range(0, 100):
+                f.write(
+                    "Some stupid content... hohohoho, not a sqlite file anymore!\n")
+            f.close()
+
+    def tracker_store_remove_journal(self):
+        db_location = os.path.join(
+            TEST_ENV_DIRS['XDG_DATA_HOME'], "tracker", "data")
+        shutil.rmtree(db_location)
+        os.mkdir(db_location)
+
+    def tracker_store_remove_dbs(self):
+        db_location = os.path.join(
+            TEST_ENV_DIRS['XDG_CACHE_HOME'], "tracker")
+        shutil.rmtree(db_location)
+        os.mkdir(db_location)
+
+    def tracker_store_testing_stop(self):
         """
         Stops a running tracker-store
         """
         assert self.store
-        self.store.stop ()
-
+        self.store.stop()
 
-    def tracker_miner_fs_testing_start (self, confdir=None):
+    def tracker_miner_fs_testing_start(self, confdir=None):
         """
         Stops any previous instance of the store and miner, calls set_up_environment,
         and starts a new instance of the store and miner-fs
         """
-        self.set_up_environment (confdir, None)
+        self.set_up_environment(confdir, None)
 
         # Start also the store. DBus autoactivation ignores the env variables.
-        self.store = helpers.StoreHelper ()
-        self.store.start ()
+        self.store = helpers.StoreHelper()
+        self.store.start()
 
-        self.extractor = helpers.ExtractorHelper ()
-        self.extractor.start ()
+        self.extractor = helpers.ExtractorHelper()
+        self.extractor.start()
 
-        self.miner_fs = helpers.MinerFsHelper ()
-        self.miner_fs.start ()
+        self.miner_fs = helpers.MinerFsHelper()
+        self.miner_fs.start()
 
-    def tracker_miner_fs_testing_stop (self):
+    def tracker_miner_fs_testing_stop(self):
         """
         Stops the extractor, miner-fs and store running
         """
-        self.extractor.stop ()
-        self.miner_fs.stop ()
-        self.store.stop ()
+        self.extractor.stop()
+        self.miner_fs.stop()
+        self.store.stop()
 
-    def tracker_writeback_testing_start (self, confdir=None):
+    def tracker_writeback_testing_start(self, confdir=None):
         # Start the miner-fs (and store) and then the writeback process
-        self.tracker_miner_fs_testing_start (confdir)
-        self.writeback = helpers.WritebackHelper ()
-        self.writeback.start ()
+        self.tracker_miner_fs_testing_start(confdir)
+        self.writeback = helpers.WritebackHelper()
+        self.writeback.start()
 
-    def tracker_writeback_testing_stop (self):
+    def tracker_writeback_testing_stop(self):
         # Tracker write must have been started before
-        self.writeback.stop ()
-        self.tracker_miner_fs_testing_stop ()
+        self.writeback.stop()
+        self.tracker_miner_fs_testing_stop()
 
-    def tracker_all_testing_start (self, confdir=None):
+    def tracker_all_testing_start(self, confdir=None):
         # This will start all miner-fs, store and writeback
-        self.tracker_writeback_testing_start (confdir)
+        self.tracker_writeback_testing_start(confdir)
 
-    def tracker_all_testing_stop (self):
+    def tracker_all_testing_stop(self):
         # This will stop all miner-fs, store and writeback
-        self.tracker_writeback_testing_stop ()
+        self.tracker_writeback_testing_stop()
 
-    def __recreate_directory (self, directory):
-        if (os.path.exists (directory)):
-            shutil.rmtree (directory)
-        os.makedirs (directory)
+    def __recreate_directory(self, directory):
+        if (os.path.exists(directory)):
+            shutil.rmtree(directory)
+        os.makedirs(directory)
 
 
 if __name__ == "__main__":
-    import gtk, glib, time
+    import gtk
+    import glib
+    import time
 
-    def destroy_the_world (a):
-        a.tracker_store_testing_stop ()
+    def destroy_the_world(a):
+        a.tracker_store_testing_stop()
         print "   stopped"
         Gtk.main_quit()
 
     print "-- Starting store --"
-    a = TrackerSystemAbstraction ()
-    a.tracker_store_testing_start ()
+    a = TrackerSystemAbstraction()
+    a.tracker_store_testing_start()
     print "   started, waiting 5 sec. to stop it"
-    GLib.timeout_add_seconds (5, destroy_the_world, a)
-    Gtk.main ()
+    GLib.timeout_add_seconds(5, destroy_the_world, a)
+    Gtk.main()
 
     print "-- Starting miner-fs --"
-    b = TrackerMinerFsLifeCycle ()
-    b.start ()
+    b = TrackerMinerFsLifeCycle()
+    b.start()
     print "  started, waiting 3 secs. to stop it"
-    time.sleep (3)
-    b.stop ()
+    time.sleep(3)
+    b.stop()
     print "  stopped"
diff --git a/tests/functional-tests/common/utils/writebacktest.py b/tests/functional-tests/common/utils/writebacktest.py
index 63c3ef7..dd96c4f 100644
--- a/tests/functional-tests/common/utils/writebacktest.py
+++ b/tests/functional-tests/common/utils/writebacktest.py
@@ -32,7 +32,7 @@ TEST_FILE_JPEG = "writeback-test-1.jpeg"
 TEST_FILE_TIFF = "writeback-test-2.tif"
 TEST_FILE_PNG = "writeback-test-4.png"
 
-WRITEBACK_TMP_DIR = os.path.join (cfg.TEST_MONITORED_TMP_DIR, "writeback")
+WRITEBACK_TMP_DIR = os.path.join(cfg.TEST_MONITORED_TMP_DIR, "writeback")
 
 index_dirs = [WRITEBACK_TMP_DIR]
 CONF_OPTIONS = {
@@ -45,81 +45,83 @@ CONF_OPTIONS = {
 }
 
 
-def uri (filename):
-    return "file://" + os.path.join (WRITEBACK_TMP_DIR, filename)
+def uri(filename):
+    return "file://" + os.path.join(WRITEBACK_TMP_DIR, filename)
+
 
 class CommonTrackerWritebackTest (ut.TestCase):
+
     """
     Superclass to share methods. Shouldn't be run by itself.
     Start all processes including writeback, miner pointing to WRITEBACK_TMP_DIR
     """
-            
+
     @classmethod
-    def __prepare_directories (self):
-        if (os.path.exists (os.getcwd() + "/test-writeback-data")):
+    def __prepare_directories(self):
+        if (os.path.exists(os.getcwd() + "/test-writeback-data")):
             # Use local directory if available
             datadir = os.getcwd() + "/test-writeback-data"
         else:
-            datadir = os.path.join (cfg.DATADIR, "tracker-tests",
-                                    "test-writeback-data")
+            datadir = os.path.join(cfg.DATADIR, "tracker-tests",
+                                   "test-writeback-data")
 
         if not os.path.exists(WRITEBACK_TMP_DIR):
             os.makedirs(WRITEBACK_TMP_DIR)
         else:
             if not os.path.isdir(WRITEBACK_TMP_DIR):
-                raise Exception("%s exists already and is not a directory" % WRITEBACK_TMP_DIR)
+                raise Exception(
+                    "%s exists already and is not a directory" % WRITEBACK_TMP_DIR)
 
-        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG,TEST_FILE_TIFF]:
-            origin = os.path.join (datadir, testfile)
-            log ("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
-            shutil.copy (origin, WRITEBACK_TMP_DIR)
+        for testfile in [TEST_FILE_JPEG, TEST_FILE_PNG, TEST_FILE_TIFF]:
+            origin = os.path.join(datadir, testfile)
+            log("Copying %s -> %s" % (origin, WRITEBACK_TMP_DIR))
+            shutil.copy(origin, WRITEBACK_TMP_DIR)
 
+    @classmethod
+    def setUpClass(self):
+        # print "Starting the daemon in test mode"
+        self.__prepare_directories()
 
-    @classmethod 
-    def setUpClass (self):
-        #print "Starting the daemon in test mode"
-        self.__prepare_directories ()
-        
-        self.system = TrackerSystemAbstraction ()
+        self.system = TrackerSystemAbstraction()
 
-        self.system.tracker_writeback_testing_start (CONF_OPTIONS)
+        self.system.tracker_writeback_testing_start(CONF_OPTIONS)
 
         def await_resource_extraction(url):
             # Make sure a resource has been crawled by the FS miner and by
             # tracker-extract. The extractor adds nie:contentCreated for
             # image resources, so know once this property is set the
             # extraction is complete.
-            self.system.store.await_resource_inserted('nfo:Image', url=url, required_property='nfo:width')
+            self.system.store.await_resource_inserted(
+                'nfo:Image', url=url, required_property='nfo:width')
 
-        await_resource_extraction (self.get_test_filename_jpeg())
-        await_resource_extraction (self.get_test_filename_tiff())
-        await_resource_extraction (self.get_test_filename_png())
+        await_resource_extraction(self.get_test_filename_jpeg())
+        await_resource_extraction(self.get_test_filename_tiff())
+        await_resource_extraction(self.get_test_filename_png())
 
         # Returns when ready
-        log ("Ready to go!")
-        
+        log("Ready to go!")
+
     @classmethod
-    def tearDownClass (self):
-        #print "Stopping the daemon in test mode (Doing nothing now)"
-        self.system.tracker_writeback_testing_stop ()
-    
+    def tearDownClass(self):
+        # print "Stopping the daemon in test mode (Doing nothing now)"
+        self.system.tracker_writeback_testing_stop()
 
     @staticmethod
-    def get_test_filename_jpeg ():
-        return uri (TEST_FILE_JPEG)
+    def get_test_filename_jpeg():
+        return uri(TEST_FILE_JPEG)
 
     @staticmethod
-    def get_test_filename_tiff ():
-        return uri (TEST_FILE_TIFF)
+    def get_test_filename_tiff():
+        return uri(TEST_FILE_TIFF)
 
     @staticmethod
-    def get_test_filename_png ():
-        return uri (TEST_FILE_PNG)
+    def get_test_filename_png():
+        return uri(TEST_FILE_PNG)
 
-    def get_mtime (self, filename):
+    def get_mtime(self, filename):
         return os.stat(filename).st_mtime
 
-    def wait_for_file_change (self, filename, initial_mtime):
+    def wait_for_file_change(self, filename, initial_mtime):
         start = time.time()
         while time.time() < start + 5:
             mtime = os.stat(filename).st_mtime
diff --git a/tests/functional-tests/create-tests-aegis.py b/tests/functional-tests/create-tests-aegis.py
index 57a5e5e..84b1158 100755
--- a/tests/functional-tests/create-tests-aegis.py
+++ b/tests/functional-tests/create-tests-aegis.py
@@ -6,7 +6,9 @@ import imp
 
 from common.utils import configuration as cfg
 
-### This function comes from pydoc. Cool!
+# This function comes from pydoc. Cool!
+
+
 def importfile(path):
     """Import a Python source file or compiled file given its path."""
     magic = imp.get_magic()
@@ -35,12 +37,13 @@ HEADER = """
 FOOTER = """
 </aegis>"""
 
-def print_aegis_perm_request (filename):
-    module = importfile (filename)
+
+def print_aegis_perm_request(filename):
+    module = importfile(filename)
     if not module:
         return
 
-    install_path = os.path.join (cfg.DATADIR, "tracker-tests", filename)
+    install_path = os.path.join(cfg.DATADIR, "tracker-tests", filename)
 
     print "\n   <request>"
     print '      <credential name="TrackerReadAccess" />'
@@ -55,9 +58,9 @@ def print_aegis_perm_request (filename):
 
 if __name__ == "__main__":
 
-    if (len (sys.argv) < 2):
+    if (len(sys.argv) < 2):
         print >> sys.stderr, "pass .py tests as parameter"
-        sys.exit (-1)
+        sys.exit(-1)
     print HEADER
-    map (print_aegis_perm_request, sys.argv[1:])
+    map(print_aegis_perm_request, sys.argv[1:])
     print FOOTER
diff --git a/tests/functional-tests/create-tests-xml.py b/tests/functional-tests/create-tests-xml.py
index 5bbca2a..d3af63e 100755
--- a/tests/functional-tests/create-tests-xml.py
+++ b/tests/functional-tests/create-tests-xml.py
@@ -6,7 +6,9 @@ import imp
 
 from common.utils import configuration as cfg
 
-### This function comes from pydoc. Cool!
+# This function comes from pydoc. Cool!
+
+
 def importfile(path):
     """Import a Python source file or compiled file given its path."""
     magic = imp.get_magic()
@@ -56,31 +58,33 @@ else:
         </pre_steps>
         """
 
-def __get_doc (obj):
+
+def __get_doc(obj):
     if obj.__doc__:
-        return obj.__doc__.strip ()
+        return obj.__doc__.strip()
     else:
         return "FIXME description here"
 
-def print_as_xml (filename):
 
-    module = importfile (filename)
+def print_as_xml(filename):
+
+    module = importfile(filename)
     if not module:
         return
-    
+
     print "\n    <set name=\"%s\">" % (module.__name__)
-    print "        <description>%s</description>" % (__get_doc (module))
+    print "        <description>%s</description>" % (__get_doc(module))
     print PRE_STEPS
-    for name, obj in inspect.getmembers (module):
-        if name.startswith ("Common") or name.endswith ("Template"):
+    for name, obj in inspect.getmembers(module):
+        if name.startswith("Common") or name.endswith("Template"):
             continue
-        
-        if (inspect.isclass (obj)
-            and obj.__module__ == filename[:-3]):
-            script = os.path.join (cfg.DATADIR, "tracker-tests", filename)
-            print  TEST_CASE_TMPL % (name,
-                                     __get_doc (obj),
-                                     script + " " + name)
+
+        if (inspect.isclass(obj)
+                and obj.__module__ == filename[:-3]):
+            script = os.path.join(cfg.DATADIR, "tracker-tests", filename)
+            print TEST_CASE_TMPL % (name,
+                                    __get_doc(obj),
+                                    script + " " + name)
 
     print """        <environments>
             <scratchbox>true</scratchbox>
@@ -92,18 +96,18 @@ def print_as_xml (filename):
     #
     # First time a module is loaded, __file__ is the .py
     #  once the file is compiled, __file__ is .pyc
-    if module.__file__.endswith (".py"):
+    if module.__file__.endswith(".py"):
         unlink = module.__file__ + "c"
     else:
         unlink = module.__file__
-    os.unlink (unlink)
+    os.unlink(unlink)
 
 
 if __name__ == "__main__":
 
-    if (len (sys.argv) < 2):
+    if (len(sys.argv) < 2):
         print >> sys.stderr, "pass .py tests as parameter"
-        sys.exit (-1)
+        sys.exit(-1)
     print HEADER
-    map (print_as_xml, sys.argv[1:])
+    map(print_as_xml, sys.argv[1:])
     print FOOTER
diff --git a/tests/functional-tests/fts-tc.py b/tests/functional-tests/fts-tc.py
index 4e36e7a..b0792ad 100644
--- a/tests/functional-tests/fts-tc.py
+++ b/tests/functional-tests/fts-tc.py
@@ -20,7 +20,9 @@
 # Boston, MA  02110-1301, USA.
 
 
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -29,7 +31,7 @@ import configuration
 from dbus.mainloop.glib import DBusGMainLoop
 from gi.repository import GObject
 import shutil
-from subprocess import Popen,PIPE
+from subprocess import Popen, PIPE
 
 TRACKER = 'org.freedesktop.Tracker1'
 
@@ -37,21 +39,21 @@ TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
 RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
 
 
-MINER="org.freedesktop.Tracker1.Miner.Files"
-MINER_OBJ="/org/freedesktop/Tracker1/Miner/Files"
-MINER_IFACE="org.freedesktop.Tracker1.Miner"
+MINER = "org.freedesktop.Tracker1.Miner.Files"
+MINER_OBJ = "/org/freedesktop/Tracker1/Miner/Files"
+MINER_IFACE = "org.freedesktop.Tracker1.Miner"
 
 TEST_IMAGE = "test-image-1.jpg"
 TEST_MUSIC = "tracker-mp3-test.mp3"
 TEST_VIDEO = "test-video.mp4"
-TEST_TEXT =  "test-text-01.txt"
+TEST_TEXT = "test-text-01.txt"
 TEST_TEXT_02 = "test-text-02.txt"
 TEST_TEXT_03 = "test-text-03.txt"
 TEXT_DB = "/home/user/.cache/tracker/fulltext.db"
 
-MOUNT_DUMMY = '/root/dummy/'                                       
-MYDOCS = configuration.MYDOCS                                     
-MOUNT_PARTITION = '/dev/mmcblk0p1' 
+MOUNT_DUMMY = '/root/dummy/'
+MYDOCS = configuration.MYDOCS
+MOUNT_PARTITION = '/dev/mmcblk0p1'
 
 """create two test directories in miner monitored path  """
 target = configuration.check_target()
@@ -66,32 +68,33 @@ if target == configuration.MAEMO6_HW:
     SRC_IMAGE_DIR = configuration.TEST_DATA_IMAGES
     SRC_MUSIC_DIR = configuration.TEST_DATA_MUSIC
     SRC_VIDEO_DIR = configuration.TEST_DATA_VIDEO
-    SRC_TEXT_DIR  = configuration.TEST_DATA_TEXT
-    MYDOCS_SUB =  configuration.MYDOCS + 's1/s2/s3/s4/s5/'
+    SRC_TEXT_DIR = configuration.TEST_DATA_TEXT
+    MYDOCS_SUB = configuration.MYDOCS + 's1/s2/s3/s4/s5/'
 
 elif target == configuration.DESKTOP:
     """target is DESKTOP """
     TEST_DIR_1 = os.path.expanduser("~") + '/' + "tracker_test_op_1"
     TEST_DIR_2 = os.path.expanduser("~") + '/' + "tracker_test_op_2"
-    TEST_DIR_3 = os.path.expanduser("~") + '/' + "core-dumps/" + "tracker_test_op_3"
+    TEST_DIR_3 = os.path.expanduser(
+        "~") + '/' + "core-dumps/" + "tracker_test_op_3"
     SRC_IMAGE_DIR = configuration.VCS_TEST_DATA_IMAGES
     SRC_MUSIC_DIR = configuration.VCS_TEST_DATA_MUSIC
     SRC_VIDEO_DIR = configuration.VCS_TEST_DATA_VIDEO
-    MYDOCS_SUB =  os.path.expanduser("~") + 's1/s2/s3/s4/s5/'
+    MYDOCS_SUB = os.path.expanduser("~") + 's1/s2/s3/s4/s5/'
 
-commands.getoutput('mkdir ' + TEST_DIR_1)           
-                                                                         
-"""creating hidden directory """                                                                             
                
-HIDDEN_DIR = configuration.MYDOCS + '.test_hidden' 
-commands.getoutput('mkdir -p  ' + HIDDEN_DIR)       
-                                                                                                             
                
-tdcpy = configuration.TDCopy()                                                                               
                
-tdcpy.set_test_data(target)   
+commands.getoutput('mkdir ' + TEST_DIR_1)
+
+"""creating hidden directory """
+HIDDEN_DIR = configuration.MYDOCS + '.test_hidden'
+commands.getoutput('mkdir -p  ' + HIDDEN_DIR)
+
+tdcpy = configuration.TDCopy()
+tdcpy.set_test_data(target)
 
 """creating text file"""
 
-print SRC_TEXT_DIR+TEST_TEXT
-f1=open(SRC_TEXT_DIR+TEST_TEXT,'w')
+print SRC_TEXT_DIR + TEST_TEXT
+f1 = open(SRC_TEXT_DIR + TEST_TEXT, 'w')
 
 lines = "The Content framework subsystem provides data and metadata storage and retrieval for the platform. 
Its stack contains\
     * A store of information (tracker) based on the triplet-store concept including a specific query 
language (SparQL, W3C standard)\
@@ -103,609 +106,657 @@ lines = "The Content framework subsystem provides data and metadata storage and
 f1.write(lines)
 f1.close()
 
+
 class TestUpdate (unittest.TestCase):
 
     def setUp(self):
         bus = dbus.SessionBus()
         tracker = bus.get_object(TRACKER, TRACKER_OBJ)
-        self.resources = dbus.Interface (tracker,
-                                         dbus_interface=RESOURCES_IFACE)
-
-        miner_obj= bus.get_object(MINER,MINER_OBJ)
-        self.miner=dbus.Interface (miner_obj,dbus_interface=MINER_IFACE)
+        self.resources = dbus.Interface(tracker,
+                                        dbus_interface=RESOURCES_IFACE)
 
+        miner_obj = bus.get_object(MINER, MINER_OBJ)
+        self.miner = dbus.Interface(miner_obj, dbus_interface=MINER_IFACE)
 
         self.loop = GObject.MainLoop()
         self.dbus_loop = DBusGMainLoop(set_as_default=True)
-        self.bus = dbus.SessionBus (self.dbus_loop)
+        self.bus = dbus.SessionBus(self.dbus_loop)
 
-        self.bus.add_signal_receiver (self.miner_processing_cb,
-                                      signal_name="Progress",
-                                      dbus_interface=MINER_IFACE,
-                                      path=MINER_OBJ)
+        self.bus.add_signal_receiver(self.miner_processing_cb,
+                                     signal_name="Progress",
+                                     dbus_interface=MINER_IFACE,
+                                     path=MINER_OBJ)
 
-    def miner_processing_cb (self,status,handle):
+    def miner_processing_cb(self, status, handle):
         print "GOT PROGRESS FROM MINER"
 
-        if (status == "Processing Files") :
+        if (status == "Processing Files"):
             print "Miner started"
-        elif (status == "Idle" ):
+        elif (status == "Idle"):
             """if the string is "Idle" quit the loop """
             print "Miner Idle"
             self.loop.quit()
-        else :
+        else:
             print "No specific Signal"
 
-    def wait_for_fileop (self, cmd, src, dst=''):
+    def wait_for_fileop(self, cmd, src, dst=''):
         if (cmd == "rm"):
             os.remove(src)
         elif (cmd == "cp"):
             shutil.copy2(src, dst)
         else:
-            shutil.move(src,dst)
+            shutil.move(src, dst)
         self.loop.run()
 
-    def edit_text (self, file,word) :
-        test_file =  file                                             
-        f=open(test_file,"w")                                                                             
-        f.writelines(word)                                                                               
-        f.close()    
+    def edit_text(self, file, word):
+        test_file = file
+        f = open(test_file, "w")
+        f.writelines(word)
+        f.close()
+
+
+class basic (TestUpdate):
 
-class basic (TestUpdate) :
+    def test_text_01(self):
+        """ To check if tracker search for a long word gives results """
 
-      def test_text_01 (self) :
-       
-       """ To check if tracker search for a long word gives results """
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
-       time.sleep(1)
+        """copy the test files """
+        time.sleep(1)
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
 
-       word = "fsfsfsdfskfweeqrewqkmnbbvkdasdjefjewriqjfnc"
+        word = "fsfsfsdfskfweeqrewqkmnbbvkdasdjefjewriqjfnc"
 
-       self.edit_text(file_path,word)
+        self.edit_text(file_path, word)
 
-       self.loop.run()
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' + word + '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='1','search for the word is not giving results')
-       os.remove(file_path)
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
+        os.remove(file_path)
 
-      def test_text_02 (self) :
+    def test_text_02(self):
+        """ To check if tracker search for a word gives results """
 
-       """ To check if tracker search for a word gives results """
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       word = "this is a meego file "
-       search_word = "meego"
+        word = "this is a meego file "
+        search_word = "meego"
 
-       self.edit_text(file_path,word)
-       self.loop.run()
+        self.edit_text(file_path, word)
+        self.loop.run()
+
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
 
-       result=commands.getoutput ('tracker search  ' + search_word+ '|grep  '+file_path+ '|wc -l ' )
-       print result
-       self.assert_(result=='1','search for the word is not giving results')
+        os.remove(file_path)
 
-       os.remove (file_path)
+    def test_text_03(self):
+        """ To check if tracker search for a non existing word gives results """
 
-      def test_text_03 (self) :
-       
-       """ To check if tracker search for a non existing word gives results """
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       word = "trick "
-       search_word = "trikc"
+        word = "trick "
+        search_word = "trikc"
 
-       self.edit_text(file_path,word)
-       self.loop.run()
+        self.edit_text(file_path, word)
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' + search_word + '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='0','search for the non existing  word is giving results')
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '0', 'search for the non existing  word is giving results')
 
-       os.remove (file_path)
+        os.remove(file_path)
 
-      def test_text_04 (self) :
+    def test_text_04(self):
+        """ To check if tracker search for a word gives results (File contains same word multiple times)"""
 
-       """ To check if tracker search for a word gives results (File contains same word multiple times)"""  
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       sentence= "this is a crazy day. i m feeling crazy. rnt u crazy. everyone is crazy."
-       search_word = "crazy"
+        sentence = "this is a crazy day. i m feeling crazy. rnt u crazy. everyone is crazy."
+        search_word = "crazy"
 
-       self.edit_text(file_path,sentence)
-       self.loop.run()
+        self.edit_text(file_path, sentence)
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' + search_word + '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='1','search for the word  not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(result == '1', 'search for the word  not giving results')
 
-       os.remove (file_path)
+        os.remove(file_path)
 
-      def test_text_05 (self) :
+    def test_text_05(self):
+        """ To check if tracker search for sentence gives results """
 
-       """ To check if tracker search for sentence gives results """  
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       sentence= " 'this is a lazy fox. '"
+        sentence = " 'this is a lazy fox. '"
 
-       self.edit_text(file_path,sentence)
-       self.loop.run()
+        self.edit_text(file_path, sentence)
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='1','search for the sentence is not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-       os.remove (file_path)
+        os.remove(file_path)
 
-    
-      def test_text_06 (self) :
+    def test_text_06(self):
+        """ To check if tracker search for part of sentenece gives results """
 
-       """ To check if tracker search for part of sentenece gives results """  
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       sentence= " 'this is a lazy fox. '"
-       search_sentence =  " 'this is a lazy  '"
+        sentence = " 'this is a lazy fox. '"
+        search_sentence = " 'this is a lazy  '"
 
-       self.edit_text(file_path,sentence)
-       self.loop.run()
+        self.edit_text(file_path, sentence)
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' + search_sentence+ '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='1','search for the sentence is not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + search_sentence + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-       os.remove (file_path)
-    
+        os.remove(file_path)
 
-      def test_text_07 (self) :
+    def test_text_07(self):
+        """ To check if tracker search for  sentenece gives results """
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-       """ To check if tracker search for  sentenece gives results """
-        file_path =  configuration.MYDOCS + TEST_TEXT
-                    
-        """copy the test files """          
+        """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       sentence= " 'summer.time '"
+        sentence = " 'summer.time '"
 
-       self.edit_text(file_path,sentence)
-       self.loop.run()
+        self.edit_text(file_path, sentence)
+        self.loop.run()
 
-       result=commands.getoutput ('tracker search  ' + sentence+ '|grep  '+file_path+ '|wc -l ')
-       print result
-       self.assert_(result=='1','search for the sentence is not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-       os.remove (file_path)
+        os.remove(file_path)
 
+    def test_text_08(self):
+        """ To check if tracker search for  sentenece gives results """
 
-      def test_text_08 (self) :
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-       """ To check if tracker search for  sentenece gives results """
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        sentence = " 'summer.time '"
+        search_word = '.'
 
-       file_path =  configuration.MYDOCS + TEST_TEXT                   
-                                                                        
-        """copy the test files """                                      
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path) 
-        sentence= " 'summer.time '"                                     
-        search_word = '.'                                           
+        self.edit_text(file_path, sentence)
+        self.loop.run()
 
-        self.edit_text(file_path,sentence)                              
-        self.loop.run()                                                 
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '0', 'search for the word is not giving results')
 
-        result=commands.getoutput ('tracker search  ' + search_word+ '|grep  '+file_path+ '|wc -l ')
-        print result                                                    
-       self.assert_(result=='0','search for the word is not giving results')
+        os.remove(file_path)
 
-        os.remove (file_path)
+    def test_text_09(self):
+        """ To check if tracker search for a word (combination of alphabets and numbers)  gives results """
 
-      def test_text_09 (self) :
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-       """ To check if tracker search for a word (combination of alphabets and numbers)  gives results """ 
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        word = "abc123"
 
-       file_path =  configuration.MYDOCS + TEST_TEXT                   
-                                                                        
-        """copy the test files """                                      
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path) 
-        word = "abc123"                                     
+        self.edit_text(file_path, word)
+        self.loop.run()
 
-        self.edit_text(file_path,word)                              
-        self.loop.run()                                                 
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
 
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result                                                    
-       self.assert_(result=='1','search for the word is not giving results')
+        os.remove(file_path)
 
-        os.remove (file_path)
+    def test_text_10(self):
+        """ To check if tracker search for a number (from a combination of alphabets and numbers)  gives 
results """
 
-      def test_text_10 (self) :
-       """ To check if tracker search for a number (from a combination of alphabets and numbers)  gives 
results """
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-       file_path =  configuration.MYDOCS + TEST_TEXT                   
-                                                                        
-        """copy the test files """                                      
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path) 
-        sentence = "abc 123"                                     
-       search_word = "123"
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        sentence = "abc 123"
+        search_word = "123"
 
-        self.edit_text(file_path,search_word)                              
-        self.loop.run()                                                 
+        self.edit_text(file_path, search_word)
+        self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' + search_word+ '|grep  '+file_path+ '|wc -l ')
-        print result                                                    
-       self.assert_(result=='0','search for the word is not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '0', 'search for the word is not giving results')
 
-        os.remove (file_path)
+        os.remove(file_path)
 
-      def test_text_12 (self) :
+    def test_text_12(self):
+        """ To check if tracker search for a word(file which contains this file is removed) gives result"""
 
-       """ To check if tracker search for a word(file which contains this file is removed) gives result"""
+        file_path = configuration.MYDOCS + TEST_TEXT
 
-       file_path =  configuration.MYDOCS + TEST_TEXT                   
-                                                                        
-        """copy the test files """                                      
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path) 
-       word = "abc"
-        self.edit_text(file_path,word)                              
-        self.loop.run()                                                 
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        word = "abc"
+        self.edit_text(file_path, word)
+        self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result                                                    
-       self.assert_(result=='1','search for the word is not giving results')
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
 
-        self.wait_for_fileop('rm' , file_path)
+        self.wait_for_fileop('rm', file_path)
 
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-       
-        result1=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path)
-       print result1
-        print result                                                    
-       self.assert_(result=='0','search for the non existing files giving results')
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
 
-      def test_text_13 (self) :
-
+        result1 = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path)
+        print result1
+        print result
+        self.assert_(
+            result == '0', 'search for the non existing files giving results')
+
+    def test_text_13(self):
         """ To check if tracker search for a word in different text files with similar 3 letter words and 
search for the word gives result """
-
-        file_path =  configuration.MYDOCS + TEST_TEXT
-        file_path_02 =  configuration.MYDOCS + TEST_TEXT_02
-        file_path_03 =  configuration.MYDOCS + TEST_TEXT_03
-
-        """copy the test files """
-
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-        sentence= " 'feet '"
-        self.edit_text(file_path,sentence)
-        self.loop.run()
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path_02)
-        sentence= " 'feel '"
-        self.edit_text(file_path_02,sentence)
-        self.loop.run()
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path_03)
-        sentence= " 'fee '"
-        self.edit_text(file_path_03,sentence)
-        self.loop.run()
-
-        search_word = 'feet'
-        result=commands.getoutput ('tracker search  ' + search_word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='1','search for the word is not giving results')
-
-        os.remove (file_path)
-        os.remove (file_path_02)
-        os.remove (file_path_03)
-
-      def test_text_14 (self) :
-
+
+        file_path = configuration.MYDOCS + TEST_TEXT
+        file_path_02 = configuration.MYDOCS + TEST_TEXT_02
+        file_path_03 = configuration.MYDOCS + TEST_TEXT_03
+
+        """copy the test files """
+
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        sentence = " 'feet '"
+        self.edit_text(file_path, sentence)
+        self.loop.run()
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path_02)
+        sentence = " 'feel '"
+        self.edit_text(file_path_02, sentence)
+        self.loop.run()
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path_03)
+        sentence = " 'fee '"
+        self.edit_text(file_path_03, sentence)
+        self.loop.run()
+
+        search_word = 'feet'
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
+
+        os.remove(file_path)
+        os.remove(file_path_02)
+        os.remove(file_path_03)
+
+    def test_text_14(self):
         """ To check if tracker search for a word in unwatched directory and gives result"""
-
-        file_path =  "/root/" + TEST_TEXT
-
-        """copy the test files """
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-        word = "framework"
-
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='0','search for the word is not giving results')
-
-        os.remove (file_path)
-
-      def test_text_15 (self) :
-
+
+        file_path = "/root/" + TEST_TEXT
+
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        word = "framework"
+
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '0', 'search for the word is not giving results')
+
+        os.remove(file_path)
+
+    def test_text_15(self):
         """ To check if tracker search for a word(file which is copied from no watch directories to watched 
directories) gives results """
-
-        FILE_NAME =  "/root/" + TEST_TEXT
-        file_path =  configuration.MYDOCS + TEST_TEXT
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-        word= "The Content framework subsystem provides data"
-
-        self.edit_text(FILE_NAME,word)
-
-        """copy the test files """
-        self.wait_for_fileop('cp', FILE_NAME, file_path)
-
-        word = "framework"
-
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='1','search for the word is giving results')
-
-        os.remove (file_path)
-
-      def test_text_16 (self) :
-
+
+        FILE_NAME = "/root/" + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        word = "The Content framework subsystem provides data"
+
+        self.edit_text(FILE_NAME, word)
+
+        """copy the test files """
+        self.wait_for_fileop('cp', FILE_NAME, file_path)
+
+        word = "framework"
+
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(result == '1', 'search for the word is giving results')
+
+        os.remove(file_path)
+
+    def test_text_16(self):
         """ To check if tracker search for a word(file which is in hidden directory) gives result"""
-        file_path =  HIDDEN_DIR+TEST_TEXT
-
-        """copy the test files """
-        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT , file_path)
-        word = "framework"
-
-        result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='0','search for the word is giving results')
-
-        os.remove (file_path)       
-
-
-class stopwords (TestUpdate):  
-     
-      def test_sw_01 (self) :
-       file_path =  configuration.MYDOCS + TEST_TEXT                   
-                                                                        
-        """copy the test files """                                      
-       test_file='/usr/share/tracker/stop-words/stopwords.en'
-       f1=open(test_file,'r')
-       lines = f1.readlines()
-       f1.close()
-       list = []
-       for word in lines:
-               result=Popen(['tracker-search',word],stdout=PIPE).stdout.read().split()
-               if result[1] == '1':
-                       list.append(word)
-       self.assert_(len(list) == 0 , 'tracker search is giving results for stopwords %s '%list)
-
-      def test_sw_02 (self) :
-       
-        word= "AND"                                     
-       result=Popen(['tracker-search',word],stdout=PIPE).stdout.read().split()             
-        self.assert_(result[1] == '0' , 'tracker search is giving results for stopwords')
-
-class db_text ( TestUpdate ) :
-
-      def test_db_01 (self):
-       file_path =  configuration.MYDOCS + TEST_TEXT           
-       self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path) 
-        word = "summer"                                     
-        self.edit_text(file_path,word)
-       time.sleep(2)
-       result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='1','search for the word is not giving results')   
-       commands.getoutput('cp  '+SRC_IMAGE_DIR+TEST_IMAGE+ '  '+TEXT_DB)
-       time.sleep(1)
-       result=commands.getoutput ('tracker search  ' + word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='1','search for the word is not giving results')   
-
-
-
-
-class msm (TestUpdate) :
-       
-      def test_msm_01 (self) :
-
-       """ To check if tracker search gives results for the word search which is copied in mass storage mode 
"""
-       
-       commands.getoutput ('umount  ' + MOUNT_PARTITION )
-       commands.getoutput ('mount -t vfat -o rw ' +MOUNT_PARTITION+'  '+MOUNT_DUMMY)
-
-       dummy_path =  MOUNT_DUMMY + TEST_TEXT           
-       file_path  =   configuration.MYDOCS+TEST_TEXT 
-       commands.getoutput('cp   '+SRC_TEXT_DIR + TEST_TEXT +'  '+ dummy_path) 
-
-       commands.getoutput ('umount  ' + MOUNT_PARTITION )
-       commands.getoutput ('mount -t vfat -o rw ' +MOUNT_PARTITION+'  '+MYDOCS)
-       time.sleep(10)
-        search_word = "information"                                     
-       result=commands.getoutput ('tracker search  ' + search_word+ '|grep  '+file_path+ '|wc -l ')
-        print result
-        self.assert_(result=='1','search for the word is not giving results')   
-
-class specialchar (TestUpdate) :
-       
-      def test_sc_01 (self):
-       """ To check if tracker search for non English characters """
-
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = HIDDEN_DIR + TEST_TEXT
+
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        word = "framework"
+
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(result == '0', 'search for the word is giving results')
+
+        os.remove(file_path)
+
+
+class stopwords (TestUpdate):
+
+    def test_sw_01(self):
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
+        test_file = '/usr/share/tracker/stop-words/stopwords.en'
+        f1 = open(test_file, 'r')
+        lines = f1.readlines()
+        f1.close()
+        list = []
+        for word in lines:
+            result = Popen(
+                ['tracker-search', word], stdout=PIPE).stdout.read().split()
+            if result[1] == '1':
+                list.append(word)
+        self.assert_(
+            len(list) == 0, 'tracker search is giving results for stopwords %s ' % list)
+
+    def test_sw_02(self):
+
+        word = "AND"
+        result = Popen(
+            ['tracker-search', word], stdout=PIPE).stdout.read().split()
+        self.assert_(
+            result[1] == '0', 'tracker search is giving results for stopwords')
+
+
+class db_text (TestUpdate):
+
+    def test_db_01(self):
+        file_path = configuration.MYDOCS + TEST_TEXT
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-        sentence= " 'andaübÃc' Ã"
+        word = "summer"
+        self.edit_text(file_path, word)
+        time.sleep(2)
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
+        commands.getoutput(
+            'cp  ' + SRC_IMAGE_DIR + TEST_IMAGE + '  ' + TEXT_DB)
+        time.sleep(1)
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
+
+
+class msm (TestUpdate):
+
+    def test_msm_01(self):
+        """ To check if tracker search gives results for the word search which is copied in mass storage 
mode """
+
+        commands.getoutput('umount  ' + MOUNT_PARTITION)
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + '  ' + MOUNT_DUMMY)
+
+        dummy_path = MOUNT_DUMMY + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
+        commands.getoutput(
+            'cp   ' + SRC_TEXT_DIR + TEST_TEXT + '  ' + dummy_path)
+
+        commands.getoutput('umount  ' + MOUNT_PARTITION)
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + '  ' + MYDOCS)
+        time.sleep(10)
+        search_word = "information"
+        result = commands.getoutput(
+            'tracker search  ' + search_word + '|grep  ' + file_path + '|wc -l ')
+        print result
+        self.assert_(
+            result == '1', 'search for the word is not giving results')
 
-        self.edit_text(file_path,sentence)
+
+class specialchar (TestUpdate):
+
+    def test_sc_01(self):
+        """ To check if tracker search for non English characters """
+
+        file_path = configuration.MYDOCS + TEST_TEXT
+
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
+        sentence = " 'andaübÃc' Ã"
+
+        self.edit_text(file_path, sentence)
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-        os.remove (file_path)
+        os.remove(file_path)
 
-      def test_cs_02 (self):
+    def test_cs_02(self):
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
 
-       sentence = "выйшщхжюб"
+        sentence = "выйшщхжюб"
 
-        self.edit_text(file_path,sentence)
+        self.edit_text(file_path, sentence)
 
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-        os.remove (file_path)
+        os.remove(file_path)
 
-      def test_cs_03 (self) :
-        
-       """ To check if tracker search for non English characters """
+    def test_cs_03(self):
+        """ To check if tracker search for non English characters """
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
         sentence = " 'кÑ<80>аÑ<81>Ð' "
 
-        self.edit_text(file_path,sentence)
+        self.edit_text(file_path, sentence)
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-        os.remove (file_path)
+        os.remove(file_path)
 
-      def test_cs_04 (self) :
+    def test_cs_04(self):
+        """ To check if tracker search for non English  accented characters """
 
-       """ To check if tracker search for non English  accented characters """
-
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
         sentence = " 'aüéâbc äàç xêëèïyz üéâäàçêëèï and aübêcëdçeï' "
 
-        self.edit_text(file_path,sentence)
+        self.edit_text(file_path, sentence)
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
-
-        os.remove (file_path)
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-      def test_cs_05 (self) :
+        os.remove(file_path)
 
-       """ To check if tracker search for combination of English characters and accented characters"""
+    def test_cs_05(self):
+        """ To check if tracker search for combination of English characters and accented characters"""
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
         sentence = " 'beautiful and xêëèïyz üéâäàçêëèï and aübêcëdçeï' "
 
-        self.edit_text(file_path,sentence)
+        self.edit_text(file_path, sentence)
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +sentence+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + sentence + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
+
+        os.remove(file_path)
 
-        os.remove (file_path)
-         
-      def test_cs_06 (self):
-         
-       """ To check if tracker search for normalisation """
+    def test_cs_06(self):
+        """ To check if tracker search for normalisation """
 
-        file_path =  configuration.MYDOCS + TEST_TEXT
+        file_path = configuration.MYDOCS + TEST_TEXT
 
         """copy the test files """
         self.wait_for_fileop('cp', SRC_TEXT_DIR + TEST_TEXT, file_path)
-       sentence = "école"
+        sentence = "école"
         word = " 'ecole' "
 
-        self.edit_text(file_path,sentence)
+        self.edit_text(file_path, sentence)
         self.loop.run()
 
-        result=commands.getoutput ('tracker search  ' +word+ '|grep  '+file_path+ '|wc -l ')
+        result = commands.getoutput(
+            'tracker search  ' + word + '|grep  ' + file_path + '|wc -l ')
         print result
-        self.assert_(result=='1','search for the sentence is not giving results')
+        self.assert_(
+            result == '1', 'search for the sentence is not giving results')
 
-        os.remove (file_path)
-       
-class applications (TestUpdate) :
+        os.remove(file_path)
 
-       def test_app_Images (self) :
 
-           result= commands.getoutput ('tracker search -i ' + TEST_IMAGE+ '|wc -l')
-           self.assert_(result!=0 , 'tracker search for images is not giving results')
+class applications (TestUpdate):
 
+    def test_app_Images(self):
 
-       def test_app_Music (self) :
+        result = commands.getoutput(
+            'tracker search -i ' + TEST_IMAGE + '|wc -l')
+        self.assert_(
+            result != 0, 'tracker search for images is not giving results')
 
-           result= commands.getoutput ('tracker search -m ' + TEST_MUSIC+ '|wc -l')
-           self.assert_(result!=0 , 'tracker search for music is not giving results')
+    def test_app_Music(self):
 
-       def test_app_Vidoes (self) :
+        result = commands.getoutput(
+            'tracker search -m ' + TEST_MUSIC + '|wc -l')
+        self.assert_(
+            result != 0, 'tracker search for music is not giving results')
 
-           result= commands.getoutput ('tracker search -v ' + TEST_VIDEO+ '|wc -l')
-           self.assert_(result!=0 , 'tracker search for Videos is not giving results')
+    def test_app_Vidoes(self):
 
+        result = commands.getoutput(
+            'tracker search -v ' + TEST_VIDEO + '|wc -l')
+        self.assert_(
+            result != 0, 'tracker search for Videos is not giving results')
 
-       def test_app_music_albums (self) :
+    def test_app_music_albums(self):
 
-           result= commands.getoutput ('tracker search --music-albums SinCos  | wc -l')
-           self.assert_(result!=0 , 'tracker search for music albums is not giving results')
+        result = commands.getoutput(
+            'tracker search --music-albums SinCos  | wc -l')
+        self.assert_(
+            result != 0, 'tracker search for music albums is not giving results')
 
-       
-       def test_app_music_artists (self) :
+    def test_app_music_artists(self):
 
-           result= commands.getoutput ('tracker search --music-artists AbBaby  | wc -l')
-           self.assert_(result!=0 , 'tracker search for music artists is not giving results')
+        result = commands.getoutput(
+            'tracker search --music-artists AbBaby  | wc -l')
+        self.assert_(
+            result != 0, 'tracker search for music artists is not giving results')
 
-       def test_app_folders (self) :
+    def test_app_folders(self):
 
-           result= commands.getoutput ('tracker search -s '+TEST_DIR_1 + '| wc -l')
-           self.assert_(result!=0 , 'tracker search for folders is not giving results')
+        result = commands.getoutput(
+            'tracker search -s ' + TEST_DIR_1 + '| wc -l')
+        self.assert_(
+            result != 0, 'tracker search for folders is not giving results')
 
-       
-       def test_app_email (self) :
+    def test_app_email(self):
 
-           INSERT = """ INSERT {<qmf://fenix.nokia.com/email#3333> a nmo:Email ;
+        INSERT = """ INSERT {<qmf://fenix.nokia.com/email#3333> a nmo:Email ;
                         nmo:receivedDate '2010-05-24T15:17:26Z' ;
                         nmo:messageSubject 'searching_for_Email' } 
                       """
-           self.resources_new.SparqlUpdate (INSERT)
-           
-           result = commands.getoutput ('tracker search -ea searching_for_Email |wc -l ')
-           self.assert_(result!=0 , 'tracker search for files is not giving results')
+        self.resources_new.SparqlUpdate(INSERT)
 
-       def test_app_email (self) :
+        result = commands.getoutput(
+            'tracker search -ea searching_for_Email |wc -l ')
+        self.assert_(
+            result != 0, 'tracker search for files is not giving results')
 
-           INSERT = """ INSERT {<qmf://fenix.nokia.com/email#3333> a nmo:Email ;
+    def test_app_email(self):
+
+        INSERT = """ INSERT {<qmf://fenix.nokia.com/email#3333> a nmo:Email ;
                         nmo:receivedDate '2010-05-24T15:17:26Z' ;
                         nmo:messageSubject 'searching_for_Email' } 
                       """
-           #self.resources.SparqlUpdate (INSERT)
-            self.resources.SparqlUpdate  (INSERT)
-           
-           result = commands.getoutput ('tracker search -ea searching_for_Email |wc -l ')
-           self.assert_(result!=0 , 'tracker search for files is not giving results')
+        #self.resources.SparqlUpdate (INSERT)
+        self.resources.SparqlUpdate(INSERT)
 
+        result = commands.getoutput(
+            'tracker search -ea searching_for_Email |wc -l ')
+        self.assert_(
+            result != 0, 'tracker search for files is not giving results')
 
-if __name__ == "__main__":  
-       unittest.main()
-       """
+
+if __name__ == "__main__":
+    unittest.main()
+    """
        basic_tcs_list=unittest.TestLoader().getTestCaseNames(basic)       
         basic_testsuite=unittest.TestSuite(map(basic, basic_tcs_list))
 
diff --git a/tests/functional-tests/mass-storage-mode.py b/tests/functional-tests/mass-storage-mode.py
index 9e8316c..34a3862 100644
--- a/tests/functional-tests/mass-storage-mode.py
+++ b/tests/functional-tests/mass-storage-mode.py
@@ -18,7 +18,9 @@
 # Boston, MA  02110-1301, USA.
 
 
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -32,23 +34,22 @@ TRACKER = 'org.freedesktop.Tracker1'
 TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources/Classes'
 RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources.Class"
 
-MINER="org.freedesktop.Tracker1.Miner.Files"
-MINER_OBJ="/org/freedesktop/Tracker1/Miner/Files"
-MINER_IFACE="org.freedesktop.Tracker1.Miner"
-
+MINER = "org.freedesktop.Tracker1.Miner.Files"
+MINER_OBJ = "/org/freedesktop/Tracker1/Miner/Files"
+MINER_IFACE = "org.freedesktop.Tracker1.Miner"
 
 
 TEST_IMAGE = "test-image-1.jpg"
 TEST_MUSIC = "tracker-mp3-test.mp3"
 TEST_VIDEO = "test-video.mp4"
-TEST_TEXT =  "test-text-01.txt"
+TEST_TEXT = "test-text-01.txt"
 
 MOUNT_DUMMY = '/root/dummy/'
 MYDOCS = configuration.MYDOCS
 MOUNT_PARTITION = '/dev/mmcblk0p1'
 
-SUB_FOLDER_DUMMY =  MOUNT_DUMMY + '/' + 's1/s2/s3/s4/s5/'
-SUB_FOLDER_MYDOCS =  MYDOCS + 's1/s2/s3/s4/s5/'
+SUB_FOLDER_DUMMY = MOUNT_DUMMY + '/' + 's1/s2/s3/s4/s5/'
+SUB_FOLDER_MYDOCS = MYDOCS + 's1/s2/s3/s4/s5/'
 
 
 DUMMY_DIR = '/root/dummy/TEST_DIR/'
@@ -60,27 +61,28 @@ RENAME_VIDEO = 'rename_video.mp4'
 """create two test directories in miner monitored path  """
 target = configuration.check_target()
 
-def check_mount() :
-       if not os.path.exists (MOUNT_DUMMY) :
-               commands.getoutput ('mkdir  ' + MOUNT_DUMMY)
 
-       commands.getoutput ('umount ' + MOUNT_PARTITION )
+def check_mount():
+    if not os.path.exists(MOUNT_DUMMY):
+        commands.getoutput('mkdir  ' + MOUNT_DUMMY)
+
+    commands.getoutput('umount ' + MOUNT_PARTITION)
 
-       if commands.getoutput('df').find('/home/user/MyDocs') == -1  and commands.getoutput('df').find('/dev/mmcblk0p1') == -1 :
-               print 'in else if'
-               commands.getoutput ('mount  ' + MOUNT_PARTITION + '  ' + MYDOCS )
-       else :
-                print "in else else"
-                commands.getoutput ('umount ' + MOUNT_PARTITION )
-                commands.getoutput ('mount ' + MOUNT_PARTITION + '  ' + MYDOCS )
+    if commands.getoutput('df').find('/home/user/MyDocs') == -1 and commands.getoutput('df').find('/dev/mmcblk0p1') == -1:
+        print 'in else if'
+        commands.getoutput('mount  ' + MOUNT_PARTITION + '  ' + MYDOCS)
+    else:
+        print "in else else"
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+        commands.getoutput('mount ' + MOUNT_PARTITION + '  ' + MYDOCS)
 
 
 if target == configuration.MAEMO6_HW:
-       """target is device """
-       SRC_IMAGE_DIR = configuration.TEST_DATA_IMAGES
-       SRC_MUSIC_DIR = configuration.TEST_DATA_MUSIC
-       SRC_VIDEO_DIR = configuration.TEST_DATA_VIDEO
-       MYDOCS_SUB =  configuration.MYDOCS + 's1/s2/s3/s4/s5/'
+    """target is device """
+    SRC_IMAGE_DIR = configuration.TEST_DATA_IMAGES
+    SRC_MUSIC_DIR = configuration.TEST_DATA_MUSIC
+    SRC_VIDEO_DIR = configuration.TEST_DATA_VIDEO
+    MYDOCS_SUB = configuration.MYDOCS + 's1/s2/s3/s4/s5/'
 
 
 class TestUpdate (unittest.TestCase):
@@ -88,471 +90,503 @@ class TestUpdate (unittest.TestCase):
     def setUp(self):
         bus = dbus.SessionBus()
         tracker = bus.get_object(TRACKER, TRACKER_OBJ)
-        self.resources = dbus.Interface (tracker,
-                                         dbus_interface=RESOURCES_IFACE)
-
-        miner_obj= bus.get_object(MINER,MINER_OBJ)
-        self.miner=dbus.Interface (miner_obj,dbus_interface=MINER_IFACE)
+        self.resources = dbus.Interface(tracker,
+                                        dbus_interface=RESOURCES_IFACE)
 
+        miner_obj = bus.get_object(MINER, MINER_OBJ)
+        self.miner = dbus.Interface(miner_obj, dbus_interface=MINER_IFACE)
 
         self.loop = GObject.MainLoop()
         self.dbus_loop = DBusGMainLoop(set_as_default=True)
-        self.bus = dbus.SessionBus (self.dbus_loop)
-
-        self.bus.add_signal_receiver (self.miner_processing_cb,
-                                      signal_name="Progress",
-                                      dbus_interface=MINER_IFACE,
-                                      path=MINER_OBJ)
-
+        self.bus = dbus.SessionBus(self.dbus_loop)
 
+        self.bus.add_signal_receiver(self.miner_processing_cb,
+                                     signal_name="Progress",
+                                     dbus_interface=MINER_IFACE,
+                                     path=MINER_OBJ)
 
-    def miner_processing_cb (self,status,handle):
+    def miner_processing_cb(self, status, handle):
         print "GOT PROGRESS FROM MINER"
-        if (status == "Idle" ):
+        if (status == "Idle"):
             print "Miner is in idle status "
-           self.loop.quit ()
-        else :
+            self.loop.quit()
+        else:
             print "Miner not in Idle "
 
-    def wait_for_fileop (self, cmd, src, dst=''):
+    def wait_for_fileop(self, cmd, src, dst=''):
 
         if (cmd == "rm"):
             os.remove(src)
         elif (cmd == "cp"):
             shutil.copy2(src, dst)
         else:
-            shutil.move(src,dst)
+            shutil.move(src, dst)
         self.loop.run()
 
-    def edit_text (self, file) :
-       test_file = MOUNT_DUMMY + file
-       f=open(test_file,"w")
-       lines = "Editing this file to test massstorage mode"
-       f.writelines(lines)
-       f.close()
+    def edit_text(self, file):
+        test_file = MOUNT_DUMMY + file
+        f = open(test_file, "w")
+        lines = "Editing this file to test massstorage mode"
+        f.writelines(lines)
+        f.close()
 
 """ copy in mass storage mode test cases """
-class copy (TestUpdate):
 
-    def test_image_01 (self):
 
+class copy (TestUpdate):
+
+    def test_image_01(self):
         """ To check if tracker indexes image file copied in massstorage mode """
 
         file_path = MYDOCS + TEST_IMAGE
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw  '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw  ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
         """copy the test file """
         shutil.copy2(SRC_IMAGE_DIR + TEST_IMAGE, MOUNT_DUMMY)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        self.loop.run ()
+        print commands.getoutput('ls ' + MYDOCS)
+
+        self.loop.run()
 
         """ verify if miner indexed these file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied image file is not shown as indexed')
+        self.assert_(
+            result == '1', 'copied image file is not shown as indexed')
 
         os.remove(file_path)
 
-    def test_music_02 (self):
-
+    def test_music_02(self):
         """ To check if tracker indexes audio file copied in massstorage mode """
 
         file_path = MYDOCS + TEST_MUSIC
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
         """copy the test files """
         shutil.copy2(SRC_MUSIC_DIR + TEST_MUSIC, MOUNT_DUMMY)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount  -t vfat -o rw  '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        self.loop.run ()
+        commands.getoutput(
+            'mount  -t vfat -o rw  ' + MOUNT_PARTITION + ' ' + MYDOCS)
+
+        print commands.getoutput('ls ' + MYDOCS)
+
+        self.loop.run()
 
         """ verify if miner indexed these file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied music file is not shown as indexed')
+        self.assert_(
+            result == '1', 'copied music file is not shown as indexed')
 
         os.remove(file_path)
 
-    def test_video_03 (self):
-
+    def test_video_03(self):
         """ To check if tracker indexes video file copied in massstorage mode """
 
         file_path = MYDOCS + TEST_VIDEO
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw'  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
         """copy the test files """
         shutil.copy2(SRC_VIDEO_DIR + TEST_VIDEO, MOUNT_DUMMY)
 
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        print commands.getoutput('ls ' + MYDOCS)
 
-        self.loop.run ()
+        self.loop.run()
 
         """ verify if miner indexed these file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
-       print result
-        self.assert_(result == '1' , 'copied video file is not shown as indexed')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
+        print result
+        self.assert_(
+            result == '1', 'copied video file is not shown as indexed')
 
         os.remove(file_path)
 
 
 """ move in mass storage mode"""
-class move (TestUpdate):                                                     
-                                                            
-                                                            
-    def test_image_01 (self):                               
-                                                               
-        """ To check if tracker indexes moved image file in massstorage mode """      
-                                                                                
-        file_path_dst =  MYDOCS_DIR + TEST_IMAGE                                
-        file_path_src =  MYDOCS + TEST_IMAGE                
-                                                                                
-        check_mount()                                                           
-
-        """copy the test files """
-        self.wait_for_fileop('cp', SRC_IMAGE_DIR + TEST_IMAGE, file_path_src)
-
-        result =  commands.getoutput(' tracker search -i -l 10000 | grep  ' + file_path_src + ' |wc -l' )
-        self.assert_(result == '1' , 'copied image file is not shown as indexed')
-
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-
-        shutil.move( MOUNT_DUMMY + TEST_IMAGE , DUMMY_DIR)
-
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-
-        print commands.getoutput ( 'ls ' + MYDOCS)
-        self.loop.run()
-
-        """ verify if miner indexed these file.  """
-        result =  commands.getoutput(' tracker search -i -l 10000 | grep  ' + file_path_dst + ' |wc -l' )
-
-        self.assert_(result == '1'  , 'moved file is not listed in tracker search')
-        result1 = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' and result1 == '0' , 'Both the  original and moved files are listed in tracker search')
-
-        os.remove(file_path_dst)
-
-
-    def test_music_01 (self):
-
-        """ To check if tracker indexes moved audio files in massstorage mode """
-
-
-       file_path_dst =  MYDOCS_DIR + TEST_MUSIC
-        file_path_src =  MYDOCS + TEST_MUSIC
-
-        check_mount()
-
-        """copy the test files """
-        self.wait_for_fileop('cp', SRC_MUSIC_DIR + TEST_MUSIC, file_path_src)
-
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' , 'copied music file is not shown as indexed')
-
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-
-        shutil.move( MOUNT_DUMMY  + TEST_MUSIC , DUMMY_DIR)
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-
-        print commands.getoutput ( 'ls ' + MYDOCS)
-
-        self.loop.run()
-
-        """ verify if miner indexed these file.  """
-
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_dst + ' | wc 
-l')
-        print result                                                                                         
             
-        self.assert_(result == '1' , 'moved music file is not shown as indexed')                             
             
-                                                                                                             
             
-        result1 = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc 
-l')
-        self.assert_(result == '1' and result1 == '0' , 'Both original and moved files are listed in tracker 
search ')
-                                                                                                             
             
-        os.remove(file_path_dst)                                                                             
         
-                                                                                                             
             
-                                                                                                             
             
-    def test_video_01 (self):                                                                                
             
-                                                                                                             
             
-        """ To check if tracker indexes moved video files in massstorage mode """                            
         
-                                                                                                             
             
-        file_path_dst =  MYDOCS_DIR + TEST_VIDEO                                                             
             
-        file_path_src =  MYDOCS + TEST_VIDEO                                                                 
             
-                                                                                                             
             
-        check_mount()                         
-       """copy the test files """                                                                            
            
-        self.wait_for_fileop('cp', SRC_VIDEO_DIR + TEST_VIDEO, file_path_src)                                
             
-                                                                                                             
             
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc 
-l')
-        self.assert_(result == '1' , 'copied video file is not shown as indexed')                            
             
-                                                                                                             
             
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )                                                   
             
-                                                                                                             
             
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)                                
             
-                                                                                                             
         
-        shutil.move(MOUNT_DUMMY + TEST_VIDEO , DUMMY_DIR)                                                    
             
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )                                                   
             
-                                                                                                             
             
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)                       
             
-                                                                                                             
             
-        print commands.getoutput ( 'ls ' + MYDOCS)                                                           
             
-        self.loop.run()                                                                                      
             
-        """ verify if miner indexed these file.  """                                                         
             
-                                                                                                             
             
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_dst + ' | wc 
-l')
-                                                                                                             
         
-        self.assert_(result == '1' , 'moved file is not listed in tracker search ')
-                                                                                                             
             
-        result1 = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc 
-l')
-        self.assert_(result == '1' and result1 == '0' , 'Both original and moved files are listed in tracker 
search ')    
-                                                                                                             
         
-        os.remove(file_path_dst)                                                                             
             
-                                                
 
-class rename (TestUpdate):
 
+class move (TestUpdate):
 
-    def test_image_01 (self):
+    def test_image_01(self):
+        """ To check if tracker indexes moved image file in massstorage mode """
 
-       """ To check if tracker indexes renamed image file in massstorage mode """
-
-       file_path_dst =  MYDOCS + RENAME_IMAGE
-       file_path_src =  MYDOCS + TEST_IMAGE
+        file_path_dst = MYDOCS_DIR + TEST_IMAGE
+        file_path_src = MYDOCS + TEST_IMAGE
 
         check_mount()
 
-       """copy the test files """
-       self.wait_for_fileop('cp', SRC_IMAGE_DIR + TEST_IMAGE, file_path_src)
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_IMAGE_DIR + TEST_IMAGE, file_path_src)
+
+        result = commands.getoutput(
+            ' tracker search -i -l 10000 | grep  ' + file_path_src + ' |wc -l')
+        self.assert_(
+            result == '1', 'copied image file is not shown as indexed')
+
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-       result =  commands.getoutput(' tracker search -i -l 10000 | grep  ' + file_path_src + ' |wc -l' )
-        self.assert_(result == '1' , 'copied image file is not shown as indexed')
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
-       shutil.move( MOUNT_DUMMY + TEST_IMAGE ,  MOUNT_DUMMY+RENAME_IMAGE)
+        shutil.move(MOUNT_DUMMY + TEST_IMAGE, DUMMY_DIR)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
-        print commands.getoutput ( 'ls ' + MYDOCS)
-       self.loop.run()
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-       """ verify if miner indexed these file.  """
-       result =  commands.getoutput(' tracker search -i -l 10000 | grep  ' + file_path_dst + ' |wc -l' )
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        self.assert_(result == '1'  , 'renamed file s not listed in tracker search')
-       result1 = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' and result1 == '0' , 'Both the  original and renamed files are listed in tracker search')
+        print commands.getoutput('ls ' + MYDOCS)
+        self.loop.run()
 
-       os.remove(file_path_dst)
+        """ verify if miner indexed these file.  """
+        result = commands.getoutput(
+            ' tracker search -i -l 10000 | grep  ' + file_path_dst + ' |wc -l')
 
+        self.assert_(
+            result == '1', 'moved file is not listed in tracker search')
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both the  original and moved files are listed in tracker search')
 
-    def test_music_01 (self):
+        os.remove(file_path_dst)
 
-       """ To check if tracker indexes renamed audio files in massstorage mode """
+    def test_music_01(self):
+        """ To check if tracker indexes moved audio files in massstorage mode """
 
-       file_path_dst =  MYDOCS + RENAME_MUSIC
-       file_path_src =  MYDOCS + TEST_MUSIC
+        file_path_dst = MYDOCS_DIR + TEST_MUSIC
+        file_path_src = MYDOCS + TEST_MUSIC
 
         check_mount()
 
-       """copy the test files """
-       self.wait_for_fileop('cp', SRC_MUSIC_DIR + TEST_MUSIC, file_path_src)
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_MUSIC_DIR + TEST_MUSIC, file_path_src)
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(
+            result == '1', 'copied music file is not shown as indexed')
+
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' , 'copied music file is not shown as indexed')
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
-       shutil.move( MOUNT_DUMMY  + TEST_MUSIC , MOUNT_DUMMY+RENAME_MUSIC)
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        shutil.move(MOUNT_DUMMY + TEST_MUSIC, DUMMY_DIR)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-       self.loop.run()
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-       """ verify if miner indexed these file.  """
+        print commands.getoutput('ls ' + MYDOCS)
+
+        self.loop.run()
+
+        """ verify if miner indexed these file.  """
 
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_dst + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_dst + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'renamed music file is not shown as indexed')
+        self.assert_(result == '1', 'moved music file is not shown as indexed')
+
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both original and moved files are listed in tracker search ')
+
+        os.remove(file_path_dst)
+
+    def test_video_01(self):
+        """ To check if tracker indexes moved video files in massstorage mode """
+
+        file_path_dst = MYDOCS_DIR + TEST_VIDEO
+        file_path_src = MYDOCS + TEST_VIDEO
+
+        check_mount()
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_VIDEO_DIR + TEST_VIDEO, file_path_src)
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(
+            result == '1', 'copied video file is not shown as indexed')
+
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
+        shutil.move(MOUNT_DUMMY + TEST_VIDEO, DUMMY_DIR)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
+
+        print commands.getoutput('ls ' + MYDOCS)
+        self.loop.run()
+        """ verify if miner indexed these file.  """
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_dst + ' | wc -l')
+
+        self.assert_(
+            result == '1', 'moved file is not listed in tracker search ')
+
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both original and moved files are listed in tracker search ')
+
+        os.remove(file_path_dst)
+
+
+class rename (TestUpdate):
+
+    def test_image_01(self):
+        """ To check if tracker indexes renamed image file in massstorage mode """
+
+        file_path_dst = MYDOCS + RENAME_IMAGE
+        file_path_src = MYDOCS + TEST_IMAGE
+
+        check_mount()
+
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_IMAGE_DIR + TEST_IMAGE, file_path_src)
+
+        result = commands.getoutput(
+            ' tracker search -i -l 10000 | grep  ' + file_path_src + ' |wc -l')
+        self.assert_(
+            result == '1', 'copied image file is not shown as indexed')
+
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
+        shutil.move(MOUNT_DUMMY + TEST_IMAGE,  MOUNT_DUMMY + RENAME_IMAGE)
+
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
+
+        print commands.getoutput('ls ' + MYDOCS)
+        self.loop.run()
 
-        result1 = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' and result1 == '0' , 'Both original and renamed files are listed in tracker search ')
+        """ verify if miner indexed these file.  """
+        result = commands.getoutput(
+            ' tracker search -i -l 10000 | grep  ' + file_path_dst + ' |wc -l')
+
+        self.assert_(
+            result == '1', 'renamed file s not listed in tracker search')
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both the  original and renamed files are listed in tracker search')
+
+        os.remove(file_path_dst)
+
+    def test_music_01(self):
+        """ To check if tracker indexes renamed audio files in massstorage mode """
+
+        file_path_dst = MYDOCS + RENAME_MUSIC
+        file_path_src = MYDOCS + TEST_MUSIC
+
+        check_mount()
+
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_MUSIC_DIR + TEST_MUSIC, file_path_src)
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(
+            result == '1', 'copied music file is not shown as indexed')
 
-       os.remove(file_path_dst)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
+        shutil.move(MOUNT_DUMMY + TEST_MUSIC, MOUNT_DUMMY + RENAME_MUSIC)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
+
+        print commands.getoutput('ls ' + MYDOCS)
+
+        self.loop.run()
+
+        """ verify if miner indexed these file.  """
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_dst + ' | wc -l')
+        print result
+        self.assert_(
+            result == '1', 'renamed music file is not shown as indexed')
 
-    def test_video_01 (self):
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both original and renamed files are listed in tracker search ')
 
-       """ To check if tracker indexes renamed video files in massstorage mode """
+        os.remove(file_path_dst)
 
-       file_path_dst =  MYDOCS + RENAME_VIDEO
-       file_path_src =  MYDOCS + TEST_VIDEO
+    def test_video_01(self):
+        """ To check if tracker indexes renamed video files in massstorage mode """
+
+        file_path_dst = MYDOCS + RENAME_VIDEO
+        file_path_src = MYDOCS + TEST_VIDEO
 
         check_mount()
 
-       """copy the test files """
-       self.wait_for_fileop('cp', SRC_VIDEO_DIR + TEST_VIDEO, file_path_src)
+        """copy the test files """
+        self.wait_for_fileop('cp', SRC_VIDEO_DIR + TEST_VIDEO, file_path_src)
+
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(
+            result == '1', 'copied video file is not shown as indexed')
 
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' , 'copied video file is not shown as indexed')
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
-       shutil.move(MOUNT_DUMMY + TEST_VIDEO , MOUNT_DUMMY+RENAME_VIDEO)
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
-        print commands.getoutput ( 'ls ' + MYDOCS)
-       self.loop.run()
-       """ verify if miner indexed these file.  """
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_dst + ' | wc -l')
+        shutil.move(MOUNT_DUMMY + TEST_VIDEO, MOUNT_DUMMY + RENAME_VIDEO)
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        self.assert_(result == '1' , 'renamed file is not listed in tracker search ')
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        result1 = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
-        self.assert_(result == '1' and result1 == '0' , 'Both original and renamed files are listed in tracker search ')
+        print commands.getoutput('ls ' + MYDOCS)
+        self.loop.run()
+        """ verify if miner indexed these file.  """
 
-       os.remove(file_path_dst)
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_dst + ' | wc -l')
+
+        self.assert_(
+            result == '1', 'renamed file is not listed in tracker search ')
+
+        result1 = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path_src + ' | wc -l')
+        self.assert_(result == '1' and result1 == '0',
+                     'Both original and renamed files are listed in tracker search ')
+
+        os.remove(file_path_dst)
 
 
 """ subfolder operations in mass storage mode """
-class subfolder (TestUpdate):
 
-    def test_create_01 (self):
 
+class subfolder (TestUpdate):
+
+    def test_create_01(self):
         """ To check if tracker indexes image file copied to a
         newly created subfolder in massstorage mode """
 
         file_path = SUB_FOLDER_MYDOCS + TEST_IMAGE
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
         """create a subfolder """
-        commands.getoutput('mkdir -p '+ SUB_FOLDER_DUMMY)
+        commands.getoutput('mkdir -p ' + SUB_FOLDER_DUMMY)
 
         """copy the test file """
         shutil.copy2(SRC_IMAGE_DIR + TEST_IMAGE, SUB_FOLDER_DUMMY)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        print commands.getoutput ( 'ls ' + SUB_FOLDER_MYDOCS)
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        
-        self.loop.run ()
+        print commands.getoutput('ls ' + SUB_FOLDER_MYDOCS)
+
+        self.loop.run()
 
         """ verify if miner indexed these file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied image file is not shown as indexed')
+        self.assert_(
+            result == '1', 'copied image file is not shown as indexed')
 
         shutil.rmtree(MYDOCS + 's1')
 
-
-    def test_delete_02 (self):
-       """ To check if tracker un-indexes image file in a
+    def test_delete_02(self):
+        """ To check if tracker un-indexes image file in a
 
         subfolder if subfolder is deleted in massstorage mode """
 
         file_path = SUB_FOLDER_MYDOCS + TEST_IMAGE
 
         """create a subfolder """
-        commands.getoutput('mkdir -p '+ SUB_FOLDER_MYDOCS)
+        commands.getoutput('mkdir -p ' + SUB_FOLDER_MYDOCS)
 
         """copy the test file """
         shutil.copy2(SRC_IMAGE_DIR + TEST_IMAGE, SUB_FOLDER_MYDOCS)
-        self.loop.run ()
+        self.loop.run()
 
         """ 2. verify if miner indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied file is not shown as indexed')
+        self.assert_(result == '1', 'copied file is not shown as indexed')
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
         shutil.rmtree(MOUNT_DUMMY + '/' + 's1')
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        print commands.getoutput ( 'ls ' + SUB_FOLDER_MYDOCS)
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        
-        self.loop.run ()
+        print commands.getoutput('ls ' + SUB_FOLDER_MYDOCS)
+
+        self.loop.run()
 
         """ verify if miner un-indexed these file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '0' , 'deleted image file is shown as indexed')
+        self.assert_(result == '0', 'deleted image file is shown as indexed')
 
 
 """ delete files in mass storage mode """
-class delete (TestUpdate):
 
-    def test_image_01 (self):
 
+class delete (TestUpdate):
+
+    def test_image_01(self):
         """ To check if tracker indexes image if its deleted in massstorage mode """
 
         file_path = MYDOCS + TEST_IMAGE
@@ -561,36 +595,34 @@ class delete (TestUpdate):
         self.wait_for_fileop('cp', SRC_IMAGE_DIR + TEST_IMAGE, file_path)
 
         """ 2. verify if miner indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied file is not shown as indexed')
+        self.assert_(result == '1', 'copied file is not shown as indexed')
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
         """remove the test files """
         os.remove(MOUNT_DUMMY + '/' + TEST_IMAGE)
 
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        print commands.getoutput('ls ' + MYDOCS)
 
-       
-        self.loop.run ()
+        self.loop.run()
 
         """ verify if miner un-indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -i  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '0' , 'deleted image file is shown as indexed')
-
-    def test_music_02 (self):
+        self.assert_(result == '0', 'deleted image file is shown as indexed')
 
+    def test_music_02(self):
         """ To check if tracker indexes music if its deleted in massstorage mode """
 
         file_path = MYDOCS + TEST_MUSIC
@@ -599,36 +631,34 @@ class delete (TestUpdate):
         self.wait_for_fileop('cp', SRC_MUSIC_DIR + TEST_MUSIC, file_path)
 
         """ 2. verify if miner indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '1' , 'copied file is not shown as indexed')
+        self.assert_(result == '1', 'copied file is not shown as indexed')
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
+
         """remove the test files """
         os.remove(MOUNT_DUMMY + '/' + TEST_MUSIC)
 
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        print commands.getoutput('ls ' + MYDOCS)
 
-       
-        self.loop.run ()
+        self.loop.run()
 
         """ verify if miner un-indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -m  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '0' , 'deleted music file is shown as indexed')
-
-    def test_video_03 (self):
+        self.assert_(result == '0', 'deleted music file is shown as indexed')
 
+    def test_video_03(self):
         """ To check if tracker indexes video if its deleted in massstorage mode """
 
         file_path = MYDOCS + TEST_VIDEO
@@ -637,147 +667,137 @@ class delete (TestUpdate):
         self.wait_for_fileop('cp', SRC_VIDEO_DIR + TEST_VIDEO, file_path)
 
         """ 2. verify if miner indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
 
-       print result
-        self.assert_(result == '1' , 'copied file is not shown as indexed')
+        print result
+        self.assert_(result == '1', 'copied file is not shown as indexed')
 
         check_mount()
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
+        commands.getoutput('umount ' + MOUNT_PARTITION)
+
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
         """remove the test files """
         os.remove(MOUNT_DUMMY + '/' + TEST_VIDEO)
 
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-        print commands.getoutput ( 'ls ' + MYDOCS)
+        print commands.getoutput('ls ' + MYDOCS)
 
-      
-        self.loop.run ()
+        self.loop.run()
 
         """ verify if miner un-indexed this file.  """
-        result = commands.getoutput ('tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
+        result = commands.getoutput(
+            'tracker search --limit=10000  -v  | grep ' + file_path + ' | wc -l')
         print result
-        self.assert_(result == '0' , 'deleted video file is shown as indexed')
-
-class text (TestUpdate) :
+        self.assert_(result == '0', 'deleted video file is shown as indexed')
 
-    def test_text_01 (self):
 
-       """ To check if tracker indexes changes made to a text file  in massstorage mode """
+class text (TestUpdate):
 
-       file_path =  MYDOCS + TEST_TEXT
+    def test_text_01(self):
+        """ To check if tracker indexes changes made to a text file  in massstorage mode """
 
-       """ Creating text file """
-       f1=open(file_path,"w")
-       f1.writelines("This is a new text file")
-       f1.close()
+        file_path = MYDOCS + TEST_TEXT
 
+        """ Creating text file """
+        f1 = open(file_path, "w")
+        f1.writelines("This is a new text file")
+        f1.close()
 
         check_mount()
 
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount '  + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
-         
-       self.edit_text(TEST_TEXT)
-
-        commands.getoutput ( 'umount ' + MOUNT_PARTITION )
-         
-        commands.getoutput ( 'mount -t vfat -o rw '  + MOUNT_PARTITION + ' ' + MYDOCS)
-         
-
-        print commands.getoutput ( 'ls ' + MYDOCS)
-       self.loop.run()
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-       """ verify if miner indexed these file.  """
+        commands.getoutput('mount ' + MOUNT_PARTITION + ' ' + MOUNT_DUMMY)
 
-        result = commands.getoutput ('tracker search  -t  massstorage | grep ' + file_path + ' | wc -l')
-        print result
-        self.assert_(result == '1' , 'copied text file is not shown as indexed')
-
-       os.remove(file_path)
-
-
-class no_file_op(TestUpdate):
+        self.edit_text(TEST_TEXT)
 
-       def test_msm_01(self):
+        commands.getoutput('umount ' + MOUNT_PARTITION)
 
-            """1. check if tracker is idle. wait till it gets to idle state. """
+        commands.getoutput(
+            'mount -t vfat -o rw ' + MOUNT_PARTITION + ' ' + MYDOCS)
 
-            check_mount()
-             commands.getoutput ('umount /dev/mmcblk0p1')
-              
-             result = self.miner.GetStatus ()
-             self.assert_(result == 'Idle' , 'Tracker is not in idle state')
-
-       def test_msm_02(self):
-
-            """
-           1. unmount the MyDocs
+        print commands.getoutput('ls ' + MYDOCS)
+        self.loop.run()
 
-            2. check if tracker search -i is retrieving result """
+        """ verify if miner indexed these file.  """
 
-           check_mount()
-           commands.getoutput ('umount /dev/mmcblk0p1')
-             
-           result = commands.getoutput ('tracker search -f -l 10000 | grep ' + MYDOCS + '  |wc -l ')
-           print result
-           self.assert_(result == '0' , 'Tracker is listing the files when the device is connected in mass storage mode')
+        result = commands.getoutput(
+            'tracker search  -t  massstorage | grep ' + file_path + ' | wc -l')
+        print result
+        self.assert_(result == '1', 'copied text file is not shown as indexed')
 
+        os.remove(file_path)
 
-       def test_msm_03(self):
 
-            """1. unmount the MyDocs
+class no_file_op(TestUpdate):
 
-               2. check if tracker search -ia is retrieving result """
+    def test_msm_01(self):
+        """1. check if tracker is idle. wait till it gets to idle state. """
 
-           check_mount()
-            commands.getoutput ('umount /dev/mmcblk0p1')
-             
-            result = commands.getoutput ('tracker search -f -l 10000 |wc -l ')
-            self.assert_(result != 0 , 'Tracker(checked with -a) is not listing the files when the device is connected in mass storage mode')
+        check_mount()
+        commands.getoutput('umount /dev/mmcblk0p1')
 
-if __name__ == "__main__":
+        result = self.miner.GetStatus()
+        self.assert_(result == 'Idle', 'Tracker is not in idle state')
 
-        #unittest.main()
-        copy_tcs_list=unittest.TestLoader().getTestCaseNames(copy)
-        copy_testsuite=unittest.TestSuite(map(copy, copy_tcs_list))
+    def test_msm_02(self):
+        """
+        1. unmount the MyDocs
 
+        2. check if tracker search -i is retrieving result """
 
-        move_tcs_list=unittest.TestLoader().getTestCaseNames(move)
-        move_testsuite=unittest.TestSuite(map(move, move_tcs_list))
+        check_mount()
+        commands.getoutput('umount /dev/mmcblk0p1')
 
+        result = commands.getoutput(
+            'tracker search -f -l 10000 | grep ' + MYDOCS + '  |wc -l ')
+        print result
+        self.assert_(
+            result == '0', 'Tracker is listing the files when the device is connected in mass storage mode')
 
-        rename_tcs_list=unittest.TestLoader().getTestCaseNames(rename)
-        rename_testsuite=unittest.TestSuite(map(rename, rename_tcs_list))
+    def test_msm_03(self):
+        """1. unmount the MyDocs
 
-        delete_tcs_list=unittest.TestLoader().getTestCaseNames(delete)
-        delete_testsuite=unittest.TestSuite(map(delete, delete_tcs_list))
+           2. check if tracker search -ia is retrieving result """
 
+        check_mount()
+        commands.getoutput('umount /dev/mmcblk0p1')
 
-        subfolder_tcs_list=unittest.TestLoader().getTestCaseNames(subfolder)
-        subfolder_testsuite=unittest.TestSuite(map(subfolder, subfolder_tcs_list))
+        result = commands.getoutput('tracker search -f -l 10000 |wc -l ')
+        self.assert_(
+            result != 0, 'Tracker(checked with -a) is not listing the files when the device is connected in mass storage mode')
 
+if __name__ == "__main__":
 
-        text_tcs_list=unittest.TestLoader().getTestCaseNames(text)
-        text_testsuite=unittest.TestSuite(map(text, text_tcs_list))
+    # unittest.main()
+    copy_tcs_list = unittest.TestLoader().getTestCaseNames(copy)
+    copy_testsuite = unittest.TestSuite(map(copy, copy_tcs_list))
 
-        file_tcs_list=unittest.TestLoader().getTestCaseNames(no_file_op)
-        file_testsuite=unittest.TestSuite(map(no_file_op, file_tcs_list))
+    move_tcs_list = unittest.TestLoader().getTestCaseNames(move)
+    move_testsuite = unittest.TestSuite(map(move, move_tcs_list))
 
-        all_testsuites = unittest.TestSuite((rename_testsuite,move_testsuite,copy_testsuite,subfolder_testsuite,text_testsuite))
+    rename_tcs_list = unittest.TestLoader().getTestCaseNames(rename)
+    rename_testsuite = unittest.TestSuite(map(rename, rename_tcs_list))
 
-        unittest.TextTestRunner(verbosity=2).run(all_testsuites)
+    delete_tcs_list = unittest.TestLoader().getTestCaseNames(delete)
+    delete_testsuite = unittest.TestSuite(map(delete, delete_tcs_list))
 
+    subfolder_tcs_list = unittest.TestLoader().getTestCaseNames(subfolder)
+    subfolder_testsuite = unittest.TestSuite(
+        map(subfolder, subfolder_tcs_list))
 
+    text_tcs_list = unittest.TestLoader().getTestCaseNames(text)
+    text_testsuite = unittest.TestSuite(map(text, text_tcs_list))
 
+    file_tcs_list = unittest.TestLoader().getTestCaseNames(no_file_op)
+    file_testsuite = unittest.TestSuite(map(no_file_op, file_tcs_list))
 
+    all_testsuites = unittest.TestSuite(
+        (rename_testsuite, move_testsuite, copy_testsuite, subfolder_testsuite, text_testsuite))
 
+    unittest.TextTestRunner(verbosity=2).run(all_testsuites)
diff --git a/tests/functional-tests/performance-tc-modified.py b/tests/functional-tests/performance-tc-modified.py
index 62071aa..d37553a 100644
--- a/tests/functional-tests/performance-tc-modified.py
+++ b/tests/functional-tests/performance-tc-modified.py
@@ -18,9 +18,11 @@
 # 02110-1301, USA.
 #
 
-import sys,os
+import sys
+import os
 import dbus
-import commands, signal
+import commands
+import signal
 import unittest
 import time
 import random
@@ -32,7 +34,6 @@ TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
 RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
 
 
-
 """import .ttl files """
 """
 def stats() :
@@ -69,108 +70,105 @@ def import_ttl (music_ttl):
 """
 
 
-
-
-
-
 class TestUpdate (unittest.TestCase):
 
-        def setUp(self):
-                bus = dbus.SessionBus()
-                tracker = bus.get_object(TRACKER, TRACKER_OBJ)
-                self.resources = dbus.Interface (tracker,
-                                                 dbus_interface=RESOURCES_IFACE)
+    def setUp(self):
+        bus = dbus.SessionBus()
+        tracker = bus.get_object(TRACKER, TRACKER_OBJ)
+        self.resources = dbus.Interface(tracker,
+                                        dbus_interface=RESOURCES_IFACE)
 
-       def parsing(self, method, query):
-               f=open("./report/"+method.__name__+"_query.html", "w")
-               f.write(query)
-               f.close()
-               value=hd.parseDoc(method)
-               l=hd.Link(method.__name__+"_query", "report/"+method.__name__+"_query.html")
-               return value, l
+    def parsing(self, method, query):
+        f = open("./report/" + method.__name__ + "_query.html", "w")
+        f.write(query)
+        f.close()
+        value = hd.parseDoc(method)
+        l = hd.Link(method.__name__ + "_query",
+                    "report/" + method.__name__ + "_query.html")
+        return value, l
 
 
 """ email performance test cases """
-class email(TestUpdate):
 
 
-        def test_email_01(self):
+class email(TestUpdate):
 
-               """
-               Time taken for querying the emails.     
-               @param description: Time taken for querying the emails 
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_email_01(self):
+        """
+        Time taken for querying the emails.
+        @param description: Time taken for querying the emails
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?m ?From  ?date ?email1 WHERE { \
+        query = "SELECT ?m ?From  ?date ?email1 WHERE { \
                        ?m a  nmo:Email ; \
                        nmo:receivedDate ?date ;\
                        nmo:from ?From . ?from nco:hasEmailAddress ?email1 } LIMIT 10000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Query for emails """
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            """Query for emails """
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
 
-                       print "Time taken for querying emails = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(email.test_email_01, query)
-               t.rows.append(hd.TableRow([email.test_email_01.__name__, value[0], value[1], list, value[2], len(result), l]))
+            print "Time taken for querying emails = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(email.test_email_01, query)
+        t.rows.append(
+            hd.TableRow([email.test_email_01.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 """ calls performance  test cases """
-class calls(TestUpdate):
 
 
-        def test_calls_01(self):
+class calls(TestUpdate):
 
-               """
-               Time taken for querying duration of calls from phonenumbers.    
-               @param description: Time taken for querying duration of calls from phonenumbers
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_calls_01(self):
+        """
+        Time taken for querying duration of calls from phonenumbers.
+        @param description: Time taken for querying duration of calls from phonenumbers
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?duration ?phonenumber WHERE {\
+        query = "SELECT ?duration ?phonenumber WHERE {\
                        ?call  a  nmo:Call ;\
                        nmo:duration ?duration ;\
                        nmo:from [a nco:Contact ; nco:hasPhoneNumber ?phonenumber] }LIMIT 10000"
-               list=[]
-               for i in range ( 0, iterations ):
+        list = []
+        for i in range(0, iterations):
 
-                       """Querying the duration of calls of contacts """
-                       start=time.time()
+            """Querying the duration of calls of contacts """
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying duration of calls from phonenumbers  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-                       
-               value, l=self.parsing(calls.test_calls_01, query)
-               t.rows.append(hd.TableRow([calls.test_calls_01.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying duration of calls from phonenumbers  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_calls_02(self):
+        value, l = self.parsing(calls.test_calls_01, query)
+        t.rows.append(
+            hd.TableRow([calls.test_calls_01.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken for querying dialed calls.   
-               @param description: Time taken for querying dialed calls 
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_calls_02(self):
+        """
+        Time taken for querying dialed calls.
+        @param description: Time taken for querying dialed calls
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:sentDate ?date ; \
                        nmo:duration ?duration; \
@@ -183,33 +181,32 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Querying the dialed calls"""
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            """Querying the dialed calls"""
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying dialed calls  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying dialed calls  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(calls.test_calls_02, query)
-               t.rows.append(hd.TableRow([calls.test_calls_02.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(calls.test_calls_02, query)
+        t.rows.append(
+            hd.TableRow([calls.test_calls_02.__name__, value[0], value[1], list, value[2], len(result), l]))
 
+    def test_calls_03(self):
+        """
+        Time taken for querying received calls
+        @param description: Time taken for querying received calls
+        @param expected_value:
+        @param querytype:
+        """
 
-        def test_calls_03(self):
-
-               """
-               Time taken for querying received calls
-               @param description: Time taken for querying received calls
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:receivedDate ?date ; \
                        nmo:duration ?duration; \
@@ -220,32 +217,32 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Querying the received calls"""
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            """Querying the received calls"""
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying received calls  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(calls.test_calls_03, query)
-               t.rows.append(hd.TableRow([calls.test_calls_03.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying received calls  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_calls_04(self):
+        value, l = self.parsing(calls.test_calls_03, query)
+        t.rows.append(
+            hd.TableRow([calls.test_calls_03.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken for querying missed calls.   
-               @param description: Time taken for querying missed calls
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_calls_04(self):
+        """
+        Time taken for querying missed calls.
+        @param description: Time taken for querying missed calls
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:receivedDate ?date ; \
                        nmo:duration ?duration; \
@@ -256,108 +253,107 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
+        list = []
+        for i in range(0, iterations):
+            """Querying the missed calls"""
+            start = time.time()
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Querying the missed calls"""
-                       start=time.time()
+            result = self.resources.SparqlQuery(query)
 
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying missed calls  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(calls.test_calls_04, query)
-               t.rows.append(hd.TableRow([calls.test_calls_04.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying missed calls  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(calls.test_calls_04, query)
+        t.rows.append(
+            hd.TableRow([calls.test_calls_04.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 """ IM performance  test cases """
-class instant_messages(TestUpdate):
-
 
-        def test_im_01(self):
 
-               """
-               Time taken for querying  messages.      
-               @param description: Time taken for querying  messages 
-               @param expected_value: 
-               @param querytype: 
-               """
+class instant_messages(TestUpdate):
 
+    def test_im_01(self):
+        """
+        Time taken for querying  messages.
+        @param description: Time taken for querying  messages
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?message ?from ?date ?content WHERE { \
+        query = "SELECT ?message ?from ?date ?content WHERE { \
                        ?message a nmo:IMMessage ; \
                        nmo:from ?from ; \
                        nmo:receivedDate ?date ;  \
                        nie:plainTextContent ?content} LIMIT 10000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Querying the messages """
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            """Querying the messages """
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying  messages  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(instant_messages.test_im_01, query)
-               t.rows.append(hd.TableRow([instant_messages.test_im_01.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying  messages  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_im_02(self):
+        value, l = self.parsing(instant_messages.test_im_01, query)
+        t.rows.append(
+            hd.TableRow([instant_messages.test_im_01.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken for querying status of contacts.     
-               @param description: Time taken for querying status of contacts
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_im_02(self):
+        """
+        Time taken for querying status of contacts.
+        @param description: Time taken for querying status of contacts
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?contact ?status WHERE{\
+        query = "SELECT ?contact ?status WHERE{\
                        ?contact a  nco:IMAccount; \
                        nco:imPresence ?status }LIMIT 10000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       """Querying the status of contacts every sec"""
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            """Querying the status of contacts every sec"""
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying status of contacts = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(instant_messages.test_im_02, query)
-               t.rows.append(hd.TableRow([instant_messages.test_im_02.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying status of contacts = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(instant_messages.test_im_02, query)
+        t.rows.append(
+            hd.TableRow([instant_messages.test_im_02.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 """ rtcom performance  test cases """
-class rtcom(TestUpdate):
 
 
-        def test_rtcom_01(self):
+class rtcom(TestUpdate):
 
-               """
-               Time taken for querying (old) conversation view (without contact info). 
-               @param description: Time taken for querying (old) conversation view (without contact info)
-               @param expected_value: 0.5 sec
-               @param querytype: 
-               """
+    def test_rtcom_01(self):
+        """
+        Time taken for querying (old) conversation view (without contact info).
+        @param description: Time taken for querying (old) conversation view (without contact info)
+        @param expected_value: 0.5 sec
+        @param querytype:
+        """
 
-               # A version of the next one that skips the contact parts that are not generated properly
+        # A version of the next one that skips the contact parts that are not
+        # generated properly
 
-               query = "SELECT ?msg ?date ?text ?contact \
+        query = "SELECT ?msg ?date ?text ?contact \
                        WHERE { \
                        ?msg nmo:communicationChannel <urn:channel:1> ; \
                        nmo:receivedDate ?date ; \
@@ -365,48 +361,47 @@ class rtcom(TestUpdate):
                        <urn:channel:1> nmo:hasParticipant ?contact . \
                        } ORDER BY DESC(?date) LIMIT 50"
 
-               #query = "SELECT ?msg ?date ?text ?contact \
-               #       WHERE { \
-               #       ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
-               #       nmo:receivedDate ?date ; \
-               #       nie:plainTextContent ?text ; \
-               #       nmo:from [ nco:hasIMAddress ?fromAddress ] . \
-               #       <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
-               #       ?contact nco:hasIMAddress ?fromAddress . \
-               #       } ORDER BY DESC(?date) LIMIT 50"
-
-
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying (old) conversation view (without contact info)  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(rtcom.test_rtcom_01, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
-
-        def test_rtcom_02(self):
-
-               """
-               Time taken for querying conversation view.      
-               @param description: Time taken for querying conversation view
-               @param expected_value: 0.5 sec
-               @param querytype: 
-               """
-
-               #
-               # Current rtcom queries, please do not "quietly optimize".
-               #
-               
-               # requires secondary index support to be fast
-
-               query = " \
+        # query = "SELECT ?msg ?date ?text ?contact \
+        #      WHERE { \
+        #      ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
+        #      nmo:receivedDate ?date ; \
+        #      nie:plainTextContent ?text ; \
+        #      nmo:from [ nco:hasIMAddress ?fromAddress ] . \
+        #      <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
+        #      ?contact nco:hasIMAddress ?fromAddress . \
+        #      } ORDER BY DESC(?date) LIMIT 50"
+
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying (old) conversation view (without contact info)  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(rtcom.test_rtcom_01, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+
+    def test_rtcom_02(self):
+        """
+        Time taken for querying conversation view.
+        @param description: Time taken for querying conversation view
+        @param expected_value: 0.5 sec
+        @param querytype:
+        """
+
+        #
+        # Current rtcom queries, please do not "quietly optimize".
+        #
+
+        # requires secondary index support to be fast
+
+        query = " \
                        SELECT ?message ?date ?from ?to \
                             rdf:type(?message) \
                             tracker:coalesce(fn:concat(nco:nameGiven(?contact), ' ', nco:nameFamily(?contact)), nco:nickname(?contact)) \
@@ -475,35 +470,35 @@ class rtcom(TestUpdate):
                        LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying conversation view  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(rtcom.test_rtcom_02, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying conversation view  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_rtcom_03(self):
+        value, l = self.parsing(rtcom.test_rtcom_02, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying call history.
-               @param description: Time taken for querying call history
-               @param expected_value: 0.5 sec
-               @param querytype: 
-               """
+    def test_rtcom_03(self):
+        """
+        Time taken for querying call history.
+        @param description: Time taken for querying call history
+        @param expected_value: 0.5 sec
+        @param querytype:
+        """
 
-               #
-               # Current rtcom queries, please do not "quietly optimize".
-               #
+        #
+        # Current rtcom queries, please do not "quietly optimize".
+        #
 
-               query = " \
+        query = " \
                        SELECT ?call ?date ?from ?to \
                             rdf:type(?call) \
                             nmo:isSent(?call) \
@@ -560,31 +555,31 @@ class rtcom(TestUpdate):
                        ORDER BY DESC(?date) LIMIT 50\
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying call history  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(rtcom.test_rtcom_03, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying call history  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_rtcom_04(self):
+        value, l = self.parsing(rtcom.test_rtcom_03, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying (old) conversation list view.   
-               @param description: Time taken for querying (old) conversation list view 
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_rtcom_04(self):
+        """
+        Time taken for querying (old) conversation list view.
+        @param description: Time taken for querying (old) conversation list view
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = "SELECT ?channel ?participant nco:fullname(?participant) ?last_date nie:plainTextContent(?last_message) \
+        query = "SELECT ?channel ?participant nco:fullname(?participant) ?last_date nie:plainTextContent(?last_message) \
                                (SELECT COUNT(?message) AS ?message_count  \
                                        WHERE { ?message nmo:communicationChannel ?channel }) \
                                (SELECT COUNT(?message) AS ?message_count  \
@@ -598,33 +593,31 @@ class rtcom(TestUpdate):
                                FILTER (?participant != nco:default-contact-me ) \
                                } ORDER BY DESC(?last_date) LIMIT 50 }"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+            result = self.resources.SparqlQuery(query)
 
-                       result=self.resources.SparqlQuery(query)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying (old) conversation list view  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying (old) conversation list view  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        value, l = self.parsing(rtcom.test_rtcom_04, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_04.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               value, l=self.parsing(rtcom.test_rtcom_04, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_04.__name__, value[0], value[1], list, value[2], len(result), l]))
+    def test_rtcom_05(self):
+        """
+        Time taken for querying (old) conversation view.
+        @param description: Time taken for querying (old) conversation view
+        @param expected_value:
+        @param querytype:
+        """
 
-
-        def test_rtcom_05(self):
-
-               """
-               Time taken for querying (old) conversation view.        
-               @param description: Time taken for querying (old) conversation view
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               query = "SELECT ?msg ?date ?text ?contact \
+        query = "SELECT ?msg ?date ?text ?contact \
                        WHERE { \
                        ?msg nmo:communicationChannel <urn:channel:1> ; \
                        nmo:receivedDate ?date ; \
@@ -634,46 +627,45 @@ class rtcom(TestUpdate):
                        ?contact nco:hasIMAddress ?fromAddress . \
                        } ORDER BY DESC(?date) LIMIT 50"
 
-               #query = "SELECT ?msg ?date ?text ?contact \
-               #       WHERE { \
-               #       ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
-               #       nmo:receivedDate ?date ; \
-               #       nie:plainTextContent ?text ; \
-               #       nmo:from [ nco:hasIMAddress ?fromAddress ] . \
-               #       <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
-               #       ?contact nco:hasIMAddress ?fromAddress . \
-               #       } ORDER BY DESC(?date) LIMIT 50"
-
-
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying (old) conversation view  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(rtcom.test_rtcom_05, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_05.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-        def test_rtcom_06(self):
-
-               """
-               Time taken for querying conversation list.      
-               @param description: Time taken for querying conversation list
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               #
-               # Current rtcom queries, please do not "quietly optimize".
-               #
-
-               query = " \
+        # query = "SELECT ?msg ?date ?text ?contact \
+        #      WHERE { \
+        #      ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
+        #      nmo:receivedDate ?date ; \
+        #      nie:plainTextContent ?text ; \
+        #      nmo:from [ nco:hasIMAddress ?fromAddress ] . \
+        #      <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
+        #      ?contact nco:hasIMAddress ?fromAddress . \
+        #      } ORDER BY DESC(?date) LIMIT 50"
+
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying (old) conversation view  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(rtcom.test_rtcom_05, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_05.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_rtcom_06(self):
+        """
+        Time taken for querying conversation list.
+        @param description: Time taken for querying conversation list
+        @param expected_value:
+        @param querytype:
+        """
+
+        #
+        # Current rtcom queries, please do not "quietly optimize".
+        #
+
+        query = " \
                        SELECT ?channel ?subject nie:generator(?channel) \
                          tracker:coalesce(fn:concat(nco:nameGiven(?contact), ' ', nco:nameFamily(?contact)), nco:nickname(?contact)) AS ?contactName \
                          nco:contactUID(?contact) AS ?contactUID \
@@ -714,38 +706,39 @@ class rtcom(TestUpdate):
                        ORDER BY DESC(?lastDate) LIMIT 50\
                        "
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+            result = self.resources.SparqlQuery(query)
 
-                       result=self.resources.SparqlQuery(query)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying conversation list  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying conversation list  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(rtcom.test_rtcom_06, query)
-               t.rows.append(hd.TableRow([rtcom.test_rtcom_06.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(rtcom.test_rtcom_06, query)
+        t.rows.append(
+            hd.TableRow([rtcom.test_rtcom_06.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 """ Audio, Video, Images  performance  test cases """
-class audio(TestUpdate):
 
-        def test_audio_01(self):
 
-               """
-               Time taken for querying all songs.      
-               @param description: Time taken for querying all songs
-               @param expected_value: First 20 clips in 0.2 sec and the rest up to 10000 in 2 sec 
-               @param querytype: 
-               """
+class audio(TestUpdate):
+
+    def test_audio_01(self):
+        """
+        Time taken for querying all songs.
+        @param description: Time taken for querying all songs
+        @param expected_value: First 20 clips in 0.2 sec and the rest up to 10000 in 2 sec
+        @param querytype:
+        """
 
-                """Query all songs """
+        """Query all songs """
 
-               query = "SELECT ?title ?artist nmm:albumTitle (nmm:musicAlbum (?song))\
+        query = "SELECT ?title ?artist nmm:albumTitle (nmm:musicAlbum (?song))\
                        WHERE { { \
                        ?song a nmm:MusicPiece . \
                        ?song nie:title ?title .\
@@ -753,97 +746,96 @@ class audio(TestUpdate):
                        ?perf nmm:artistName ?artist .}}  \
                        ORDER BY ?title "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result = self.resources.SparqlQuery(query,timeout = 1000)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all songs  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query, timeout=1000)
 
-               value, l=self.parsing(audio.test_audio_01, query)
-               t.rows.append(hd.TableRow([audio.test_audio_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all songs  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_audio_02 (self) :
+        value, l = self.parsing(audio.test_audio_01, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying 5000 albums.
-               @param description: Time taken for querying 5000 albums
-               @param expected_value: First 20 albums in 0.2sec rest up to 1000 in 2 sec 
-               @param querytype: 
-               """
+    def test_audio_02(self):
+        """
+        Time taken for querying 5000 albums.
+        @param description: Time taken for querying 5000 albums
+        @param expected_value: First 20 albums in 0.2sec rest up to 1000 in 2 sec
+        @param querytype:
+        """
 
-                """Query all albums """
+        """Query all albums """
 
-                query = "SELECT nmm:albumTitle(?album) AS ?Album  nmm:artistName (nmm:performer (?Songs)) COUNT(?Songs)  AS ?Songs  ?album \
+        query = "SELECT nmm:albumTitle(?album) AS ?Album  nmm:artistName (nmm:performer (?Songs)) COUNT(?Songs)  AS ?Songs  ?album \
                        WHERE { { ?Songs a nmm:MusicPiece .\
                        ?Songs nmm:musicAlbum ?album . \
                         }}GROUP BY ?album ORDER BY ?album LIMIT 5000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result = self.resources.SparqlQuery(query,timeout = 1000)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 5000 albums  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(audio.test_audio_02, query)
-               t.rows.append(hd.TableRow([audio.test_audio_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
-
-       def test_audio_03 (self):
-
-               """
-               Time taken for querying 5000 artists.   
-               @param description: Time taken for querying 5000 artists 
-               @param expected_value: First 20 artists in 0.2 secrst up to 100 in 0.5sec
-               @param querytype: 
-               """
-
-                """ Query all artists """
-               """simplified version of test_audio_08  """
-               query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query, timeout=1000)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 5000 albums  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(audio.test_audio_02, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+
+    def test_audio_03(self):
+        """
+        Time taken for querying 5000 artists.
+        @param description: Time taken for querying 5000 artists
+        @param expected_value: First 20 artists in 0.2 secrst up to 100 in 0.5sec
+        @param querytype:
+        """
+
+        """ Query all artists """
+        """simplified version of test_audio_08  """
+        query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
                                ?song a nmm:MusicPiece . \
                                ?song nmm:performer ?artist . \
                                OPTIONAL { ?song nmm:musicAlbum ?album . } } \
                                GROUP BY ?artist ORDER BY ?artist LIMIT 5000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-                       print query
-                       result = self.resources.SparqlQuery(query,timeout= 600)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 5000 artists  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(audio.test_audio_03, query)
-               t.rows.append(hd.TableRow([audio.test_audio_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
-
-
-        def test_audio_04(self):
-
-               """
-               Time taken for querying Artist and finding the no.of albums in each artist.
-               @param description: Time taken for querying Artist and finding the no.of albums in each artist
-               @param expected_value: First 20 albums in 0.2sec and the rest up to 100 in 0.5 sec 
-               @param querytype: 
-               """
-
-               """ Querying for Artist and finding the no.of albums in each artist.  """
-
-               query = "SELECT ?artist ?name COUNT(?album) COUNT (?song) \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+            print query
+            result = self.resources.SparqlQuery(query, timeout=600)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 5000 artists  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(audio.test_audio_03, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+
+    def test_audio_04(self):
+        """
+        Time taken for querying Artist and finding the no.of albums in each artist.
+        @param description: Time taken for querying Artist and finding the no.of albums in each artist
+        @param expected_value: First 20 albums in 0.2sec and the rest up to 100 in 0.5 sec
+        @param querytype:
+        """
+
+        """ Querying for Artist and finding the no.of albums in each artist.  """
+
+        query = "SELECT ?artist ?name COUNT(?album) COUNT (?song) \
                       WHERE { \
                       ?song a nmm:MusicPiece ; \
                       nmm:musicAlbum ?album;  \
@@ -851,264 +843,260 @@ class audio(TestUpdate):
                       ?artist nmm:artistName ?name. \
                       } GROUP BY ?artist"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying Artist and finding the no.of albums in each artist  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(audio.test_audio_04, query)
-               t.rows.append(hd.TableRow([audio.test_audio_04.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying Artist and finding the no.of albums in each artist  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_audio_05(self):
+        value, l = self.parsing(audio.test_audio_04, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_04.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying all artists and count their songs.
-               @param description: Time taken for querying all artists and count their songs
-               @param expected_value: First 20 clips in 0.2 sec and the rest up to 1000 in 2sec
-               @param querytype: 
-               """
+    def test_audio_05(self):
+        """
+        Time taken for querying all artists and count their songs.
+        @param description: Time taken for querying all artists and count their songs
+        @param expected_value: First 20 clips in 0.2 sec and the rest up to 1000 in 2sec
+        @param querytype:
+        """
 
-                """Query all aritists also count of songs in each artists """
+        """Query all aritists also count of songs in each artists """
 
-               query= "SELECT nmm:artistName(?artist) COUNT(?songs) WHERE { \
+        query = "SELECT nmm:artistName(?artist) COUNT(?songs) WHERE { \
                        ?mp a nmm:MusicPiece . \
                        ?mp nmm:performer ?artist ; \
                        nie:title ?songs . } \
                        GROUP BY ?artist ORDER BY DESC(nmm:artistName(?artist))"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result = self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all artists and count their songs  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all artists and count their songs  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(audio.test_audio_05, query)
-               t.rows.append(hd.TableRow([audio.test_audio_05.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+        value, l = self.parsing(audio.test_audio_05, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_05.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-        def test_audio_06(self):
+    def test_audio_06(self):
+        """
+        Time taken for querying all albums and count their songs.
+        @param description: Time taken for querying all albums and count their songs
+        @param expected_value: First 20 clips in 0.2 sec
+        @param querytype:
+        """
 
-               """
-               Time taken for querying all albums and count their songs.
-               @param description: Time taken for querying all albums and count their songs
-               @param expected_value: First 20 clips in 0.2 sec 
-               @param querytype: 
-               """
+        """Query all albums also count of songs in each album """
 
-                """Query all albums also count of songs in each album """
-
-               query= "SELECT nie:title(?a) COUNT(?songs) WHERE { \
+        query = "SELECT nie:title(?a) COUNT(?songs) WHERE { \
                        ?a a nmm:MusicAlbum . \
                        ?mp nmm:musicAlbum ?a ; \
                        nie:title ?songs . } \
                        GROUP BY ?a ORDER BY DESC(nie:title(?a))"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result = self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all albums and count their songs  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(audio.test_audio_06, query)
-               t.rows.append(hd.TableRow([audio.test_audio_06.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all albums and count their songs  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_audio_07(self):
+        value, l = self.parsing(audio.test_audio_06, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_06.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying all albums and count their songs.       
-               @param description: Time taken for querying all albums and count their songs 
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_audio_07(self):
+        """
+        Time taken for querying all albums and count their songs.
+        @param description: Time taken for querying all albums and count their songs
+        @param expected_value:
+        @param querytype:
+        """
 
-                """Query all albums also count of songs in each album """
+        """Query all albums also count of songs in each album """
 
-               query= "SELECT  ?album COUNT(?songs) AS ?count  WHERE { \
+        query = "SELECT  ?album COUNT(?songs) AS ?count  WHERE { \
                        ?a a nmm:MusicAlbum; \
                        nie:title ?album. \
                        ?mp nmm:musicAlbum ?a;\
                        nie:title ?songs.\
                         }GROUP BY ?album ORDER BY DESC(?album)"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result = self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all albums and count their songs  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(audio.test_audio_07, query)
-               t.rows.append(hd.TableRow([audio.test_audio_07.__name__, value[0], value[1], list, value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all albums and count their songs  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_audio_08 (self):
+        value, l = self.parsing(audio.test_audio_07, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_07.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken for querying 5000 artists.   
-               @param description: Time taken for querying 5000 artists 
-               @param expected_value: 1.5
-               @param querytype: 
-               """
+    def test_audio_08(self):
+        """
+        Time taken for querying 5000 artists.
+        @param description: Time taken for querying 5000 artists
+        @param expected_value: 1.5
+        @param querytype:
+        """
 
-                """ Query all artists """
-                query = " SELECT nmm:artistName(?artist) AS ?artistTitle (nmm:musicAlbum (?song)) (nmm:albumTitle (?album))COUNT(?album) AS ?album ?artist \
+        """ Query all artists """
+        query = " SELECT nmm:artistName(?artist) AS ?artistTitle (nmm:musicAlbum (?song)) (nmm:albumTitle (?album))COUNT(?album) AS ?album ?artist \
                        WHERE {  \
                        ?song a nmm:MusicPiece  .\
                        ?song nmm:performer ?artist . \
                         } GROUP BY ?artist  ORDER BY ?artist LIMIT 5000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-                       print query
-                       result = self.resources.SparqlQuery(query,timeout= 600)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 5000 artists  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(audio.test_audio_08, query)
-               t.rows.append(hd.TableRow([audio.test_audio_08.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-       def test_audio_09 (self) :
-
-               """
-               Time taken for querying 100 albums.     
-               @param description: Time taken for querying 100 albums 
-               @param expected_value: 
-               @param querytype: 
-               """
-
-                """Query 100 albums """
-
-                query = "SELECT nmm:albumTitle(?album) AS ?Album  (nmm:performer(?Songs)) nmm:artistName COUNT(?Songs)  AS ?Songs  ?album \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+            print query
+            result = self.resources.SparqlQuery(query, timeout=600)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 5000 artists  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(audio.test_audio_08, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_08.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_audio_09(self):
+        """
+        Time taken for querying 100 albums.
+        @param description: Time taken for querying 100 albums
+        @param expected_value:
+        @param querytype:
+        """
+
+        """Query 100 albums """
+
+        query = "SELECT nmm:albumTitle(?album) AS ?Album  (nmm:performer(?Songs)) nmm:artistName COUNT(?Songs)  AS ?Songs  ?album \
                        WHERE { { ?Songs a nmm:MusicPiece .\
                        ?Songs nmm:musicAlbum ?album .\
                        }}GROUP BY ?album ORDER BY ?album LIMIT 100"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result = self.resources.SparqlQuery(query,timeout = 1000)
+            result = self.resources.SparqlQuery(query, timeout=1000)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 100 albums  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 100 albums  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(audio.test_audio_09, query)
-               t.rows.append(hd.TableRow([audio.test_audio_09.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(audio.test_audio_09, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_09.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-       def test_audio_10 (self):
+    def test_audio_10(self):
+        """
+        Time taken for querying 100 artist.
+        @param description: Time taken for querying 100 artist
+        @param expected_value:
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 100 artist.
-               @param description: Time taken for querying 100 artist
-               @param expected_value: 
-               @param querytype: 
-               """
+        """ Query 100 artists """
 
-                """ Query 100 artists """
-
-                query = "SELECT nmm:artistName(?artist) AS ?artistTitle (nmm:musicAlbum (?song)) nmm:albumTitle COUNT(?album) AS\
+        query = "SELECT nmm:artistName(?artist) AS ?artistTitle (nmm:musicAlbum (?song)) nmm:albumTitle COUNT(?album) AS\
                            ?album ?artist \
                           WHERE {  \
                           ?song a nmm:MusicPiece  .\
                           ?song nmm:performer ?artist . \
                           } GROUP BY ?artist  ORDER BY ?artist  LIMIT 100"""
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-                       print query
-                       result = self.resources.SparqlQuery(query,timeout=600)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 100 artist  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(audio.test_audio_10, query)
-               t.rows.append(hd.TableRow([audio.test_audio_10.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-
-
-       def test_audio_11 (self):
-
-               """
-               Time taken for querying 100 artist.     
-               @param description: Time taken for querying 100 artist
-               @param expected_value: 
-               @param querytype: 
-               """
-
-                """ Query 100 artists """
-               """simplified version of test_audio_10  """
-
-               query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+            print query
+            result = self.resources.SparqlQuery(query, timeout=600)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 100 artist  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(audio.test_audio_10, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_10.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_audio_11(self):
+        """
+        Time taken for querying 100 artist.
+        @param description: Time taken for querying 100 artist
+        @param expected_value:
+        @param querytype:
+        """
+
+        """ Query 100 artists """
+        """simplified version of test_audio_10  """
+
+        query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
                        ?song a nmm:MusicPiece . \
                        ?song nmm:performer ?artist . \
                        OPTIONAL  { ?song nmm:musicAlbum ?album . } } \
                        GROUP BY ?artist ORDER BY ?artist LIMIT 100"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-                       print query
-                       result = self.resources.SparqlQuery(query,timeout=600)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+            print query
+            result = self.resources.SparqlQuery(query, timeout=600)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 100 artist  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-               value, l=self.parsing(audio.test_audio_11, query)
-               t.rows.append(hd.TableRow([audio.test_audio_11.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 100 artist  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+        value, l = self.parsing(audio.test_audio_11, query)
+        t.rows.append(
+            hd.TableRow([audio.test_audio_11.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 class gallery(TestUpdate):
 
+    def test_gallery_01(self):
+        """
+        Time taken for querying all images and videos.
+        @param description: Time taken for querying all images and videos
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 2 second.
+        @param querytype:
+        """
 
-        def test_gallery_01(self):
-
-               """
-               Time taken for querying all images and videos.  
-               @param description: Time taken for querying all images and videos 
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 2 second.
-               @param querytype: 
-               """
+        """ Querying for all Images and Videos """
 
-               """ Querying for all Images and Videos """
-
-               query = "SELECT ?url \
+        query = "SELECT ?url \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -1118,96 +1106,94 @@ class gallery(TestUpdate):
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 10000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query, timeout=25)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images and videos  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query, timeout=25)
 
-               value, l=self.parsing(gallery.test_gallery_01, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
-               
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images and videos  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_gallery_02(self):
+        value, l = self.parsing(gallery.test_gallery_01, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_01.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying all images and videos taken with phone's camera.
-               @param description: Time taken for querying all images and videos taken with phone's camera
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 1.0 second.
-               @param querytype: 
-               """
+    def test_gallery_02(self):
+        """
+        Time taken for querying all images and videos taken with phone's camera.
+        @param description: Time taken for querying all images and videos taken with phone's camera
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 1.0 second.
+        @param querytype:
+        """
 
-               """Querying all images and videos taken with phone's camera """
+        """Querying all images and videos taken with phone's camera """
 
-               query = "SELECT ?media WHERE { \
+        query = "SELECT ?media WHERE { \
                         ?media a nfo:Visual; \
                         nfo:equipment [ a nfo:Equipment; nfo:make 'NOKIA' ] }"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images and videos taken with phone's camera  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(gallery.test_gallery_02, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images and videos taken with phone's camera  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_gallery_03(self):
+        value, l = self.parsing(gallery.test_gallery_02, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_02.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
-               """
-               Time taken for querying all images and videos with a tag.
-               @param description: Time taken for querying all images and videos with a tag
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 2 second.
-               @param querytype: 
-               """
+    def test_gallery_03(self):
+        """
+        Time taken for querying all images and videos with a tag.
+        @param description: Time taken for querying all images and videos with a tag
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 2 second.
+        @param querytype:
+        """
 
-               """ Querying for images, videos which have tag TEST """
+        """ Querying for images, videos which have tag TEST """
 
-               query  = "SELECT ?media \
+        query  = "SELECT ?media \
                         WHERE { \
                        ?media a nfo:Visual; \
                         nao:hasTag ?tag . \
                        ?tag nao:prefLabel 'TEST' }"
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images and videos with a tag  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images and videos with a tag  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(gallery.test_gallery_03, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
+        value, l = self.parsing(gallery.test_gallery_03, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_03.__name__, value[0], value[1], list, value[2], len(result), l], bgcolor="#C3FDB8"))
 
+    def test_gallery_04(self):
+        """
+        Time taken for querying all images and videos without OPTIONALS.
+        @param description: Time taken for querying all images and videos without OPTIONALS
+        @param expected_value:
+        @param querytype:
+        """
 
-        def test_gallery_04(self):
+        """ Querying for all Images and Videos without OPTIONALS"""
 
-               """
-               Time taken for querying all images and videos without OPTIONALS.        
-               @param description: Time taken for querying all images and videos without OPTIONALS
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               """ Querying for all Images and Videos without OPTIONALS"""
-
-               query = "SELECT ?url \
+        query = "SELECT ?url \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -1215,34 +1201,33 @@ class gallery(TestUpdate):
                      nfo:fileLastModified ?modified .}\
                      ORDER BY ?modified LIMIT 10000"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query, timeout=25)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images and videos without OPTIONALS  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query, timeout=25)
 
-               value, l=self.parsing(gallery.test_gallery_04, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_04.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images and videos without OPTIONALS  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_04, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_04.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-        def test_gallery_05(self):
+    def test_gallery_05(self):
+        """
+        Time taken for querying 500 images and videos.
+        @param description: Time taken for querying 500 images and videos
+        @param expected_value:
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 500 images and videos.  
-               @param description: Time taken for querying 500 images and videos
-               @param expected_value: 
-               @param querytype: 
-               """
+        """ Querying for 500 Images and Videos """
 
-               """ Querying for 500 Images and Videos """
-
-               query = "SELECT ?url \
+        query = "SELECT ?url \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -1251,35 +1236,33 @@ class gallery(TestUpdate):
                      OPTIONAL    {?media nfo:width ?_width. } \
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 500"
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query, timeout=25)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images and videos  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(gallery.test_gallery_05, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_05.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query, timeout=25)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images and videos  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_05, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_05.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-        def test_gallery_06(self):
+    def test_gallery_06(self):
+        """
+        Time taken for querying 100 images and videos without OPTIONALS.
+        @param description: Time taken for querying 100 images and videos without OPTIONALS
+        @param expected_value:
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 100 images and videos without OPTIONALS.
-               @param description: Time taken for querying 100 images and videos without OPTIONALS
-               @param expected_value: 
-               @param querytype: 
-               """
+        """ Querying for 500 Images and Videos without OPTIONALS"""
 
-               """ Querying for 500 Images and Videos without OPTIONALS"""
-
-               query = "SELECT ?url \
+        query = "SELECT ?url \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -1287,99 +1270,94 @@ class gallery(TestUpdate):
                      nfo:fileLastModified ?modified .} \
                      ORDER BY ?modified LIMIT 500"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query, timeout=25)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 100 images and videos without OPTIONALS  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(gallery.test_gallery_06, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_06.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query, timeout=25)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 100 images and videos without OPTIONALS  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_06, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_06.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-        def test_gallery_07(self):
+    def test_gallery_07(self):
+        """
+        Time taken for querying 500 images and videos with a tag.
+        @param description: Time taken for querying 500 images and videos with a tag
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 2 
second.
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 500 images and videos with a tag.       
-               @param description: Time taken for querying 500 images and videos with a tag
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results 
in 2 second.
-               @param querytype: 
-               """
-
-               """ Querying for 500 images, videos which have tag TEST """
-               query  = "SELECT ?media \
+        """ Querying for 500 images, videos which have tag TEST """
+        query = "SELECT ?media \
                         WHERE { \
                        ?media a nfo:Visual; \
                         nao:hasTag ?tag . \
                        ?tag nao:prefLabel 'TEST' } LIMIT 500"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images and videos with a tag  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(gallery.test_gallery_07, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_07.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images and videos with a tag  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_07, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_07.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-        def test_gallery_08(self):
+    def test_gallery_08(self):
+        """
+        Time taken for querying 500 images and videos taken with phone's camera.
+        @param description: Time taken for querying 500 images and videos taken with phone's camera
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 1.0 
second.
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 500 images and videos taken with phone's camera.        
-               @param description: Time taken for querying 500 images and videos taken with phone's camera
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results 
in 1.0 second.
-               @param querytype: 
-               """
+        """Querying 500 images and videos taken with phone's camera """
 
-               """Querying 500 images and videos taken with phone's camera """
-
-               query = "SELECT ?media WHERE { \
+        query = "SELECT ?media WHERE { \
                         ?media a nfo:Visual; \
                         nfo:equipment [ a nfo:Equipment; nfo:make 'NOKIA' ] } LIMIT 500"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images and videos taken with phone's camera  = %s 
" %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(gallery.test_gallery_08, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_08.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images and videos taken with phone's camera  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_gallery_09(self):
+        value, l = self.parsing(gallery.test_gallery_08, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_08.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-               """
-               Time taken for querying all images.     
-               @param description: Time taken for querying all images
-               @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results 
in 1.0 second.
-               @param querytype: 
-               """
+    def test_gallery_09(self):
+        """
+        Time taken for querying all images.
+        @param description: Time taken for querying all images
+        @param expected_value: Use streaming mode, get first 500 results in 0.1 second, all results in 1.0 
second.
+        @param querytype:
+        """
 
-               """Querying all images """
+        """Querying all images """
 
-               query = " SELECT ?url ?mime ?camera ?exposuretime ?fnumber ?focallength \
+        query = " SELECT ?url ?mime ?camera ?exposuretime ?fnumber ?focallength \
                         WHERE {\
                        ?image a nmm:Photo; \
                         nie:url ?url; \
@@ -1391,37 +1369,33 @@ class gallery(TestUpdate):
                        OPTIONAL { ?image nmm:fnumber ?fnumber .}\
                        OPTIONAL { ?image nmm:focalLength ?focallength .}} LIMIT 10000"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+            result = self.resources.SparqlQuery(query)
 
-                       result=self.resources.SparqlQuery(query)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        value, l = self.parsing(gallery.test_gallery_09, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_09.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-               value, l=self.parsing(gallery.test_gallery_09, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_09.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+    def test_gallery_10(self):
+        """
+        Time taken for querying 500 images.
+        @param description: Time taken for querying 500 images
+        @param expected_value:
+        @param querytype:
+        """
 
+        """Querying 500 images """
 
-
-
-        def test_gallery_10(self):
-
-               """
-               Time taken for querying 500 images.     
-               @param description: Time taken for querying 500 images
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               """Querying 500 images """
-
-               query = " SELECT ?url ?mime ?camera ?exposuretime ?fnumber ?focallength \
+        query = " SELECT ?url ?mime ?camera ?exposuretime ?fnumber ?focallength \
                         WHERE {\
                        ?image a nmm:Photo; \
                         nie:url ?url; \
@@ -1433,36 +1407,33 @@ class gallery(TestUpdate):
                        OPTIONAL { ?image nmm:fnumber ?fnumber .}\
                        OPTIONAL { ?image nmm:focalLength ?focallength .}} LIMIT 500"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(gallery.test_gallery_10, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_10.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_10, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_10.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
+    def test_gallery_11(self):
+        """
+        Time taken for querying 500 images and videos.
+        @param description: Time taken for querying 500 images and videos
+        @param expected_value:
+        @param querytype:
+        """
 
-        def test_gallery_11(self):
+        """ Querying for 500 Images and Videos with UNION for them """
 
-               """
-               Time taken for querying 500 images and videos.  
-               @param description: Time taken for querying 500 images and videos
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               """ Querying for 500 Images and Videos with UNION for them """
-
-               query = "SELECT ?url \
+        query = "SELECT ?url \
                     WHERE { \
                      {?media a nmm:Photo.} UNION {?media a nmm:Video.} \
                      ?media nie:url ?url.\
@@ -1471,338 +1442,327 @@ class gallery(TestUpdate):
                      OPTIONAL    {?media nfo:width ?_width. } \
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 500"
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query,timeout=1000)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images and videos  = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(gallery.test_gallery_11, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_11.__name__, value[0], value[1], list, 
value[2], len(result), l]))
-
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-        def test_gallery_12(self):
+            result = self.resources.SparqlQuery(query, timeout=1000)
 
-               """
-               Time taken for querying all images.     
-               @param description: TTime taken for querying all images
-               @param expected_value: 
-               @param querytype: 
-               """
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images and videos  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               """Querying all images """
-               """simplified version of test_gallery_09 """
+        value, l = self.parsing(gallery.test_gallery_11, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_11.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-               query = "SELECT nie:url(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) 
nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 
10000"
+    def test_gallery_12(self):
+        """
+        Time taken for querying all images.
+        @param description: Time taken for querying all images
+        @param expected_value:
+        @param querytype:
+        """
 
+        """Querying all images """
+        """simplified version of test_gallery_09 """
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        query = "SELECT nie:url(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) 
nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 
10000"
 
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying all images = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(gallery.test_gallery_12, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_12.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying all images = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(gallery.test_gallery_12, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_12.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-        def test_gallery_13(self):
+    def test_gallery_13(self):
+        """
+        Time taken for querying 500 images.
+        @param description: Time taken for querying 500 images
+        @param expected_value:
+        @param querytype:
+        """
 
-               """
-               Time taken for querying 500 images.     
-               @param description: Time taken for querying 500 images
-               @param expected_value: 
-               @param querytype: 
-               """
+        """Querying 500 images """
+        """simplified version of test_gallery_10 """
 
-               """Querying 500 images """
-               """simplified version of test_gallery_10 """
+        query = "SELECT nie:url(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) 
nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 500"
 
-               query = "SELECT nie:url(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) 
nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 500"
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
+            result = self.resources.SparqlQuery(query)
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for querying 500 images = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-                       result=self.resources.SparqlQuery(query)
+        value, l = self.parsing(gallery.test_gallery_13, query)
+        t.rows.append(
+            hd.TableRow([gallery.test_gallery_13.__name__, value[0], value[1], list, value[2], len(result), 
l]))
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken for querying 500 images = %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
 
-               value, l=self.parsing(gallery.test_gallery_13, query)
-               t.rows.append(hd.TableRow([gallery.test_gallery_13.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+class ftsmatch (TestUpdate):
 
+    def test_fts_01(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-
-class ftsmatch (TestUpdate) :
-
-        def test_fts_01 (self):
-
-            """
-           Time taken for searching an artist in 10000 music files.    
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'ArtistName' }"
-           list=[]
-           for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
-
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files  %s" %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
-
-            value, l=self.parsing(ftsmatch.test_fts_01, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_01.__name__, value[0], value[1], list, value[2], 
len(result), l]))
-
-
-        def test_fts_02 (self) :
-
-            """
-            Time taken for searching a word.   
-           @param description: Time taken for searching a word 
-           @param expected_value: 
-               @param querytype: 
-           """
-
-            """ Searching for a word """
-            query = " SELECT ?uri WHERE { \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files  %s" % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(ftsmatch.test_fts_01, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_01.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_fts_02(self):
+        """
+        Time taken for searching a word.
+        @param description: Time taken for searching a word
+        @param expected_value:
+        @param querytype:
+        """
+
+        """ Searching for a word """
+        query = " SELECT ?uri WHERE { \
                      ?uri a nie:InformationElement ; \
                     fts:match 'WordInPlainText' . } "
 
-           list=[]
-           for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                    elapse = "%.3f" %elapse 
-                   print "Time taken for searching a word  = %s " %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_02, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_02.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching a word  = %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_fts_03 (self):
+        value, l = self.parsing(ftsmatch.test_fts_02, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_02.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-            """
-            Time taken for searching an artist in 10000 music files.   
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+    def test_fts_03(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-            """Making a search for artist"""
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'ArtistNa*'}"
-           list=[]
-           for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files %s " %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_03, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_03.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_fts_04 (self):
+        value, l = self.parsing(ftsmatch.test_fts_03, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_03.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-            """
-           Time taken for searching an artist in 10000 music files.    
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+    def test_fts_04(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-            """Making a search for artist"""
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'Art*' }"
-           list=[]
-           for i in range ( 0, iterations ):
-                   start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files  %s" %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files  %s" % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-            value, l=self.parsing(ftsmatch.test_fts_04, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_04.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+        value, l = self.parsing(ftsmatch.test_fts_04, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_04.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-       def test_fts_05 (self):
+    def test_fts_05(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-            """
-            Time taken for searching an artist in 10000 music files.   
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+        """Making a search for artist"""
 
-            """Making a search for artist"""
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'Ar*'}"
-           list=[]
-           for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files  %s" %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_05, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_05.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files  %s" % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(ftsmatch.test_fts_05, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_05.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-        def test_fts_06 (self):
+    def test_fts_06(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-            """
-            Time taken for searching an artist in 10000 music files.   
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+        """Making a search for artist"""
 
-            """Making a search for artist"""
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A*' }"
-           list=[]
-            for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files  %s" %elapse
-                   print "no.of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_06, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_06.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files  %s" % elapse
+            print "no.of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_fts_07 (self):
+        value, l = self.parsing(ftsmatch.test_fts_06, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_06.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-            """
-            Time taken for searching an artist in 10000 music files.
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+    def test_fts_07(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
+        """Making a search for artist"""
 
-            """Making a search for artist"""
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A* p*' }"
-           list=[]
-            for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files  %s" %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_07, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_07.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files  %s" % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_fts_08 (self):
+        value, l = self.parsing(ftsmatch.test_fts_07, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_07.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-            """
-            Time taken for searching an artist in 10000 music files.   
-           @param description: Time taken for searching an artist in 10000 music files
-           @param expected_value: 
-               @param querytype: 
-           """
+    def test_fts_08(self):
+        """
+        Time taken for searching an artist in 10000 music files.
+        @param description: Time taken for searching an artist in 10000 music files
+        @param expected_value:
+        @param querytype:
+        """
 
-            """Making a search for artist"""
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A* p* k*' }"
-           list=[]
-            for i in range ( 0, iterations ):
-                   start=time.time()
-
-                   result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                   elapse =time.time()-start
-                   elapse = "%.3f" %elapse 
-                   print "Time taken for searching an artist in 10000 music files %s " %elapse
-                   print "no. of items retrieved: %d" %len(result)
-                   list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-            value, l=self.parsing(ftsmatch.test_fts_08, query)
-           t.rows.append(hd.TableRow([ftsmatch.test_fts_08.__name__, value[0], value[1], list, value[2], 
len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken for searching an artist in 10000 music files %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(ftsmatch.test_fts_08, query)
+        t.rows.append(
+            hd.TableRow([ftsmatch.test_fts_08.__name__, value[0], value[1], list, value[2], len(result), l]))
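
    For reference, every test in the gallery and ftsmatch classes repeats the same
    measure-and-report loop: run the SPARQL query `iterations` times, format each
    elapsed time to three decimals, print it, collect the timings, then feed the
    method and query to self.parsing() and append an hd.TableRow. A minimal sketch
    of that shared pattern is shown below; it assumes only the resources.SparqlQuery
    call and the module-level `iterations` counter already used by these tests, and
    the name `run_timed_query` is hypothetical, not a helper that exists in the suite.

        import time

        def run_timed_query(resources, query, label, iterations, timeout=None):
            # Hypothetical helper (illustrative only): times `query` over
            # `iterations` runs and returns (per-run timings, last result set).
            timings = []
            result = []
            for _ in range(iterations):
                start = time.time()
                if timeout is not None:
                    result = resources.SparqlQuery(query, timeout=timeout)
                else:
                    result = resources.SparqlQuery(query)
                elapse = "%.3f" % (time.time() - start)
                print "%s = %s" % (label, elapse)
                print "no. of items retrieved: %d" % len(result)
                timings.append(elapse)
            return timings, result
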
 
 
+class content_manager (TestUpdate):
 
-class content_manager (TestUpdate) :
+    def test_cm_01(self):
+        """
+        Time taken to get 100 contacts that match fts and get relevant UI info for them.
+        @param description: Time taken to get 100 contacts that match fts and get relevant UI info for them
+        @param expected_value:
+        @param querytype:
+        """
 
-        def test_cm_01 (self):
+        """Get all the contacts that match fts and get relevant UI info for them"""
 
-               """
-               Time taken to get 100 contacts that match fts and get relevant UI info for them.        
-               @param description: Time taken to get 100 contacts that match fts and get relevant UI info 
for them
-               @param expected_value: 
-               @param querytype: 
-               """
-
-
-               """Get all the contacts that match fts and get relevant UI info for them"""
-
-               query = "SELECT ?url ?photourl ?imstatus tracker:coalesce(?family, ?given, ?orgname, ?nick, 
?email, ?phone, ?blog) \
+        query = "SELECT ?url ?photourl ?imstatus tracker:coalesce(?family, ?given, ?orgname, ?nick, ?email, 
?phone, ?blog) \
                        WHERE { { ?url a nco:PersonContact.?url fts:match 'fami*'. } \
                        UNION { ?url a nco:PersonContact. ?url nco:hasEmailAddress ?add.?add fts:match 
'fami*'. } \
                        UNION { ?url a nco:PersonContact. ?url nco:hasPostalAddress ?post.?post fts:match 
'fami*'. } \
@@ -1819,107 +1779,99 @@ class content_manager (TestUpdate) :
                        ORDER BY ?relevance \
                        LIMIT 100"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-                for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 contacts that match fts and get relevant UI info for 
them %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(content_manager.test_cm_01, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_01.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % 
elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_cm_02 (self):
+        value, l = self.parsing(content_manager.test_cm_01, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_01.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
-               """
-               Time taken to get 100 contacts that match fts and get relevant UI info for them.        
-               @param description: Time taken to get 100 contacts that match fts and get relevant UI info 
for them
-               @param expected_value: 6.13
-               @param querytype: 
-               """
+    def test_cm_02(self):
+        """
+        Time taken to get 100 contacts that match fts and get relevant UI info for them.
+        @param description: Time taken to get 100 contacts that match fts and get relevant UI info for them
+        @param expected_value: 6.13
+        @param querytype:
+        """
 
+        """Get all the contacts that match fts and get relevant UI info for them"""
 
-               """Get all the contacts that match fts and get relevant UI info for them"""
-
-               query = "SELECT ?url tracker:coalesce(nco:nameFamily(?url), nco:nameGiven(?url), 'unknown') \
+        query = "SELECT ?url tracker:coalesce(nco:nameFamily(?url), nco:nameGiven(?url), 'unknown') \
                        WHERE { \
                        { ?url a nco:PersonContact.?url fts:match 'fami*'. } \
                        UNION { ?url a nco:PersonContact. ?url nco:hasEmailAddress ?add.?add fts:match 
'fami*'. } \
                        UNION { ?url a nco:PersonContact. ?url nco:hasPostalAddress ?post.?post fts:match 
'fami*'. } \
                        } LIMIT 100"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-                for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 contacts that match fts and get relevant UI info for 
them %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(content_manager.test_cm_02, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_02.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % 
elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(content_manager.test_cm_02, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_02.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
-        def test_cm_03 (self):
+    def test_cm_03(self):
+        """
+        Time taken to get 100 contacts that match fts and get relevant UI info for them.
+        @param description: Time taken to get 100 contacts that match fts and get relevant UI info for them
+        @param expected_value:
+        @param querytype:
+        """
 
-               """
-               Time taken to get 100 contacts that match fts and get relevant UI info for them.        
-               @param description: Time taken to get 100 contacts that match fts and get relevant UI info 
for them 
-               @param expected_value: 
-               @param querytype: 
-               """
+        """Get all the messages """
 
-
-               """Get all the messages """
-
-               query = "SELECT ?url nie:title(?url) \
+        query = "SELECT ?url nie:title(?url) \
                        WHERE { \
                        { ?url a nmo:Message. ?url fts:match 'fami*'. } \
                        UNION { ?url a nmo:Message. ?url nmo:from ?from . ?from fts:match 'fami*'. } \
                        UNION { ?url a nmo:Message. ?url nmo:recipient ?to . ?to fts:match 'fami*'. } \
                        } LIMIT 100"
 
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               list=[]
-                for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 contacts that match fts and get relevant UI info for 
them %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % 
elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(content_manager.test_cm_03, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_03.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+        value, l = self.parsing(content_manager.test_cm_03, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_03.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
+    def test_cm_04(self):
+        """
+        Time taken to get 100 contacts that match fts and get relevant UI info for them.
+        @param description: Time taken to get 100 contacts that match fts and get relevant UI info for them
+        @param expected_value: 8.77
+        @param querytype:
+        """
 
-        def test_cm_04 (self):
+        """Get all the messages """
 
-               """
-               Time taken to get 100 contacts that match fts and get relevant UI info for them.        
-               @param description: Time taken to get 100 contacts that match fts and get relevant UI info 
for them
-               @param expected_value: 8.77
-               @param querytype: 
-               """
-
-               """Get all the messages """
-
-               query = "SELECT ?url ?fileLastModified ?relevance ?fileName ?mimeType ?url2 \
+        query = "SELECT ?url ?fileLastModified ?relevance ?fileName ?mimeType ?url2 \
                        WHERE { \
                        ?url a nfo:Image .\
                        ?url nfo:fileLastModified ?fileLastModified. \
@@ -1929,35 +1881,34 @@ class content_manager (TestUpdate) :
                        OPTIONAL { ?url maemo:relevance ?relevance. } \
                        } ORDER BY ?_fileName"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 contacts that match fts and get relevant UI info for 
them %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            start = time.time()
 
-               value, l=self.parsing(content_manager.test_cm_04, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_04.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % 
elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-        def test_cm_05 (self):
+        value, l = self.parsing(content_manager.test_cm_04, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_04.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
-               """
-               Time taken to get 100 content items that match fts without UI info for them.    
-               @param description: Time taken to get 100 content items that match fts without UI info for 
them
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_cm_05(self):
+        """
+        Time taken to get 100 content items that match fts without UI info for them.
+        @param description: Time taken to get 100 content items that match fts without UI info for them
+        @param expected_value:
+        @param querytype:
+        """
 
-               """Get all the matching data """
+        """Get all the matching data """
 
-               query = "SELECT ?glob_url \
+        query = "SELECT ?glob_url \
                        WHERE \
                        { \
                          { SELECT ?url as ?glob_url \
@@ -2006,34 +1957,33 @@ class content_manager (TestUpdate) :
                          } \
                        LIMIT 100"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 content items that match fts without UI info for them %s 
" %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 content items that match fts without UI info for them %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(content_manager.test_cm_05, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_05.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+        value, l = self.parsing(content_manager.test_cm_05, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_05.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
+    def test_cm_06(self):
+        """
+        Time taken to get 100 content items that match fts and get relevant UI info for them.
+        @param description: Time taken to get 100 content items that match fts and get relevant UI info for 
them
+        @param expected_value:
+        @param querytype:
+        """
 
-       def test_cm_06 (self):
+        """Get all the matching data """
 
-               """
-               Time taken to get 100 content items that match fts and get relevant UI info for them.
-               @param description: Time taken to get 100 content items that match fts and get relevant UI 
info for them
-               @param expected_value: 
-               @param querytype: 
-               """
-
-               """Get all the matching data """
-
-               query = "SELECT ?glob_url ?first ?second \
+        query = "SELECT ?glob_url ?first ?second \
                        WHERE \
                        { \
                          { SELECT ?url as ?glob_url \
@@ -2109,34 +2059,34 @@ class content_manager (TestUpdate) :
                          } \
                        LIMIT 100"
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 100 content items that match fts and get relevant UI info 
for them %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(content_manager.test_cm_06, query)
-               t.rows.append(hd.TableRow([content_manager.test_cm_06.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 100 content items that match fts and get relevant UI info for them %s " 
% elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(content_manager.test_cm_06, query)
+        t.rows.append(
+            hd.TableRow([content_manager.test_cm_06.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
-class contacts (TestUpdate) :
 
-        def test_contacts_01 (self):
+class contacts (TestUpdate):
 
-               """
-               Time taken to get 50 contacts basic information (original).
-               @param description: Time taken to get 50 contacts basic information (original)(Bug : 176170) 
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_contacts_01(self):
+        """
+        Time taken to get 50 contacts basic information (original).
+        @param description: Time taken to get 50 contacts basic information (original)(Bug : 176170)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               query = " \
+        query = " \
                        SELECT \
                          ?_contact \
                          ?_Avatar_ImageUrl \
@@ -2176,32 +2126,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts basic information (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts basic information (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(contacts.test_contacts_01, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_01.__name__, value[0], value[1], list, 
value[2], len(result), l]))
+        value, l = self.parsing(contacts.test_contacts_01, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_01.__name__, value[0], value[1], list, value[2], 
len(result), l]))
 
+    def test_contacts_02(self):
+        """
+        Time taken to get 50 contacts basic information (modified)
+        @param description: Time taken to get 50 contacts basic information (modified)
+        @param expected_value:
+        @param querytype: Modified
+        """
 
-       def test_contacts_02 (self):
-
-               """
-               Time taken to get 50 contacts basic information (modified)
-               @param description: Time taken to get 50 contacts basic information (modified) 
-               @param expected_value: 
-               @param querytype: Modified
-               """
-
-               query = " \
+        query = " \
                        SELECT \
                          ?_contact \
                          ?_Avatar_ImageUrl \
@@ -2230,32 +2179,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts basic information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-               value, l=self.parsing(contacts.test_contacts_02, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_02.__name__, value[0], value[1], list, value[2], len(result), l]))
+            result = self.resources.SparqlQuery(query)
 
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts basic information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_03 (self):
+        value, l = self.parsing(contacts.test_contacts_02, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_02.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts address information (original)
-               @param description: Time taken to get 50 contacts address information (original) 
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_contacts_03(self):
+        """
+        Time taken to get 50 contacts address information (original)
+        @param description: Time taken to get 50 contacts address information (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               query = " \
+        query = " \
                        SELECT  \
                          ?_contact \
                          ?_Address_Country \
@@ -2330,32 +2278,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts address information (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts address information (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(contacts.test_contacts_03, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_03.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(contacts.test_contacts_03, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_03.__name__, value[0], value[1], list, value[2], len(result), l]))
 
+    def test_contacts_04(self):
+        """
+        Time taken to get 50 contacts address information (modified)
+        @param description: Time taken to get 50 contacts address information (modified)
+        @param expected_value:
+        @param querytype: Modified
+        """
 
-       def test_contacts_04 (self):
-
-               """
-               Time taken to get 50 contacts address information (modified)
-               @param description: Time taken to get 50 contacts address information (modified)
-               @param expected_value: 
-               @param querytype: Modified
-               """
-
-               query = " \
+        query = " \
                        SELECT \
                          ?contact \
                          nco:country(?postal) \
@@ -2377,32 +2324,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts address information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(contacts.test_contacts_04, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_04.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts address information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_05 (self):
+        value, l = self.parsing(contacts.test_contacts_04, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_04.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts email information (original)
-               @param description: Time taken to get 50 contacts email information (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_contacts_05(self):
+        """
+        Time taken to get 50 contacts email information (original)
+        @param description: Time taken to get 50 contacts email information (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-
-               query = " \
+        query = " \
                        SELECT \
                          ?_contact ?_EmailAddress ?_EmailAddress_EmailAddress \
                          bound(?_EmailAddress_Context_Work) AS ?_EmailAddress_Context_Work_IsBound \
@@ -2425,31 +2371,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts email information (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(contacts.test_contacts_05, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_05.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts email information (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_06 (self):
+        value, l = self.parsing(contacts.test_contacts_05, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_05.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts email information (modified).
-               @param description: Time taken to get 50 contacts email information (modified)
-               @param expected_value: 
-               @param querytype: Modified
-               """
+    def test_contacts_06(self):
+        """
+        Time taken to get 50 contacts email information (modified).
+        @param description: Time taken to get 50 contacts email information (modified)
+        @param expected_value:
+        @param querytype: Modified
+        """
 
-               query = " \
+        query = " \
                        SELECT \
                          ?contact \
                          ?email \
@@ -2472,31 +2418,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts email information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(contacts.test_contacts_06, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_06.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts email information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_07 (self):
+        value, l = self.parsing(contacts.test_contacts_06, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_06.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts online information (original)
-               @param description: Time taken to get 50 contacts online information (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_contacts_07(self):
+        """
+        Time taken to get 50 contacts online information (original)
+        @param description: Time taken to get 50 contacts online information (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               query = " \
+        query = " \
                        SELECT  \
                          ?_contact \
                          ?_OnlineAccount \
@@ -2545,32 +2491,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts online information (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts online information (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(contacts.test_contacts_07, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_07.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(contacts.test_contacts_07, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_07.__name__, value[0], value[1], list, value[2], len(result), l]))
 
+    def test_contacts_08(self):
+        """
+        Time taken to get 50 contacts online information (modified)
+        @param description: Time taken to get 50 contacts online information (modified)
+        @param expected_value:
+        @param querytype: Modified
+        """
 
-       def test_contacts_08 (self):
-
-               """
-               Time taken to get 50 contacts online information (modified)
-               @param description: Time taken to get 50 contacts online information (modified)
-               @param expected_value: 
-               @param querytype: Modified
-               """
-
-               query = " \
+        query = " \
                        SELECT  \
                          ?_contact \
                          ?_OnlineAccount \
@@ -2596,31 +2541,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts online information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(contacts.test_contacts_08, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_08.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts online information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_09 (self):
+        value, l = self.parsing(contacts.test_contacts_08, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_08.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts phone number information (original)
-               @param description: Time taken to get 50 contacts phone number information (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_contacts_09(self):
+        """
+        Time taken to get 50 contacts phone number information (original)
+        @param description: Time taken to get 50 contacts phone number information (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               query = " \
+        query = " \
                        SELECT \
                          ?_contact ?_PhoneNumber ?_PhoneNumber_PhoneNumber \
                          bound(?_PhoneNumber_SubTypes_BulletinBoardSystem) AS ?_PhoneNumber_SubTypes_BulletinBoardSystem_IsBound \
@@ -2742,31 +2687,31 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts phone number information (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts phone number information (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(contacts.test_contacts_09, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_09.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(contacts.test_contacts_09, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_09.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-       def test_contacts_10 (self):
+    def test_contacts_10(self):
+        """
+        Time taken to get 50 contacts phone number information (modified)
+        @param description: Time taken to get 50 contacts phone number information (modified)
+        @param expected_value:
+        @param querytype: Modified
+        """
 
-               """
-               Time taken to get 50 contacts phone number information (modified)
-               @param description: Time taken to get 50 contacts phone number information (modified)
-               @param expected_value: 
-               @param querytype: Modified
-               """
-
-               query = " \
+        query = " \
                        SELECT \
                          ?contact \
                          ?phoneNumber \
@@ -2787,60 +2732,61 @@ class contacts (TestUpdate) :
                        ORDER BY ?_contact LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts phone number information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(contacts.test_contacts_10, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_10.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts phone number information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-       def test_contacts_11 (self):
+        value, l = self.parsing(contacts.test_contacts_10, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_10.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-               """
-               Time taken to get 50 contacts phone number information (modified)
-               @param description: Time taken to get 50 contacts phone number information (modified)
-               @param expected_value: 
-               @param querytype: 
-               """
+    def test_contacts_11(self):
+        """
+        Time taken to get 50 contacts phone number information (modified)
+        @param description: Time taken to get 50 contacts phone number information (modified)
+        @param expected_value:
+        @param querytype:
+        """
 
-               query = " \
+        query = " \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 contacts phone number information (modified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 contacts phone number information (modified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(contacts.test_contacts_10, query)
-               t.rows.append(hd.TableRow([contacts.test_contacts_10.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(contacts.test_contacts_11, query)
+        t.rows.append(
+            hd.TableRow([contacts.test_contacts_11.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-class location (TestUpdate) :
 
-        def test_location_01 (self):
+class location (TestUpdate):
 
-               """
-               Time taken to get 50 landmarks (original).
-               @param description: Time taken to get 50 landmarks (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
+    def test_location_01(self):
+        """
+        Time taken to get 50 landmarks (original).
+        @param description: Time taken to get 50 landmarks (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               query = " \
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon ?cAlt ?cRad \
@@ -2933,31 +2879,31 @@ class location (TestUpdate) :
                        } ORDER BY ASC(?name) LIMIT 50 \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 landmarks (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 landmarks (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(location.test_location_01, query)
-               t.rows.append(hd.TableRow([location.test_location_01.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(location.test_location_01, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_01.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-        def test_location_02 (self):
+    def test_location_02(self):
+        """
+        Time taken to get 50 landmarks within coords (original)
+        @param description: Time taken to get 50 landmarks within coords (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               """
-               Time taken to get 50 landmarks within coords (original)
-               @param description: Time taken to get 50 landmarks within coords (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
-
-               query = " \
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon ?cAlt ?cRad \
@@ -3051,32 +2997,31 @@ class location (TestUpdate) :
                        } ORDER BY ASC(?name) LIMIT \
                        "
 
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 landmarks within coords (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            result = self.resources.SparqlQuery(query)
 
-               value, l=self.parsing(location.test_location_02, query)
-               t.rows.append(hd.TableRow([location.test_location_02.__name__, value[0], value[1], list, value[2], len(result), l]))
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 landmarks within coords (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
+        value, l = self.parsing(location.test_location_02, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_02.__name__, value[0], value[1], list, value[2], len(result), l]))
 
-        def test_location_03 (self):
+    def test_location_03(self):
+        """
+        Time taken to get max 50 landmarks within certain range with bounding box (original).
+        @param description: Time taken to get max 50 landmarks within certain range with bounding box (original)
+        @param expected_value:
+        @param querytype: Original
+        """
 
-               """
-               Time taken to get max 50 landmarks within certain range with bounding box (original).
-               @param description: Time taken to get max 50 landmarks within certain range with bounding box (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
-
-               query = " \
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon ?cAlt ?cRad \
@@ -3172,32 +3117,31 @@ class location (TestUpdate) :
                                 tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
                        } ORDER BY ASC(?distance) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get max 50 landmarks within certain range with bounding box (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(location.test_location_03, query)
-               t.rows.append(hd.TableRow([location.test_location_03.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-
-        def test_location_04 (self):
-
-               """
-               Time taken to get max 50 landmarks within certain range without bounding box (original).
-               @param description: Time taken to get max 50 landmarks within certain range without bounding box (original)
-               @param expected_value: 
-               @param querytype: Original
-               """
-
-               query = " \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get max 50 landmarks within certain range with bounding box (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(location.test_location_03, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_03.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_location_04(self):
+        """
+        Time taken to get max 50 landmarks within certain range without bounding box (original).
+        @param description: Time taken to get max 50 landmarks within certain range without bounding box (original)
+        @param expected_value:
+        @param querytype: Original
+        """
+
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon ?cAlt ?cRad \
@@ -3291,31 +3235,31 @@ class location (TestUpdate) :
                          FILTER(tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
                        } ORDER BY ASC(?distance) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get max 50 landmarks within certain range without bounding box (original) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(location.test_location_04, query)
-               t.rows.append(hd.TableRow([location.test_location_04.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-        def test_location_05 (self):
-
-               """
-               Time taken to get 50 landmarks (simplified).    
-               @param description: Time taken to get 50 landmarks (simplified)
-               @param expected_value: 6.42
-               @param querytype: 
-               """
-
-               query = " \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get max 50 landmarks within certain range without bounding box (original) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(location.test_location_04, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_04.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_location_05(self):
+        """
+        Time taken to get 50 landmarks (simplified).
+        @param description: Time taken to get 50 landmarks (simplified)
+        @param expected_value: 6.42
+        @param querytype:
+        """
+
+        query = " \
                        SELECT \
                          ?urn \
                          mlo:latitude(?point) mlo:longitude(?point) mlo:altitude(?point) mlo:radius(?point) \
@@ -3328,32 +3272,31 @@ class location (TestUpdate) :
                          ?location mlo:asGeoPoint ?point . \
                        } ORDER BY ASC(?name) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get 50 landmarks (simplified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(location.test_location_05, query)
-               t.rows.append(hd.TableRow([location.test_location_05.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-
-        def test_location_06 (self):
-
-               """
-               Time taken to get max 50 landmarks within coords (simplified).  
-               @param description: Time taken to get max 50 landmarks within coords (simplified) 
-               @param expected_value: 0.43
-               @param querytype: 
-               """
-
-               query = " \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get 50 landmarks (simplified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(location.test_location_05, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_05.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_location_06(self):
+        """
+        Time taken to get max 50 landmarks within coords (simplified).
+        @param description: Time taken to get max 50 landmarks within coords (simplified)
+        @param expected_value: 0.43
+        @param querytype:
+        """
+
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -3369,31 +3312,31 @@ class location (TestUpdate) :
                          FILTER(?cLat >= 39.16 && ?cLat <= 40.17 && ?cLon >= 63.42 && ?cLon <= 64.96) \
                        } ORDER BY ASC(?name) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get max 50 landmarks within coords (simplified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(location.test_location_06, query)
-               t.rows.append(hd.TableRow([location.test_location_06.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-        def test_location_07 (self):
-
-               """
-               Time taken to get max 50 landmarks within range with bounding box (simplified). 
-               @param description: Time taken to get max 50 landmarks within range with bounding box (simplified)
-               @param expected_value: 0.44
-               @param querytype: 
-               """
-
-               query = " \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get max 50 landmarks within coords (simplified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(location.test_location_06, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_06.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_location_07(self):
+        """
+        Time taken to get max 50 landmarks within range with bounding box (simplified).
+        @param description: Time taken to get max 50 landmarks within range with bounding box (simplified)
+        @param expected_value: 0.44
+        @param querytype:
+        """
+
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -3412,31 +3355,31 @@ class location (TestUpdate) :
                                 tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
                        } ORDER BY ASC(?distance) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
-
-                       result=self.resources.SparqlQuery(query)
-
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get max 50 landmarks within range with bounding box (simplified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
-
-               value, l=self.parsing(location.test_location_07, query)
-               t.rows.append(hd.TableRow([location.test_location_07.__name__, value[0], value[1], list, value[2], len(result), l]))
-
-        def test_location_08 (self):
-
-               """
-               Time taken to get max 50 landmarks within range without bounding box (simplified).      
-               @param description: Time taken to get max 50 landmarks within range without bounding box (simplified)
-               @param expected_value: 0.63
-               @param querytype: 
-               """
-
-               query = " \
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
+
+            result = self.resources.SparqlQuery(query)
+
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get max 50 landmarks within range with bounding box (simplified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
+
+        value, l = self.parsing(location.test_location_07, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_07.__name__, value[0], value[1], list, value[2], len(result), l]))
+
+    def test_location_08(self):
+        """
+        Time taken to get max 50 landmarks within range without bounding box (simplified).
+        @param description: Time taken to get max 50 landmarks within range without bounding box (simplified)
+        @param expected_value: 0.63
+        @param querytype:
+        """
+
+        query = " \
                        SELECT \
                          ?urn \
                          ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -3453,54 +3396,58 @@ class location (TestUpdate) :
                          FILTER(tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
                        } ORDER BY ASC(?distance) LIMIT 50 \
                        "
-               list=[]
-               for i in range ( 0, iterations ):
-                       start=time.time()
+        list = []
+        for i in range(0, iterations):
+            start = time.time()
 
-                       result=self.resources.SparqlQuery(query)
+            result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       elapse = "%.3f" %elapse 
-                       print "Time taken to get max 50 landmarks within range without bounding box (simplified) %s " %elapse
-                       print "no. of items retrieved: %d" %len(result)
-                       list.append(elapse)
+            elapse = time.time() - start
+            elapse = "%.3f" % elapse
+            print "Time taken to get max 50 landmarks within range without bounding box (simplified) %s " % elapse
+            print "no. of items retrieved: %d" % len(result)
+            list.append(elapse)
 
-               value, l=self.parsing(location.test_location_08, query)
-               t.rows.append(hd.TableRow([location.test_location_08.__name__, value[0], value[1], list, value[2], len(result), l]))
+        value, l = self.parsing(location.test_location_08, query)
+        t.rows.append(
+            hd.TableRow([location.test_location_08.__name__, value[0], value[1], list, value[2], len(result), l]))
 
 
 def header(title):
-       global t
-       t=hd.Table(width='100%', col_width=('12%', '20%', '5%', '30%', '10%', '10%', '20%'))
-       doc.nodes.append(hd.Heading(size='2', heading=title))
-       t.rows.append(hd.TableRow(['Test', 'Description', 'ExpectedValue (sec)', 'ActualValue (sec)', 'Query type', 'No of items retrived', 'Query'], header=True))
+    global t
+    t = hd.Table(width='100%', col_width=(
+        '12%', '20%', '5%', '30%', '10%', '10%', '20%'))
+    doc.nodes.append(hd.Heading(size='2', heading=title))
+    t.rows.append(
+        hd.TableRow(['Test', 'Description', 'ExpectedValue (sec)', 'ActualValue (sec)', 'Query type', 'No of items retrieved', 'Query'], header=True))
 
 if __name__ == "__main__":
 
-       if not os.path.isdir("report"): 
-               os.mkdir("report")
-       global iterations
-       iterations=10
-        doc=hd.HtmlDoc()
-        doc.nodes.append(hd.Heading(size='1', heading="Tracker Performance Test"))
-       
-        dict={}
-        base={"email" : email, "calls": calls, "instant_messages": instant_messages, "rtcom":rtcom, "audio":audio, "gallery":gallery, "ftsmatch":ftsmatch, "content_manager":content_manager, "contacts":contacts, "location":location }
-        if len(sys.argv)==1:
-               dict=base
-        else:
-                for arg in range(1,len(sys.argv)):
-                        print sys.argv[arg]
-                        dict[sys.argv[arg]]=base[sys.argv[arg]]
-       
-        for k, v in dict.items():
-            header(k)
-            unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromTestCase(v))
-            doc.nodes.append(t)
-
-        report=open("./perf_report.html", "a")
-        doc.toHtml(report)
-        report.close()
-
-       #unittest.main()
-
+    if not os.path.isdir("report"):
+        os.mkdir("report")
+    global iterations
+    iterations = 10
+    doc = hd.HtmlDoc()
+    doc.nodes.append(hd.Heading(size='1', heading="Tracker Performance Test"))
+
+    dict = {}
+    base = {"email": email, "calls": calls, "instant_messages": instant_messages, "rtcom": rtcom, "audio": audio, "gallery":
+            gallery, "ftsmatch": ftsmatch, "content_manager": content_manager, "contacts": contacts, "location": location}
+    if len(sys.argv) == 1:
+        dict = base
+    else:
+        for arg in range(1, len(sys.argv)):
+            print sys.argv[arg]
+            dict[sys.argv[arg]] = base[sys.argv[arg]]
+
+    for k, v in dict.items():
+        header(k)
+        unittest.TextTestRunner().run(
+            unittest.TestLoader().loadTestsFromTestCase(v))
+        doc.nodes.append(t)
+
+    report = open("./perf_report.html", "a")
+    doc.toHtml(report)
+    report.close()
+
+    # unittest.main()
diff --git a/tests/functional-tests/performance-tc.py b/tests/functional-tests/performance-tc.py
index 6abe38a..bce2151 100644
--- a/tests/functional-tests/performance-tc.py
+++ b/tests/functional-tests/performance-tc.py
@@ -18,7 +18,11 @@
 # 02110-1301, USA.
 #
 
-import sys,os,dbus,commands, signal
+import sys
+import os
+import dbus
+import commands
+import signal
 import unittest
 import time
 import random
@@ -29,7 +33,6 @@ TRACKER_OBJ = '/org/freedesktop/Tracker1/Resources'
 RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
 
 
-
 """import .ttl files """
 """
 def stats() :
@@ -66,65 +69,61 @@ def import_ttl (music_ttl):
 """
 
 
-
-
-
-
 class TestUpdate (unittest.TestCase):
 
-        def setUp(self):
-                bus = dbus.SessionBus()
-                tracker = bus.get_object(TRACKER, TRACKER_OBJ)
-                self.resources = dbus.Interface (tracker,
-                                                 dbus_interface=RESOURCES_IFACE)
+    def setUp(self):
+        bus = dbus.SessionBus()
+        tracker = bus.get_object(TRACKER, TRACKER_OBJ)
+        self.resources = dbus.Interface(tracker,
+                                        dbus_interface=RESOURCES_IFACE)
 
 
 """ email performance test cases """
-class email(TestUpdate):
 
 
-        def p_test_email_01(self):
+class email(TestUpdate):
 
-               query = "SELECT ?m ?From  ?date ?email1 WHERE { \
+    def p_test_email_01(self):
+
+        query = "SELECT ?m ?From  ?date ?email1 WHERE { \
                        ?m a  nmo:Email ; \
                        nmo:receivedDate ?date ;\
                        nmo:from ?From . ?from nco:hasEmailAddress ?email1 } LIMIT 10000"
 
-               """Query for emails """
-               start=time.time()
+        """Query for emails """
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying emails = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying emails = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 
+""" calls performance  test cases """
 
 
-""" calls performance  test cases """
 class calls(TestUpdate):
 
+    def p_test_calls_01(self):
 
-        def p_test_calls_01(self):
-
-               query = "SELECT ?duration ?phonenumber WHERE {\
+        query = "SELECT ?duration ?phonenumber WHERE {\
                        ?call  a  nmo:Call ;\
                        nmo:duration ?duration ;\
                        nmo:from [a nco:Contact ; nco:hasPhoneNumber ?phonenumber] }LIMIT 10000"
 
-               """Querying the duration of calls of contacts """
-               start=time.time()
+        """Querying the duration of calls of contacts """
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying duration of calls from phonenumbers  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying duration of calls from phonenumbers  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_calls_02(self):
+    def p_test_calls_02(self):
 
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:sentDate ?date ; \
                        nmo:duration ?duration; \
@@ -137,19 +136,18 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
-               """Querying the dialed calls"""
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        """Querying the dialed calls"""
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying dialed calls  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for querying dialed calls  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_calls_03(self):
+    def p_test_calls_03(self):
 
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:receivedDate ?date ; \
                        nmo:duration ?duration; \
@@ -160,18 +158,18 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
-               """Querying the received calls"""
-               start=time.time()
+        """Querying the received calls"""
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying received calls  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying received calls  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_calls_04(self):
+    def p_test_calls_04(self):
 
-               query = "SELECT ?name ?date ?number ?duration \
+        query = "SELECT ?name ?date ?number ?duration \
                        WHERE {?m a nmo:Call; \
                        nmo:receivedDate ?date ; \
                        nmo:duration ?duration; \
@@ -182,64 +180,62 @@ class calls(TestUpdate):
                        FILTER (?duration > 0) .} \
                        ORDER BY desc(?date) LIMIT 1000"
 
+        """Querying the missed calls"""
+        start = time.time()
 
-               """Querying the missed calls"""
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying missed calls  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for querying missed calls  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 
 """ IM performance  test cases """
-class instant_messages(TestUpdate):
 
 
-        def p_test_im_01(self):
+class instant_messages(TestUpdate):
 
+    def p_test_im_01(self):
 
-               query = "SELECT ?message ?from ?date ?content WHERE { \
+        query = "SELECT ?message ?from ?date ?content WHERE { \
                 ?message a nmo:IMMessage ; \
                 nmo:from ?from ; \
                 nmo:receivedDate ?date ;  \
                 nie:plainTextContent ?content} LIMIT 10000"
 
-               """Querying the messages """
-                       start=time.time()
+        """Querying the messages """
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-                       elapse =time.time()-start
-                       print "Time taken for querying  messages  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying  messages  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_im_02(self):
+    def p_test_im_02(self):
 
-               query = "SELECT ?contact ?status WHERE{\
+        query = "SELECT ?contact ?status WHERE{\
                        ?contact a  nco:IMAccount; \
                        nco:imPresence ?status }LIMIT 10000"
 
-               """Querying the status of contacts every sec"""
-               start=time.time()
+        """Querying the status of contacts every sec"""
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying status of contacts = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for querying status of contacts = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 
 """ rtcom performance  test cases """
-class rtcom(TestUpdate):
 
 
-        def p_test_rtcom_01(self):
+class rtcom(TestUpdate):
+
+    def p_test_rtcom_01(self):
 
-               query = "SELECT ?channel ?participant nco:fullname(?participant) ?last_date nie:plainTextContent(?last_message) \
+        query = "SELECT ?channel ?participant nco:fullname(?participant) ?last_date nie:plainTextContent(?last_message) \
                                (SELECT COUNT(?message) AS ?message_count  \
                                        WHERE { ?message nmo:communicationChannel ?channel }) \
                                (SELECT COUNT(?message) AS ?message_count  \
@@ -253,21 +249,20 @@ class rtcom(TestUpdate):
                                FILTER (?participant != nco:default-contact-me ) \
                                } ORDER BY DESC(?last_date) LIMIT 50 }"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying (old) conversation list view  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for querying (old) conversation list view  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_rtcom_02(self):
+    def p_test_rtcom_02(self):
 
-               # A version of the next one that skips the contact parts that are not generated properly
+        # A version of the next one that skips the contact parts that are not
+        # generated properly
 
-               query = "SELECT ?msg ?date ?text ?contact \
+        query = "SELECT ?msg ?date ?text ?contact \
                        WHERE { \
                        ?msg nmo:communicationChannel <urn:channel:1> ; \
                        nmo:receivedDate ?date ; \
@@ -275,28 +270,27 @@ class rtcom(TestUpdate):
                        <urn:channel:1> nmo:hasParticipant ?contact . \
                        } ORDER BY DESC(?date) LIMIT 50"
 
-               #query = "SELECT ?msg ?date ?text ?contact \
-               #       WHERE { \
-               #       ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
-               #       nmo:receivedDate ?date ; \
-               #       nie:plainTextContent ?text ; \
-               #       nmo:from [ nco:hasIMAddress ?fromAddress ] . \
-               #       <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
-               #       ?contact nco:hasIMAddress ?fromAddress . \
-               #       } ORDER BY DESC(?date) LIMIT 50"
+        # query = "SELECT ?msg ?date ?text ?contact \
+        #      WHERE { \
+        #      ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
+        #      nmo:receivedDate ?date ; \
+        #      nie:plainTextContent ?text ; \
+        #      nmo:from [ nco:hasIMAddress ?fromAddress ] . \
+        #      <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
+        #      ?contact nco:hasIMAddress ?fromAddress . \
+        #      } ORDER BY DESC(?date) LIMIT 50"
 
+        start = time.time()
 
-               start=time.time()
+        result = self.resources.SparqlQuery(query)
 
-               result=self.resources.SparqlQuery(query)
+        elapse = time.time() - start
+        print "Time taken for querying (old) conversation view (without contact info)  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               elapse =time.time()-start
-               print "Time taken for querying (old) conversation view (without contact info)  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+    def p_test_rtcom_03(self):
 
-        def p_test_rtcom_03(self):
-
-               query = "SELECT ?msg ?date ?text ?contact \
+        query = "SELECT ?msg ?date ?text ?contact \
                        WHERE { \
                        ?msg nmo:communicationChannel <urn:channel:1> ; \
                        nmo:receivedDate ?date ; \
@@ -306,26 +300,25 @@ class rtcom(TestUpdate):
                        ?contact nco:hasIMAddress ?fromAddress . \
                        } ORDER BY DESC(?date) LIMIT 50"
 
-               #query = "SELECT ?msg ?date ?text ?contact \
-               #       WHERE { \
-               #       ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
-               #       nmo:receivedDate ?date ; \
-               #       nie:plainTextContent ?text ; \
-               #       nmo:from [ nco:hasIMAddress ?fromAddress ] . \
-               #       <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
-               #       ?contact nco:hasIMAddress ?fromAddress . \
-               #       } ORDER BY DESC(?date) LIMIT 50"
-
+        # query = "SELECT ?msg ?date ?text ?contact \
+        #      WHERE { \
+        #      ?msg nmo:communicationChannel <urn:uuid:7585395544138154780> ; \
+        #      nmo:receivedDate ?date ; \
+        #      nie:plainTextContent ?text ; \
+        #      nmo:from [ nco:hasIMAddress ?fromAddress ] . \
+        #      <urn:uuid:7585395544138154780> nmo:hasParticipant ?contact . \
+        #      ?contact nco:hasIMAddress ?fromAddress . \
+        #      } ORDER BY DESC(?date) LIMIT 50"
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying (old) conversation view  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying (old) conversation view  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_rtcom_04(self):
+    def p_test_rtcom_04(self):
 
 #
 # Current rtcom queries, please do not "quietly optimize".
@@ -333,7 +326,7 @@ class rtcom(TestUpdate):
 
 # requires secondary index support to be fast
 
-               query = " \
+        query = " \
 SELECT ?message ?date ?from ?to \
      rdf:type(?message) \
     tracker:coalesce(fn:concat(nco:nameGiven(?contact), ' ', nco:nameFamily(?contact)), nco:nickname(?contact)) \
@@ -402,19 +395,19 @@ WHERE \
 LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying conversation view  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying conversation view  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_rtcom_05(self):
+    def p_test_rtcom_05(self):
 #
 # Current rtcom queries, please do not "quietly optimize".
 #
-               query = " \
+        query = " \
 SELECT ?channel ?subject nie:generator(?channel) \
  tracker:coalesce(fn:concat(nco:nameGiven(?contact), ' ', nco:nameFamily(?contact)), nco:nickname(?contact)) AS ?contactName \
   nco:contactUID(?contact) AS ?contactUID \
@@ -455,20 +448,19 @@ WHERE { \
 ORDER BY DESC(?lastDate) LIMIT 50\
 "
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying conversation list  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying conversation list  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_rtcom_06(self):
+    def p_test_rtcom_06(self):
 #
 # Current rtcom queries, please do not "quietly optimize".
 #
-               query = " \
+        query = " \
 SELECT ?call ?date ?from ?to \
      rdf:type(?call) \
      nmo:isSent(?call) \
@@ -525,26 +517,23 @@ WHERE \
 ORDER BY DESC(?date) LIMIT 50\
 "
 
+        start = time.time()
 
+        result = self.resources.SparqlQuery(query)
 
-
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying call history  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying call history  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 """ Audio, Video, Images  performance  test cases """
-class audio(TestUpdate):
 
 
-        def p_test_audio_01(self):
+class audio(TestUpdate):
 
-               """ Querying for Artist and finding the no.of albums in each artist.  """
+    def p_test_audio_01(self):
+        """ Querying for Artist and finding the no.of albums in each artist.  """
 
-               query = "SELECT ?artist ?name COUNT(DISTINCT ?album) COUNT (?song) \
+        query = "SELECT ?artist ?name COUNT(DISTINCT ?album) COUNT (?song) \
                       WHERE { \
                       ?song a nmm:MusicPiece ; \
                       nmm:musicAlbum ?album;  \
@@ -552,38 +541,36 @@ class audio(TestUpdate):
                       ?artist nmm:artistName ?name. \
                       } GROUP BY ?artist"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying Artist and finding the no.of albums in each artist  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_audio_02(self):
+        elapse = time.time() - start
+        print "Time taken for querying Artist and finding the no.of albums in each artist  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-                """Query all albums also count of songs in each album """
+    def p_test_audio_02(self):
+        """Query all albums also count of songs in each album """
 
-               query= "SELECT  ?album COUNT(?songs) AS ?count  WHERE { \
+        query = "SELECT  ?album COUNT(?songs) AS ?count  WHERE { \
                        ?a a nmm:MusicAlbum; \
                        nie:title ?album. \
                        ?mp nmm:musicAlbum ?a;\
                        nie:title ?songs.\
                         }GROUP BY ?album ORDER BY DESC(?album)"
 
-               start=time.time()
-
-               result = self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying all albums and count their songs  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_audio_03(self):
+        elapse = time.time() - start
+        print "Time taken for querying all albums and count their songs  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-                """Query all songs """
+    def p_test_audio_03(self):
+        """Query all songs """
 
-               query = "SELECT DISTINCT ?title ?album ?artist \
+        query = "SELECT DISTINCT ?title ?album ?artist \
                        WHERE { { \
                        ?song a nmm:MusicPiece . \
                        ?song nie:title ?title .\
@@ -593,18 +580,18 @@ class audio(TestUpdate):
                        ?alb nmm:albumTitle ?album .}}}  \
                        ORDER BY ?title "
 
-               start=time.time()
+        start = time.time()
 
-               result = self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying all songs  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying all songs  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_audio_04 (self) :
-                """Query all albums """
+    def p_test_audio_04(self):
+        """Query all albums """
 
-                query = "SELECT DISTINCT nmm:albumTitle(?album) AS ?Album  ?Artist  COUNT(?Songs)  AS ?Songs  ?album \
+        query = "SELECT DISTINCT nmm:albumTitle(?album) AS ?Album  ?Artist  COUNT(?Songs)  AS ?Songs  ?album \
                        WHERE { { ?Songs a nmm:MusicPiece .\
                        ?Songs nmm:musicAlbum ?album . \
                        OPTIONAL{  \
@@ -612,18 +599,17 @@ class audio(TestUpdate):
                        OPTIONAL{?perf nmm:artistName ?Artist .\
                         }}}}GROUP BY ?album ORDER BY ?album LIMIT 5000"
 
-                start=time.time()
+        start = time.time()
 
-                result = self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-                elapse =time.time()-start
-                print "Time taken for querying 15000 albums  = %s " %elapse
-                print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying 15000 albums  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_audio_05 (self):
-
-                """ Query all artists """
-                query = " SELECT nmm:artistName(?artist) AS ?artistTitle ?albumTitle COUNT(?album) AS ?album ?artist \
+    def p_test_audio_05(self):
+        """ Query all artists """
+        query = " SELECT nmm:artistName(?artist) AS ?artistTitle ?albumTitle COUNT(?album) AS ?album ?artist \
                        WHERE {  \
                        ?song a nmm:MusicPiece  .\
                        ?song nmm:performer ?artist . \
@@ -631,18 +617,18 @@ class audio(TestUpdate):
                        OPTIONAL {?album nmm:albumTitle ?albumTitle .\
                         } } } GROUP BY ?artist  ORDER BY ?artist LIMIT 5000"
 
-                start=time.time()
-                print query
-                result = self.resources.SparqlQuery(query,timeout= 600)
+        start = time.time()
+        print query
+        result = self.resources.SparqlQuery(query, timeout=600)
 
-                elapse =time.time()-start
-                print "Time taken for querying 5000 artists  = %s " %elapse
-                print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying 5000 artists  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_audio_06 (self) :
-                """Query 100 albums """
+    def p_test_audio_06(self):
+        """Query 100 albums """
 
-                query = "SELECT DISTINCT nmm:albumTitle(?album) AS ?Album  ?Artist  COUNT(?Songs)  AS ?Songs  ?album \
+        query = "SELECT DISTINCT nmm:albumTitle(?album) AS ?Album  ?Artist  COUNT(?Songs)  AS ?Songs  ?album \
                        WHERE { { ?Songs a nmm:MusicPiece .\
                        ?Songs nmm:musicAlbum ?album .\
                        OPTIONAL{ \
@@ -650,19 +636,18 @@ class audio(TestUpdate):
                        OPTIONAL{?perf nmm:artistName ?Artist .\
                        }}}}GROUP BY ?album ORDER BY ?album LIMIT 100"
 
-                start=time.time()
-
-                result = self.resources.SparqlQuery(query)
+        start = time.time()
 
-                elapse =time.time()-start
-                print "Time taken for querying 100 albums  = %s " %elapse
-                print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
-       def p_test_audio_07 (self):
+        elapse = time.time() - start
+        print "Time taken for querying 100 albums  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-                """ Query 100 artists """
+    def p_test_audio_07(self):
+        """ Query 100 artists """
 
-                query = "SELECT nmm:artistName(?artist) AS ?artistTitle ?albumTitle COUNT(?album) AS\
+        query = "SELECT nmm:artistName(?artist) AS ?artistTitle ?albumTitle COUNT(?album) AS\
                            ?album ?artist \
                           WHERE {  \
                           ?song a nmm:MusicPiece  .\
@@ -671,77 +656,72 @@ class audio(TestUpdate):
                            OPTIONAL {?album nmm:albumTitle ?albumTitle .\
                           }}} GROUP BY ?artist  ORDER BY ?artist  LIMIT 100"""
 
-                start=time.time()
-                print query
-                result = self.resources.SparqlQuery(query,timeout=600)
-
-                elapse =time.time()-start
-                print "Time taken for querying 100 artist  = %s " %elapse
+        start = time.time()
+        print query
+        result = self.resources.SparqlQuery(query, timeout=600)
 
-        def p_test_audio_08(self):
+        elapse = time.time() - start
+        print "Time taken for querying 100 artist  = %s " % elapse
 
-                """Query all albums also count of songs in each album """
-               """simplified version of test_audio_02  """
+    def p_test_audio_08(self):
+        """Query all albums also count of songs in each album """
+        """simplified version of test_audio_02  """
 
-               query= "SELECT nie:title(?a) COUNT(?songs) WHERE { \
+        query = "SELECT nie:title(?a) COUNT(?songs) WHERE { \
                        ?a a nmm:MusicAlbum . \
                        ?mp nmm:musicAlbum ?a ; \
                        nie:title ?songs . } \
                        GROUP BY ?a ORDER BY DESC(nie:title(?a))"
 
-               start=time.time()
+        start = time.time()
 
-               result = self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying all albums and count their songs  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying all albums and count their songs  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_audio_09 (self):
-
-                """ Query all artists """
-               """simplified version of test_audio_05  """
-               query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
+    def p_test_audio_09(self):
+        """ Query all artists """
+        """simplified version of test_audio_05  """
+        query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
                                ?song a nmm:MusicPiece . \
                                ?song nmm:performer ?artist . \
                                OPTIONAL { ?song nmm:musicAlbum ?album . } } \
                                GROUP BY ?artist ORDER BY ?artist LIMIT 5000"
 
-                start=time.time()
-                print query
-                result = self.resources.SparqlQuery(query,timeout= 600)
-
-                elapse =time.time()-start
-                print "Time taken for querying 5000 artists  = %s " %elapse
-                print "no. of items retrieved: %d" %len(result)
+        start = time.time()
+        print query
+        result = self.resources.SparqlQuery(query, timeout=600)
 
-       def p_test_audio_10 (self):
+        elapse = time.time() - start
+        print "Time taken for querying 5000 artists  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-                """ Query 100 artists """
-               """simplified version of test_audio_07  """
+    def p_test_audio_10(self):
+        """ Query 100 artists """
+        """simplified version of test_audio_07  """
 
-               query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
+        query = "SELECT nmm:artistName(?artist) nmm:albumTitle(?album) COUNT(?album) ?artist WHERE { \
                        ?song a nmm:MusicPiece . \
                        ?song nmm:performer ?artist . \
                        OPTIONAL  { ?song nmm:musicAlbum ?album . } } \
                        GROUP BY ?artist ORDER BY ?artist LIMIT 100"
 
-                start=time.time()
-                print query
-                result = self.resources.SparqlQuery(query,timeout=600)
+        start = time.time()
+        print query
+        result = self.resources.SparqlQuery(query, timeout=600)
 
-                elapse =time.time()-start
-                print "Time taken for querying 100 artist  = %s " %elapse
+        elapse = time.time() - start
+        print "Time taken for querying 100 artist  = %s " % elapse
 
 
 class gallery(TestUpdate):
 
+    def p_test_gallery_01(self):
+        """ Querying for all Images and Videos """
 
-        def p_test_gallery_01(self):
-
-               """ Querying for all Images and Videos """
-
-               query = "SELECT ?url ?filename ?modified ?_width ?_height \
+        query = "SELECT ?url ?filename ?modified ?_width ?_height \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -751,19 +731,18 @@ class gallery(TestUpdate):
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 10000"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query, timeout=25)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying all images and videos  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query, timeout=25)
 
-        def p_test_gallery_02(self):
+        elapse = time.time() - start
+        print "Time taken for querying all images and videos  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """ Querying for all Images and Videos without OPTIONALS"""
+    def p_test_gallery_02(self):
+        """ Querying for all Images and Videos without OPTIONALS"""
 
-               query = "SELECT ?url ?filename ?modified \
+        query = "SELECT ?url ?filename ?modified \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -771,19 +750,18 @@ class gallery(TestUpdate):
                      nfo:fileLastModified ?modified .}\
                      ORDER BY ?modified LIMIT 10000"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query, timeout=25)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying all images and videos without OPTIONALS  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query, timeout=25)
 
-        def p_test_gallery_03(self):
+        elapse = time.time() - start
+        print "Time taken for querying all images and videos without OPTIONALS  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """ Querying for 500 Images and Videos """
+    def p_test_gallery_03(self):
+        """ Querying for 500 Images and Videos """
 
-               query = "SELECT ?url ?filename ?modified ?_width ?_height \
+        query = "SELECT ?url ?filename ?modified ?_width ?_height \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -792,20 +770,18 @@ class gallery(TestUpdate):
                      OPTIONAL    {?media nfo:width ?_width. } \
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 500"
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query, timeout=25)
+        result = self.resources.SparqlQuery(query, timeout=25)
 
-               elapse =time.time()-start
-               print "Time taken for querying 500 images and videos  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying 500 images and videos  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_gallery_04(self):
+        """ Querying for 500 Images and Videos without OPTIONALS"""
 
-        def p_test_gallery_04(self):
-
-               """ Querying for 500 Images and Videos without OPTIONALS"""
-
-               query = "SELECT ?url ?filename ?modified \
+        query = "SELECT ?url ?filename ?modified \
                     WHERE { \
                      ?media a nfo:Visual; \
                      nie:url ?url;\
@@ -813,90 +789,80 @@ class gallery(TestUpdate):
                      nfo:fileLastModified ?modified .} \
                      ORDER BY ?modified LIMIT 500"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query, timeout=25)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying 100 images and videos without OPTIONALS  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query, timeout=25)
 
+        elapse = time.time() - start
+        print "Time taken for querying 100 images and videos without OPTIONALS  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_gallery_05(self):
+        """ Querying for images, videos which have tag TEST """
 
-        def p_test_gallery_05(self):
-
-               """ Querying for images, videos which have tag TEST """
-
-               query  = "SELECT ?media \
+        query  = "SELECT ?media \
                         WHERE { \
                        ?media a nfo:Visual; \
                         nao:hasTag ?tag . \
                        ?tag nao:prefLabel 'TEST' }"
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying all images and videos with a tag  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for querying all images and videos with a tag  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_gallery_06(self):
-
-               """ Querying for 500 images, videos which have tag TEST """
-               query  = "SELECT ?media \
+    def p_test_gallery_06(self):
+        """ Querying for 500 images, videos which have tag TEST """
+        query  = "SELECT ?media \
                         WHERE { \
                        ?media a nfo:Visual; \
                         nao:hasTag ?tag . \
                        ?tag nao:prefLabel 'TEST' } LIMIT 500"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying 500 images and videos with a tag  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        start = time.time()
 
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_gallery_07(self):
+        elapse = time.time() - start
+        print "Time taken for querying 500 images and videos with a tag  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """Querying all images and videos taken with phone's camera """
+    def p_test_gallery_07(self):
+        """Querying all images and videos taken with phone's camera """
 
-               query = "SELECT ?media WHERE { \
+        query = "SELECT ?media WHERE { \
                        ?media a nfo:Visual; \
                         nfo:equipment [ a nfo:Equipment; nfo:make 'NOKIA' ] }"
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying all images and videos taken with phone's camera  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying all images and videos taken with phone's camera  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_gallery_08(self):
+        """Querying 500 images and videos taken with phone's camera """
 
-        def p_test_gallery_08(self):
-
-               """Querying 500 images and videos taken with phone's camera """
-
-               query = "SELECT ?media WHERE { \
+        query = "SELECT ?media WHERE { \
                        ?media a nfo:Visual; \
                         nfo:equipment [ a nfo:Equipment; nfo:make 'NOKIA' ] } LIMIT 500"
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken for querying 500 images and videos taken with phone's camera  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for querying 500 images and videos taken with phone's camera  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_gallery_09(self):
+    def p_test_gallery_09(self):
+        """Querying all images """
 
-               """Querying all images """
-
-               query = " SELECT ?url ?height ?width ?mime ?camera ?exposuretime ?fnumber ?focallength \
+        query = " SELECT ?url ?height ?width ?mime ?camera ?exposuretime ?fnumber ?focallength \
                         WHERE {\
                        ?image a nmm:Photo; \
                         nie:url ?url; \
@@ -908,22 +874,18 @@ class gallery(TestUpdate):
                        OPTIONAL { ?image nmm:fnumber ?fnumber .}\
                        OPTIONAL { ?image nmm:focalLength ?focallength .}} LIMIT 10000"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken for querying all images = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
-
-
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_gallery_10(self):
+        elapse = time.time() - start
+        print "Time taken for querying all images = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """Querying 500 images """
+    def p_test_gallery_10(self):
+        """Querying 500 images """
 
-               query = " SELECT ?url ?height ?width ?mime ?camera ?exposuretime ?fnumber ?focallength \
+        query = " SELECT ?url ?height ?width ?mime ?camera ?exposuretime ?fnumber ?focallength \
                         WHERE {\
                        ?image a nmm:Photo; \
                         nie:url ?url; \
@@ -935,21 +897,18 @@ class gallery(TestUpdate):
                        OPTIONAL { ?image nmm:fnumber ?fnumber .}\
                        OPTIONAL { ?image nmm:focalLength ?focallength .}} LIMIT 500"
 
+        start = time.time()
 
-               start=time.time()
+        result = self.resources.SparqlQuery(query)
 
-               result=self.resources.SparqlQuery(query)
+        elapse = time.time() - start
+        print "Time taken for querying 500 images = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               elapse =time.time()-start
-               print "Time taken for querying 500 images = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+    def p_test_gallery_11(self):
+        """ Querying for 500 Images and Videos with UNION for them """
 
-
-        def p_test_gallery_11(self):
-
-               """ Querying for 500 Images and Videos with UNION for them """
-
-               query = "SELECT ?url ?filename ?modified ?_width ?_height \
+        query = "SELECT ?url ?filename ?modified ?_width ?_height \
                     WHERE { \
                      {?media a nmm:Photo.} UNION {?media a nmm:Video.} \
                      ?media nie:url ?url.\
@@ -958,178 +917,164 @@ class gallery(TestUpdate):
                      OPTIONAL    {?media nfo:width ?_width. } \
                      OPTIONAL   { ?media nfo:height ?_height .} } \
                      ORDER BY ?modified LIMIT 500"
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query,timeout=1000)
-
-               elapse =time.time()-start
-               print "Time taken for querying 500 images and videos  = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
-
-        def p_test_gallery_12(self):
+        start = time.time()
 
-               """Querying all images """
-               """simplified version of test_gallery_09 """
+        result = self.resources.SparqlQuery(query, timeout=1000)
 
-               query = "SELECT nie:url(?image) nfo:height(?image) nfo:width(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 10000"
+        elapse = time.time() - start
+        print "Time taken for querying 500 images and videos  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_gallery_12(self):
+        """Querying all images """
+        """simplified version of test_gallery_09 """
 
-               start=time.time()
+        query = "SELECT nie:url(?image) nfo:height(?image) nfo:width(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 10000"
 
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken for querying all images = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_gallery_13(self):
+        elapse = time.time() - start
+        print "Time taken for querying all images = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """Querying 500 images """
-               """simplified version of test_gallery_10 """
+    def p_test_gallery_13(self):
+        """Querying 500 images """
+        """simplified version of test_gallery_10 """
 
-               query = "SELECT nie:url(?image) nfo:height(?image) nfo:width(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 500"
+        query = "SELECT nie:url(?image) nfo:height(?image) nfo:width(?image) nie:mimeType(?image) nfo:model (nfo:equipment (?image)) nmm:exposureTime(?image) nmm:fnumber(?image) nmm:focalLength(?image) WHERE { ?image a nmm:Photo . } limit 500"
 
+        start = time.time()
 
-               start=time.time()
+        result = self.resources.SparqlQuery(query)
 
-               result=self.resources.SparqlQuery(query)
+        elapse = time.time() - start
+        print "Time taken for querying 500 images = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               elapse =time.time()-start
-               print "Time taken for querying 500 images = %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
 
+class ftsmatch (TestUpdate):
 
+    def p_test_fts_01(self):
+        """Making a search for artist"""
 
-
-
-class ftsmatch (TestUpdate) :
-
-        def p_test_fts_01 (self):
-            """Making a search for artist"""
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'ArtistName' }"
-            start=time.time()
-
-            result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_fts_02 (self) :
-            """ Searching for a word """
-            query = " SELECT ?uri WHERE { \
+    def p_test_fts_02(self):
+        """ Searching for a word """
+        query = " SELECT ?uri WHERE { \
                      ?uri a nie:InformationElement ; \
                     fts:match 'WordInPlainText' . } "
 
-            start=time.time()
+        start = time.time()
 
-            result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-            elapse =time.time()-start
-            print "Time taken for searching a word  = %s " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for searching a word  = %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_fts_03 (self):
-            """Making a search for artist"""
+    def p_test_fts_03(self):
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'ArtistNa*'}"
-            start=time.time()
+        start = time.time()
 
-            result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_fts_04 (self):
-            """Making a search for artist"""
+    def p_test_fts_04(self):
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'Art*' }"
-            start=time.time()
+        start = time.time()
 
-            result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_fts_05 (self):
-            """Making a search for artist"""
+    def p_test_fts_05(self):
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'Ar*'}"
-            start=time.time()
-
-            result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_fts_06 (self):
-            """Making a search for artist"""
+    def p_test_fts_06(self):
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A*' }"
-            start=time.time()
+        start = time.time()
 
-            result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no.of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no.of items retrieved: %d" % len(result)
 
-       def p_test_fts_07 (self):
+    def p_test_fts_07(self):
+        """Making a search for artist"""
 
-            """Making a search for artist"""
-
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A* p*' }"
-            start=time.time()
+        start = time.time()
 
-            result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files  " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files  " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_fts_08 (self):
-            """Making a search for artist"""
+    def p_test_fts_08(self):
+        """Making a search for artist"""
 
-            query = "  SELECT ?uri WHERE { \
+        query = "  SELECT ?uri WHERE { \
                       ?uri a nie:InformationElement ; \
                       fts:match 'A* p* k*' }"
-            start=time.time()
-
-            result=self.resources.SparqlQuery(query)
-
-            elapse =time.time()-start
-            print "Time taken for searching an artist in 10000 music files %s " %elapse
-            print "no. of items retrieved: %d" %len(result)
+        start = time.time()
 
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken for searching an artist in 10000 music files %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 
-class content_manager (TestUpdate) :
+class content_manager (TestUpdate):
 
-        def p_test_cm_01 (self):
+    def p_test_cm_01(self):
+        """Get all the contacts that match fts and get relevant UI info for them"""
 
-
-               """Get all the contacts that match fts and get relevant UI info for them"""
-
-               query = "SELECT DISTINCT ?url ?photourl ?imstatus tracker:coalesce(?family, ?given, ?orgname, ?nick, ?email, ?phone, ?blog) \
+        query = "SELECT DISTINCT ?url ?photourl ?imstatus tracker:coalesce(?family, ?given, ?orgname, ?nick, ?email, ?phone, ?blog) \
                WHERE { { ?url a nco:PersonContact.?url fts:match 'fami*'. } \
                UNION { ?url a nco:PersonContact. ?url nco:hasEmailAddress ?add.?add fts:match 'fami*'. } \
                UNION { ?url a nco:PersonContact. ?url nco:hasPostalAddress ?post.?post fts:match 'fami*'. } \
@@ -1146,65 +1091,54 @@ class content_manager (TestUpdate) :
                ORDER BY ?relevance \
                LIMIT 100"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_cm_02(self):
+        """Get all the contacts that match fts and get relevant UI info for them"""
 
-        def p_test_cm_02 (self):
-
-
-               """Get all the contacts that match fts and get relevant UI info for them"""
-
-               query = "SELECT DISTINCT ?url tracker:coalesce(nco:nameFamily(?url), nco:nameGiven(?url), 'unknown') \
+        query = "SELECT DISTINCT ?url tracker:coalesce(nco:nameFamily(?url), nco:nameGiven(?url), 'unknown') \
                WHERE { \
                { ?url a nco:PersonContact.?url fts:match 'fami*'. } \
                UNION { ?url a nco:PersonContact. ?url nco:hasEmailAddress ?add.?add fts:match 'fami*'. } \
                UNION { ?url a nco:PersonContact. ?url nco:hasPostalAddress ?post.?post fts:match 'fami*'. } \
                } LIMIT 100"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
-
+        result = self.resources.SparqlQuery(query)
 
-        def p_test_cm_03 (self):
+        elapse = time.time() - start
+        print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
+    def p_test_cm_03(self):
+        """Get all the messages """
 
-               """Get all the messages """
-
-               query = "SELECT DISTINCT ?url nie:title(?url) \
+        query = "SELECT DISTINCT ?url nie:title(?url) \
                WHERE { \
                { ?url a nmo:Message. ?url fts:match 'fami*'. } \
                UNION { ?url a nmo:Message. ?url nmo:from ?from . ?from fts:match 'fami*'. } \
                UNION { ?url a nmo:Message. ?url nmo:recipient ?to . ?to fts:match 'fami*'. } \
                } LIMIT 100"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_cm_04 (self):
+    def p_test_cm_04(self):
+        """Get all the messages """
 
-               """Get all the messages """
-
-               query = "SELECT ?url ?fileLastModified ?relevance ?fileName ?mimeType ?url2 \
+        query = "SELECT ?url ?fileLastModified ?relevance ?fileName ?mimeType ?url2 \
                        WHERE { \
                        ?url a nfo:Image .\
                        ?url nfo:fileLastModified ?fileLastModified. \
@@ -1214,21 +1148,18 @@ class content_manager (TestUpdate) :
                        OPTIONAL { ?url maemo:relevance ?relevance. } \
                        } ORDER BY ?_fileName"
 
+        start = time.time()
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 100 contacts that match fts and get relevant UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_cm_05 (self):
+    def p_test_cm_05(self):
+        """Get all the matching data """
 
-               """Get all the matching data """
-
-               query = "SELECT DISTINCT ?glob_url \
+        query = "SELECT DISTINCT ?glob_url \
                        WHERE \
                        { \
                          { SELECT ?url as ?glob_url \
@@ -1277,20 +1208,18 @@ class content_manager (TestUpdate) :
                          } \
                        LIMIT 100"
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
-
-               elapse =time.time()-start
-               print "Time taken to get 100 content items that match fts without UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        start = time.time()
 
+        result = self.resources.SparqlQuery(query)
 
-       def p_test_cm_06 (self):
+        elapse = time.time() - start
+        print "Time taken to get 100 content items that match fts without UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               """Get all the matching data """
+    def p_test_cm_06(self):
+        """Get all the matching data """
 
-               query = "SELECT DISTINCT ?glob_url ?first ?second \
+        query = "SELECT DISTINCT ?glob_url ?first ?second \
                        WHERE \
                        { \
                          { SELECT ?url as ?glob_url \
@@ -1366,20 +1295,20 @@ class content_manager (TestUpdate) :
                          } \
                        LIMIT 100"
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 100 content items that match fts and get relevant UI info for them %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 100 content items that match fts and get relevant UI info for them %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
 
-class contacts (TestUpdate) :
+class contacts (TestUpdate):
 
-        def p_test_contacts_01 (self):
+    def p_test_contacts_01(self):
 
-               query = " \
+        query = " \
 SELECT DISTINCT \
   ?_contact \
   ?_Avatar_ImageUrl \
@@ -1419,17 +1348,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts basic information (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts basic information (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-
-       def p_test_contacts_02 (self):
-               query = " \
+    def p_test_contacts_02(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact \
   ?_Avatar_ImageUrl \
@@ -1458,17 +1386,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts basic information (modified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts basic information (modified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_03 (self):
-               query = " \
+    def p_test_contacts_03(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact \
   ?_Address_Country \
@@ -1543,17 +1470,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts address information (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts address information (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_04 (self):
-               query = " \
+    def p_test_contacts_04(self):
+        query = " \
 SELECT \
   ?contact \
   nco:country(?postal) \
@@ -1575,16 +1501,16 @@ WHERE \
 ORDER BY ?contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts address information (modified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts address information (modified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_05 (self):
-               query = " \
+    def p_test_contacts_05(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact ?_EmailAddress ?_EmailAddress_EmailAddress \
   bound(?_EmailAddress_Context_Work) AS ?_EmailAddress_Context_Work_IsBound \
@@ -1607,16 +1533,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts email information (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts email information (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_06 (self):
-               query = " \
+    def p_test_contacts_06(self):
+        query = " \
 SELECT \
   ?contact \
   ?email \
@@ -1639,16 +1565,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts email information (modified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts email information (modified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_07 (self):
-               query = " \
+    def p_test_contacts_07(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact \
   ?_OnlineAccount \
@@ -1697,17 +1623,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts online information (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts online information (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-
-       def p_test_contacts_08 (self):
-               query = " \
+    def p_test_contacts_08(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact \
   ?_OnlineAccount \
@@ -1733,16 +1658,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts online information (modified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts online information (modified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_09 (self):
-               query = " \
+    def p_test_contacts_09(self):
+        query = " \
 SELECT DISTINCT \
   ?_contact ?_PhoneNumber ?_PhoneNumber_PhoneNumber \
   bound(?_PhoneNumber_SubTypes_BulletinBoardSystem) AS ?_PhoneNumber_SubTypes_BulletinBoardSystem_IsBound \
@@ -1864,16 +1789,16 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts phone number information (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts phone number information (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-       def p_test_contacts_10 (self):
-               query = " \
+    def p_test_contacts_10(self):
+        query = " \
 SELECT DISTINCT \
   ?contact \
   ?phoneNumber \
@@ -1894,18 +1819,19 @@ WHERE \
 ORDER BY ?_contact LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
+
+        result = self.resources.SparqlQuery(query)
 
-               result=self.resources.SparqlQuery(query)
+        elapse = time.time() - start
+        print "Time taken to get 50 contacts phone number information (modified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 contacts phone number information (modified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
 
-class location (TestUpdate) :
+class location (TestUpdate):
 
-        def p_test_location_01 (self):
-               query = " \
+    def p_test_location_01(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon ?cAlt ?cRad \
@@ -1998,16 +1924,16 @@ WHERE { \
 } ORDER BY ASC(?name) LIMIT 50 \
 "
 
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get 50 landmarks (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get 50 landmarks (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_02 (self):
-               query = " \
+    def p_test_location_02(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon ?cAlt ?cRad \
@@ -2101,17 +2027,16 @@ WHERE { \
 } ORDER BY ASC(?name) LIMIT \
 "
 
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken to get 50 landmarks within coords (original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 50 landmarks within coords (original) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_03 (self):
-               query = " \
+    def p_test_location_03(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon ?cAlt ?cRad \
@@ -2207,17 +2132,16 @@ WHERE { \
        tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
 } ORDER BY ASC(?distance) LIMIT 50 \
 "
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get max 50 landmarks within certain range with bounding box (original) 
%s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get max 50 landmarks within certain range with bounding box (original) %s " % 
elapse
+        print "no. of items retrieved: %d" % len(result)
 
-
-        def p_test_location_04 (self):
-               query = " \
+    def p_test_location_04(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon ?cAlt ?cRad \
@@ -2311,16 +2235,16 @@ WHERE { \
   FILTER(tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
 } ORDER BY ASC(?distance) LIMIT 50 \
 "
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get max 50 landmarks within certain range without bounding box 
(original) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get max 50 landmarks within certain range without bounding box (original) %s " 
% elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_05 (self):
-               query = " \
+    def p_test_location_05(self):
+        query = " \
 SELECT \
   ?urn \
   mlo:latitude(?point) mlo:longitude(?point) mlo:altitude(?point) mlo:radius(?point) \
@@ -2333,17 +2257,16 @@ WHERE { \
   ?location mlo:asGeoPoint ?point . \
 } ORDER BY ASC(?name) LIMIT 50 \
 "
-               start=time.time()
-
-               result=self.resources.SparqlQuery(query)
+        start = time.time()
 
-               elapse =time.time()-start
-               print "Time taken to get 50 landmarks (simplified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        result = self.resources.SparqlQuery(query)
 
+        elapse = time.time() - start
+        print "Time taken to get 50 landmarks (simplified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_06 (self):
-               query = " \
+    def p_test_location_06(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -2359,16 +2282,16 @@ WHERE { \
   FILTER(?cLat >= 39.16 && ?cLat <= 40.17 && ?cLon >= 63.42 && ?cLon <= 64.96) \
 } ORDER BY ASC(?name) LIMIT 50 \
 "
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get max 50 landmarks within coords (simplified) %s " %elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get max 50 landmarks within coords (simplified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_07 (self):
-               query = " \
+    def p_test_location_07(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -2387,16 +2310,16 @@ WHERE { \
        tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
 } ORDER BY ASC(?distance) LIMIT 50 \
 "
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get max 50 landmarks within range with bounding box (simplified) %s " 
%elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get max 50 landmarks within range with bounding box (simplified) %s " % elapse
+        print "no. of items retrieved: %d" % len(result)
 
-        def p_test_location_08 (self):
-               query = " \
+    def p_test_location_08(self):
+        query = " \
 SELECT \
   ?urn \
   ?cLat ?cLon mlo:altitude(?point) mlo:radius(?point) \
@@ -2413,14 +2336,13 @@ WHERE { \
   FILTER(tracker:haversine-distance(xsd:double(?cLat),xsd:double(39.50),xsd:double(?cLon),xsd:double(64.50)) <= 25000) \
 } ORDER BY ASC(?distance) LIMIT 50 \
 "
-               start=time.time()
+        start = time.time()
 
-               result=self.resources.SparqlQuery(query)
+        result = self.resources.SparqlQuery(query)
 
-               elapse =time.time()-start
-               print "Time taken to get max 50 landmarks within range without bounding box (simplified) %s " 
%elapse
-               print "no. of items retrieved: %d" %len(result)
+        elapse = time.time() - start
+        print "Time taken to get max 50 landmarks within range without bounding box (simplified) %s " % 
elapse
+        print "no. of items retrieved: %d" % len(result)
 
 if __name__ == "__main__":
-        unittest.main()
-
+    unittest.main()
diff --git a/tests/functional-tests/unittest2/__init__.py b/tests/functional-tests/unittest2/__init__.py
index bf03e43..16acd7c 100644
--- a/tests/functional-tests/unittest2/__init__.py
+++ b/tests/functional-tests/unittest2/__init__.py
@@ -40,7 +40,7 @@ __all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
 
 # To use the local copy!
 import sys
-sys.path.insert (0, "./common")
+sys.path.insert(0, "./common")
 
 from unittest2.result import TestResult
 from unittest2.case import (
@@ -63,7 +63,7 @@ except ImportError:
     # Compatibility with platforms that don't have the signal module
     pass
 else:
-    __all__.extend(['installHandler', 'registerResult', 'removeResult', 
+    __all__.extend(['installHandler', 'registerResult', 'removeResult',
                     'removeHandler'])
 
 # deprecated
diff --git a/tests/functional-tests/unittest2/case.py b/tests/functional-tests/unittest2/case.py
index 058a51f..47a46fe 100644
--- a/tests/functional-tests/unittest2/case.py
+++ b/tests/functional-tests/unittest2/case.py
@@ -20,6 +20,7 @@ __unittest = True
 
 
 class SkipTest(Exception):
+
     """
     Raise this exception in a test to skip it.
 
@@ -27,7 +28,9 @@ class SkipTest(Exception):
     instead of raising this directly.
     """
 
+
 class _ExpectedFailure(Exception):
+
     """
     Raise this when a test is expected to fail.
 
@@ -39,14 +42,18 @@ class _ExpectedFailure(Exception):
         Exception.__init__(self)
         self.exc_info = exc_info
 
+
 class _UnexpectedSuccess(Exception):
+
     """
     The test was supposed to fail, but it didn't!
     """
 
+
 def _id(obj):
     return obj
 
+
 def skip(reason):
     """
     Unconditionally skip a test.
@@ -57,12 +64,13 @@ def skip(reason):
             def skip_wrapper(*args, **kwargs):
                 raise SkipTest(reason)
             test_item = skip_wrapper
-        
+
         test_item.__unittest_skip__ = True
         test_item.__unittest_skip_why__ = reason
         return test_item
     return decorator
 
+
 def skipIf(condition, reason):
     """
     Skip a test if the condition is true.
@@ -71,6 +79,7 @@ def skipIf(condition, reason):
         return skip(reason)
     return _id
 
+
 def skipUnless(condition, reason):
     """
     Skip a test unless the condition is true.
@@ -92,6 +101,7 @@ def expectedFailure(func):
 
 
 class _AssertRaisesContext(object):
+
     """A context manager used to implement TestCase.assertRaises* methods."""
 
     def __init__(self, expected, test_case, expected_regexp=None):
@@ -113,7 +123,7 @@ class _AssertRaisesContext(object):
         if not issubclass(exc_type, self.expected):
             # let unexpected exceptions pass through
             return False
-        self.exception = exc_value # store for later retrieval
+        self.exception = exc_value  # store for later retrieval
         if self.expected_regexp is None:
             return True
 
@@ -122,25 +132,25 @@ class _AssertRaisesContext(object):
             expected_regexp = re.compile(expected_regexp)
         if not expected_regexp.search(str(exc_value)):
             raise self.failureException('"%s" does not match "%s"' %
-                     (expected_regexp.pattern, str(exc_value)))
+                                        (expected_regexp.pattern, str(exc_value)))
         return True
 
 
 class _TyepEqualityDict(object):
-    
+
     def __init__(self, testcase):
         self.testcase = testcase
         self._store = {}
-    
+
     def __setitem__(self, key, value):
         self._store[key] = value
-    
+
     def __getitem__(self, key):
         value = self._store[key]
         if isinstance(value, basestring):
             return getattr(self.testcase, value)
         return value
-    
+
     def get(self, key, default=None):
         if key in self._store:
             return self[key]
@@ -148,6 +158,7 @@ class _TyepEqualityDict(object):
 
 
 class TestCase(unittest.TestCase):
+
     """A class whose instances are single test cases.
 
     By default, the test code itself should be placed in a method named
@@ -180,9 +191,9 @@ class TestCase(unittest.TestCase):
     # to any explicit message passed.
 
     longMessage = True
-    
+
     # Attribute used by TestSuite for classSetUp
-    
+
     _classSetupFailed = False
 
     def __init__(self, methodName='runTest'):
@@ -195,8 +206,8 @@ class TestCase(unittest.TestCase):
         try:
             testMethod = getattr(self, methodName)
         except AttributeError:
-            raise ValueError("no such test method in %s: %s" % \
-                  (self.__class__, methodName))
+            raise ValueError("no such test method in %s: %s" %
+                             (self.__class__, methodName))
         self._testMethodDoc = testMethod.__doc__
         self._cleanups = []
 
@@ -236,7 +247,7 @@ class TestCase(unittest.TestCase):
 
     def setUp(self):
         "Hook method for setting up the test fixture before exercising it."
-    
+
     @classmethod
     def setUpClass(cls):
         "Hook method for setting up class fixture before running tests in the class."
@@ -264,7 +275,6 @@ class TestCase(unittest.TestCase):
         doc = self._testMethodDoc
         return doc and doc.split("\n")[0].strip() or None
 
-
     def id(self):
         return "%s.%s" % (strclass(self.__class__), self._testMethodName)
 
@@ -286,13 +296,14 @@ class TestCase(unittest.TestCase):
     def __repr__(self):
         return "<%s testMethod=%s>" % \
                (strclass(self.__class__), self._testMethodName)
-    
+
     def _addSkip(self, result, reason):
         addSkip = getattr(result, 'addSkip', None)
         if addSkip is not None:
             addSkip(self, reason)
         else:
-            warnings.warn("Use of a TestResult without an addSkip method is deprecated", 
+            warnings.warn(
+                "Use of a TestResult without an addSkip method is deprecated",
                           DeprecationWarning, 2)
             result.addSuccess(self)
 
@@ -306,11 +317,11 @@ class TestCase(unittest.TestCase):
 
         self._resultForDoCleanups = result
         result.startTest(self)
-        
+
         testMethod = getattr(self, self._testMethodName)
-        
-        if (getattr(self.__class__, "__unittest_skip__", False) or 
-            getattr(testMethod, "__unittest_skip__", False)):
+
+        if (getattr(self.__class__, "__unittest_skip__", False) or
+                getattr(testMethod, "__unittest_skip__", False)):
             # If the class or method was skipped.
             try:
                 skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
@@ -333,19 +344,23 @@ class TestCase(unittest.TestCase):
                 except self.failureException:
                     result.addFailure(self, sys.exc_info())
                 except _ExpectedFailure, e:
-                    addExpectedFailure = getattr(result, 'addExpectedFailure', None)
+                    addExpectedFailure = getattr(
+                        result, 'addExpectedFailure', None)
                     if addExpectedFailure is not None:
                         addExpectedFailure(self, e.exc_info)
-                    else: 
-                        warnings.warn("Use of a TestResult without an addExpectedFailure method is 
deprecated", 
+                    else:
+                        warnings.warn(
+                            "Use of a TestResult without an addExpectedFailure method is deprecated",
                                       DeprecationWarning)
                         result.addSuccess(self)
                 except _UnexpectedSuccess:
-                    addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
+                    addUnexpectedSuccess = getattr(
+                        result, 'addUnexpectedSuccess', None)
                     if addUnexpectedSuccess is not None:
                         addUnexpectedSuccess(self)
                     else:
-                        warnings.warn("Use of a TestResult without an addUnexpectedSuccess method is 
deprecated", 
+                        warnings.warn(
+                            "Use of a TestResult without an addUnexpectedSuccess method is deprecated",
                                       DeprecationWarning)
                         result.addFailure(self, sys.exc_info())
                 except SkipTest, e:
@@ -434,7 +449,6 @@ class TestCase(unittest.TestCase):
         except UnicodeDecodeError:
             return '%s : %s' % (safe_str(standardMsg), safe_str(msg))
 
-
     def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
         """Fail unless an exception of class excClass is thrown
            by callableObj when invoked with arguments args and keyword
@@ -464,8 +478,8 @@ class TestCase(unittest.TestCase):
             callableObj(*args, **kwargs)
         except excClass:
             return
-        
-        if hasattr(excClass,'__name__'):
+
+        if hasattr(excClass, '__name__'):
             excName = excClass.__name__
         else:
             excName = str(excClass)
@@ -514,8 +528,8 @@ class TestCase(unittest.TestCase):
            operator.
         """
         if not first != second:
-            msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first), 
-                                                           safe_repr(second)))
+            msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
+                                                         safe_repr(second)))
             raise self.failureException(msg)
 
     def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None):
@@ -535,24 +549,24 @@ class TestCase(unittest.TestCase):
             return
         if delta is not None and places is not None:
             raise TypeError("specify delta or places not both")
-        
+
         if delta is not None:
             if abs(first - second) <= delta:
                 return
-        
-            standardMsg = '%s != %s within %s delta' % (safe_repr(first), 
-                                                        safe_repr(second), 
+
+            standardMsg = '%s != %s within %s delta' % (safe_repr(first),
+                                                        safe_repr(second),
                                                         safe_repr(delta))
         else:
             if places is None:
                 places = 7
-                
-            if round(abs(second-first), places) == 0:
+
+            if round(abs(second - first), places) == 0:
                 return
-        
-            standardMsg = '%s != %s within %r places' % (safe_repr(first), 
-                                                          safe_repr(second), 
-                                                          places)
+
+            standardMsg = '%s != %s within %r places' % (safe_repr(first),
+                                                         safe_repr(second),
+                                                         places)
         msg = self._formatMessage(msg, standardMsg)
         raise self.failureException(msg)
 
@@ -572,15 +586,15 @@ class TestCase(unittest.TestCase):
         if delta is not None:
             if not (first == second) and abs(first - second) > delta:
                 return
-            standardMsg = '%s == %s within %s delta' % (safe_repr(first), 
+            standardMsg = '%s == %s within %s delta' % (safe_repr(first),
                                                         safe_repr(second),
                                                         safe_repr(delta))
         else:
             if places is None:
                 places = 7
-            if not (first == second) and round(abs(second-first), places) != 0:
+            if not (first == second) and round(abs(second - first), places) != 0:
                 return
-            standardMsg = '%s == %s within %r places' % (safe_repr(first), 
+            standardMsg = '%s == %s within %r places' % (safe_repr(first),
                                                          safe_repr(second),
                                                          places)
 
@@ -646,14 +660,14 @@ class TestCase(unittest.TestCase):
             len1 = len(seq1)
         except (TypeError, NotImplementedError):
             differing = 'First %s has no length.    Non-sequence?' % (
-                    seq_type_name)
+                seq_type_name)
 
         if differing is None:
             try:
                 len2 = len(seq2)
             except (TypeError, NotImplementedError):
                 differing = 'Second %s has no length.    Non-sequence?' % (
-                        seq_type_name)
+                    seq_type_name)
 
         if differing is None:
             if seq1 == seq2:
@@ -672,30 +686,32 @@ class TestCase(unittest.TestCase):
                 try:
                     item1 = seq1[i]
                 except (TypeError, IndexError, NotImplementedError):
-                    differing += ('\nUnable to index element %d of first %s\n' %
+                    differing += (
+                        '\nUnable to index element %d of first %s\n' %
                                  (i, seq_type_name))
                     break
 
                 try:
                     item2 = seq2[i]
                 except (TypeError, IndexError, NotImplementedError):
-                    differing += ('\nUnable to index element %d of second %s\n' %
+                    differing += (
+                        '\nUnable to index element %d of second %s\n' %
                                  (i, seq_type_name))
                     break
 
                 if item1 != item2:
                     differing += ('\nFirst differing element %d:\n%s\n%s\n' %
-                                 (i, item1, item2))
+                                  (i, item1, item2))
                     break
             else:
                 if (len1 == len2 and seq_type is None and
-                    type(seq1) != type(seq2)):
+                        type(seq1) != type(seq2)):
                     # The sequences are the same, but have differing types.
                     return
 
             if len1 > len2:
                 differing += ('\nFirst %s contains %d additional '
-                             'elements.\n' % (seq_type_name, len1 - len2))
+                              'elements.\n' % (seq_type_name, len1 - len2))
                 try:
                     differing += ('First extra element %d:\n%s\n' %
                                   (len2, seq1[len2]))
@@ -704,7 +720,7 @@ class TestCase(unittest.TestCase):
                                   'of first %s\n' % (len2, seq_type_name))
             elif len1 < len2:
                 differing += ('\nSecond %s contains %d additional '
-                             'elements.\n' % (seq_type_name, len2 - len1))
+                              'elements.\n' % (seq_type_name, len2 - len1))
                 try:
                     differing += ('First extra element %d:\n%s\n' %
                                   (len1, seq2[len1]))
@@ -765,7 +781,8 @@ class TestCase(unittest.TestCase):
         except TypeError, e:
             self.fail('invalid type when attempting set difference: %s' % e)
         except AttributeError, e:
-            self.fail('second argument does not support set difference: %s' % e)
+            self.fail(
+                'second argument does not support set difference: %s' % e)
 
         if not (difference1 or difference2):
             return
@@ -786,15 +803,15 @@ class TestCase(unittest.TestCase):
     def assertIn(self, member, container, msg=None):
         """Just like self.assertTrue(a in b), but with a nicer default message."""
         if member not in container:
-            standardMsg = '%s not found in %s' % (safe_repr(member), 
-                                                   safe_repr(container))
+            standardMsg = '%s not found in %s' % (safe_repr(member),
+                                                  safe_repr(container))
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertNotIn(self, member, container, msg=None):
         """Just like self.assertTrue(a not in b), but with a nicer default message."""
         if member in container:
-            standardMsg = '%s unexpectedly found in %s' % (safe_repr(member), 
-                                                            safe_repr(container))
+            standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
+                                                           safe_repr(container))
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertIs(self, expr1, expr2, msg=None):
@@ -810,8 +827,10 @@ class TestCase(unittest.TestCase):
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertDictEqual(self, d1, d2, msg=None):
-        self.assert_(isinstance(d1, dict), 'First argument is not a dictionary')
-        self.assert_(isinstance(d2, dict), 'Second argument is not a dictionary')
+        self.assert_(isinstance(d1, dict),
+                     'First argument is not a dictionary')
+        self.assert_(isinstance(d2, dict),
+                     'Second argument is not a dictionary')
 
         if d1 != d2:
             standardMsg = ('\n' + '\n'.join(difflib.ndiff(
@@ -828,7 +847,7 @@ class TestCase(unittest.TestCase):
                 missing.append(key)
             elif value != actual[key]:
                 mismatched.append('%s, expected: %s, actual: %s' %
-                                  (safe_repr(key), safe_repr(value), 
+                                  (safe_repr(key), safe_repr(value),
                                    safe_repr(actual[key])))
 
         if not (missing or mismatched):
@@ -836,8 +855,8 @@ class TestCase(unittest.TestCase):
 
         standardMsg = ''
         if missing:
-            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in 
-                                                    missing)
+            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
+                                                   missing)
         if mismatched:
             if standardMsg:
                 standardMsg += '; '
@@ -875,11 +894,11 @@ class TestCase(unittest.TestCase):
 
         errors = []
         if missing:
-            errors.append('Expected, but missing:\n    %s' % 
-                           safe_repr(missing))
+            errors.append('Expected, but missing:\n    %s' %
+                          safe_repr(missing))
         if unexpected:
-            errors.append('Unexpected, but present:\n    %s' % 
-                           safe_repr(unexpected))
+            errors.append('Unexpected, but present:\n    %s' %
+                          safe_repr(unexpected))
         if errors:
             standardMsg = '\n'.join(errors)
             self.fail(self._formatMessage(msg, standardMsg))
@@ -887,9 +906,9 @@ class TestCase(unittest.TestCase):
     def assertMultiLineEqual(self, first, second, msg=None):
         """Assert that two multi-line strings are equal."""
         self.assert_(isinstance(first, basestring), (
-                'First argument is not a string'))
+            'First argument is not a string'))
         self.assert_(isinstance(second, basestring), (
-                'Second argument is not a string'))
+            'Second argument is not a string'))
 
         if first != second:
             standardMsg = '\n' + ''.join(difflib.ndiff(first.splitlines(True),
@@ -905,19 +924,22 @@ class TestCase(unittest.TestCase):
     def assertLessEqual(self, a, b, msg=None):
         """Just like self.assertTrue(a <= b), but with a nicer default message."""
         if not a <= b:
-            standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
+            standardMsg = '%s not less than or equal to %s' % (
+                safe_repr(a), safe_repr(b))
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertGreater(self, a, b, msg=None):
         """Just like self.assertTrue(a > b), but with a nicer default message."""
         if not a > b:
-            standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
+            standardMsg = '%s not greater than %s' % (
+                safe_repr(a), safe_repr(b))
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertGreaterEqual(self, a, b, msg=None):
         """Just like self.assertTrue(a >= b), but with a nicer default message."""
         if not a >= b:
-            standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
+            standardMsg = '%s not greater than or equal to %s' % (
+                safe_repr(a), safe_repr(b))
             self.fail(self._formatMessage(msg, standardMsg))
 
     def assertIsNone(self, obj, msg=None):
@@ -966,22 +988,22 @@ class TestCase(unittest.TestCase):
                 expected_regexp = re.compile(expected_regexp)
             if not expected_regexp.search(str(exc_value)):
                 raise self.failureException('"%s" does not match "%s"' %
-                         (expected_regexp.pattern, str(exc_value)))
+                                            (expected_regexp.pattern, str(exc_value)))
         else:
-            if hasattr(expected_exception, '__name__'): 
+            if hasattr(expected_exception, '__name__'):
                 excName = expected_exception.__name__
-            else: 
+            else:
                 excName = str(expected_exception)
             raise self.failureException, "%s not raised" % excName
 
-
     def assertRegexpMatches(self, text, expected_regexp, msg=None):
         """Fail the test unless the text matches the regular expression."""
         if isinstance(expected_regexp, basestring):
             expected_regexp = re.compile(expected_regexp)
         if not expected_regexp.search(text):
             msg = msg or "Regexp didn't match"
-            msg = '%s: %r not found in %r' % (msg, expected_regexp.pattern, text)
+            msg = '%s: %r not found in %r' % (
+                msg, expected_regexp.pattern, text)
             raise self.failureException(msg)
 
     def assertNotRegexpMatches(self, text, unexpected_regexp, msg=None):
@@ -997,7 +1019,9 @@ class TestCase(unittest.TestCase):
                                                text)
             raise self.failureException(msg)
 
+
 class FunctionTestCase(TestCase):
+
     """A test case that wraps a test function.
 
     This is useful for slipping pre-existing test functions into the
@@ -1032,9 +1056,9 @@ class FunctionTestCase(TestCase):
             return NotImplemented
 
         return self._setUpFunc == other._setUpFunc and \
-               self._tearDownFunc == other._tearDownFunc and \
-               self._testFunc == other._testFunc and \
-               self._description == other._description
+            self._tearDownFunc == other._tearDownFunc and \
+            self._testFunc == other._testFunc and \
+            self._description == other._description
 
     def __ne__(self, other):
         return not self == other
diff --git a/tests/functional-tests/unittest2/compatibility.py b/tests/functional-tests/unittest2/compatibility.py
index 61a56a3..8452e85 100644
--- a/tests/functional-tests/unittest2/compatibility.py
+++ b/tests/functional-tests/unittest2/compatibility.py
@@ -20,7 +20,7 @@ if not hasattr(os, 'relpath'):
     if os.path is sys.modules.get('ntpath'):
         def relpath(path, start=os.path.curdir):
             """Return a relative version of a path"""
-        
+
             if not path:
                 raise ValueError("no path specified")
             start_list = os.path.abspath(start).split(os.path.sep)
@@ -30,39 +30,39 @@ if not hasattr(os, 'relpath'):
                 unc_start, rest = os.path.splitunc(start)
                 if bool(unc_path) ^ bool(unc_start):
                     raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
-                                                                        % (path, start))
+                                     % (path, start))
                 else:
                     raise ValueError("path is on drive %s, start on drive %s"
-                                                        % (path_list[0], start_list[0]))
+                                     % (path_list[0], start_list[0]))
             # Work out how much of the filepath is shared by start and path.
             for i in range(min(len(start_list), len(path_list))):
                 if start_list[i].lower() != path_list[i].lower():
                     break
             else:
                 i += 1
-        
-            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
+
+            rel_list = [os.path.pardir] * (len(start_list) - i) + path_list[i:]
             if not rel_list:
                 return os.path.curdir
             return os.path.join(*rel_list)
-    
+
     else:
         # default to posixpath definition
         def relpath(path, start=os.path.curdir):
             """Return a relative version of a path"""
-        
+
             if not path:
                 raise ValueError("no path specified")
-            
+
             start_list = os.path.abspath(start).split(os.path.sep)
             path_list = os.path.abspath(path).split(os.path.sep)
-        
+
             # Work out how much of the filepath is shared by start and path.
             i = len(os.path.commonprefix([start_list, path_list]))
-        
-            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
+
+            rel_list = [os.path.pardir] * (len(start_list) - i) + path_list[i:]
             if not rel_list:
                 return os.path.curdir
             return os.path.join(*rel_list)
-        
+
     os.path.relpath = relpath
diff --git a/tests/functional-tests/unittest2/loader.py b/tests/functional-tests/unittest2/loader.py
index b69b694..92d8dcb 100644
--- a/tests/functional-tests/unittest2/loader.py
+++ b/tests/functional-tests/unittest2/loader.py
@@ -22,8 +22,10 @@ __unittest = True
 def _CmpToKey(mycmp):
     'Convert a cmp= function into a key= function'
     class K(object):
+
         def __init__(self, obj):
             self.obj = obj
+
         def __lt__(self, other):
             return mycmp(self.obj, other.obj) == -1
     return K
@@ -44,18 +46,21 @@ def _make_failed_import_test(name, suiteClass):
     return _make_failed_test('ModuleImportFailure', name, ImportError(message),
                              suiteClass)
 
+
 def _make_failed_load_tests(name, exception, suiteClass):
     return _make_failed_test('LoadTestsFailure', name, exception, suiteClass)
 
+
 def _make_failed_test(classname, methodname, exception, suiteClass):
     def testFailure(self):
         raise exception
     attrs = {methodname: testFailure}
     TestClass = type(classname, (case.TestCase,), attrs)
     return suiteClass((TestClass(methodname),))
-    
+
 
 class TestLoader(unittest.TestLoader):
+
     """
     This class is responsible for loading tests according to various criteria
     and returning them wrapped in a TestSuite
@@ -183,7 +188,8 @@ class TestLoader(unittest.TestLoader):
         """
         set_implicit_top = False
         if top_level_dir is None and self._top_level_dir is not None:
-            # make top_level_dir optional if called from load_tests in a package
+            # make top_level_dir optional if called from load_tests in a
+            # package
             top_level_dir = self._top_level_dir
         elif top_level_dir is None:
             set_implicit_top = True
@@ -203,7 +209,8 @@ class TestLoader(unittest.TestLoader):
         if os.path.isdir(os.path.abspath(start_dir)):
             start_dir = os.path.abspath(start_dir)
             if start_dir != top_level_dir:
-                is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
+                is_not_importable = not os.path.isfile(
+                    os.path.join(start_dir, '__init__.py'))
         else:
             # support for discovery from dotted module names
             try:
@@ -213,13 +220,16 @@ class TestLoader(unittest.TestLoader):
             else:
                 the_module = sys.modules[start_dir]
                 top_part = start_dir.split('.')[0]
-                start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
+                start_dir = os.path.abspath(
+                    os.path.dirname((the_module.__file__)))
                 if set_implicit_top:
-                    self._top_level_dir = os.path.abspath(os.path.dirname(os.path.dirname(sys.modules[top_part].__file__)))
+                    self._top_level_dir = os.path.abspath(
+                        os.path.dirname(os.path.dirname(sys.modules[top_part].__file__)))
                     sys.path.remove(top_level_dir)
 
         if is_not_importable:
-            raise ImportError('Start directory is not importable: %r' % start_dir)
+            raise ImportError(
+                'Start directory is not importable: %r' % start_dir)
 
         tests = list(self._find_tests(start_dir, pattern))
         return self.suiteClass(tests)
@@ -257,16 +267,19 @@ class TestLoader(unittest.TestLoader):
                     except:
                         yield _make_failed_import_test(name, self.suiteClass)
                     else:
-                        mod_file = os.path.abspath(getattr(module, '__file__', full_path))
+                        mod_file = os.path.abspath(
+                            getattr(module, '__file__', full_path))
                         realpath = os.path.splitext(mod_file)[0]
                         fullpath_noext = os.path.splitext(full_path)[0]
                         if realpath.lower() != fullpath_noext.lower():
                             module_dir = os.path.dirname(realpath)
-                            mod_name = os.path.splitext(os.path.basename(full_path))[0]
+                            mod_name = os.path.splitext(
+                                os.path.basename(full_path))[0]
                             expected_dir = os.path.dirname(full_path)
                             msg = ("%r module incorrectly imported from %r. Expected %r. "
                                    "Is this module globally installed?")
-                            raise ImportError(msg % (mod_name, module_dir, expected_dir))
+                            raise ImportError(
+                                msg % (mod_name, module_dir, expected_dir))
                         yield self.loadTestsFromModule(module)
             elif os.path.isdir(full_path):
                 if not os.path.isfile(os.path.join(full_path, '__init__.py')):
@@ -275,11 +288,13 @@ class TestLoader(unittest.TestLoader):
                 load_tests = None
                 tests = None
                 if fnmatch(path, pattern):
-                    # only check load_tests if the package directory itself matches the filter
+                    # only check load_tests if the package directory itself
+                    # matches the filter
                     name = self._get_name_from_path(full_path)
                     package = self._get_module_from_name(name)
                     load_tests = getattr(package, 'load_tests', None)
-                    tests = self.loadTestsFromModule(package, use_load_tests=False)
+                    tests = self.loadTestsFromModule(
+                        package, use_load_tests=False)
 
                 if load_tests is None:
                     if tests is not None:
@@ -306,13 +321,16 @@ def _makeLoader(prefix, sortUsing, suiteClass=None):
         loader.suiteClass = suiteClass
     return loader
 
+
 def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
     return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
 
+
 def makeSuite(testCaseClass, prefix='test', sortUsing=cmp,
               suiteClass=suite.TestSuite):
     return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
 
+
 def findTestCases(module, prefix='test', sortUsing=cmp,
                   suiteClass=suite.TestSuite):
     return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
diff --git a/tests/functional-tests/unittest2/main.py b/tests/functional-tests/unittest2/main.py
index 254f0e8..f2814ab 100644
--- a/tests/functional-tests/unittest2/main.py
+++ b/tests/functional-tests/unittest2/main.py
@@ -14,8 +14,8 @@ except ImportError:
 
 __unittest = True
 
-FAILFAST     = "  -f, --failfast   Stop on first failure\n"
-CATCHBREAK   = "  -c, --catch      Catch control-C and display results\n"
+FAILFAST = "  -f, --failfast   Stop on first failure\n"
+CATCHBREAK = "  -c, --catch      Catch control-C and display results\n"
 BUFFEROUTPUT = "  -b, --buffer     Buffer stdout and stderr during test runs\n"
 
 USAGE_AS_MAIN = """\
@@ -66,11 +66,12 @@ Examples:
 
 
 class TestProgram(object):
+
     """A command-line program that runs a set of tests; this is primarily
        for making test modules conveniently executable.
     """
     USAGE = USAGE_FROM_MODULE
-    
+
     # defaults for testing
     failfast = catchbreak = buffer = None
 
@@ -123,21 +124,21 @@ class TestProgram(object):
         try:
             options, args = getopt.getopt(argv[1:], 'hHvqfcb', long_opts)
             for opt, value in options:
-                if opt in ('-h','-H','--help'):
+                if opt in ('-h', '-H', '--help'):
                     self.usageExit()
-                if opt in ('-q','--quiet'):
+                if opt in ('-q', '--quiet'):
                     self.verbosity = 0
-                if opt in ('-v','--verbose'):
+                if opt in ('-v', '--verbose'):
                     self.verbosity = 2
-                if opt in ('-f','--failfast'):
+                if opt in ('-f', '--failfast'):
                     if self.failfast is None:
                         self.failfast = True
                     # Should this raise an exception if -f is not valid?
-                if opt in ('-c','--catch'):
+                if opt in ('-c', '--catch'):
                     if self.catchbreak is None and installHandler is not None:
                         self.catchbreak = True
                     # Should this raise an exception if -c is not valid?
-                if opt in ('-b','--buffer'):
+                if opt in ('-b', '--buffer'):
                     if self.buffer is None:
                         self.buffer = True
                     # Should this raise an exception if -b is not valid?
@@ -169,22 +170,26 @@ class TestProgram(object):
         parser.add_option('-v', '--verbose', dest='verbose', default=False,
                           help='Verbose output', action='store_true')
         if self.failfast != False:
-            parser.add_option('-f', '--failfast', dest='failfast', default=False,
-                              help='Stop on first fail or error', 
+            parser.add_option(
+                '-f', '--failfast', dest='failfast', default=False,
+                              help='Stop on first fail or error',
                               action='store_true')
         if self.catchbreak != False and installHandler is not None:
-            parser.add_option('-c', '--catch', dest='catchbreak', default=False,
-                              help='Catch ctrl-C and display results so far', 
+            parser.add_option(
+                '-c', '--catch', dest='catchbreak', default=False,
+                              help='Catch ctrl-C and display results so far',
                               action='store_true')
         if self.buffer != False:
             parser.add_option('-b', '--buffer', dest='buffer', default=False,
-                              help='Buffer stdout and stderr during tests', 
+                              help='Buffer stdout and stderr during tests',
                               action='store_true')
         parser.add_option('-s', '--start-directory', dest='start', default='.',
                           help="Directory to start discovery ('.' default)")
-        parser.add_option('-p', '--pattern', dest='pattern', default='test*.py',
+        parser.add_option(
+            '-p', '--pattern', dest='pattern', default='test*.py',
                           help="Pattern to match tests ('test*.py' default)")
-        parser.add_option('-t', '--top-level-directory', dest='top', default=None,
+        parser.add_option(
+            '-t', '--top-level-directory', dest='top', default=None,
                           help='Top level directory of project (defaults to start directory)')
 
         options, args = parser.parse_args(argv)
@@ -193,7 +198,7 @@ class TestProgram(object):
 
         for name, value in zip(('start', 'pattern', 'top'), args):
             setattr(options, name, value)
-        
+
         # only set options from the parsing here
         # if they weren't set explicitly in the constructor
         if self.failfast is None:
@@ -202,7 +207,7 @@ class TestProgram(object):
             self.catchbreak = options.catchbreak
         if self.buffer is None:
             self.buffer = options.buffer
-        
+
         if options.verbose:
             self.verbosity = 2
 
diff --git a/tests/functional-tests/unittest2/result.py b/tests/functional-tests/unittest2/result.py
index 725f7b4..a4ab726 100644
--- a/tests/functional-tests/unittest2/result.py
+++ b/tests/functional-tests/unittest2/result.py
@@ -13,6 +13,7 @@ from unittest2.compatibility import wraps
 
 __unittest = True
 
+
 def failfast(method):
     @wraps(method)
     def inner(self, *args, **kw):
@@ -25,7 +26,9 @@ def failfast(method):
 STDOUT_LINE = '\nStdout:\n%s'
 STDERR_LINE = '\nStderr:\n%s'
 
+
 class TestResult(unittest.TestResult):
+
     """Holder for test result information.
 
     Test results are automatically managed by the TestCase and TestSuite
@@ -38,7 +41,7 @@ class TestResult(unittest.TestResult):
     """
     _previousTestClass = None
     _moduleSetUpFailed = False
-    
+
     def __init__(self):
         self.failfast = False
         self.failures = []
@@ -54,7 +57,7 @@ class TestResult(unittest.TestResult):
         self._original_stdout = sys.stdout
         self._original_stderr = sys.stderr
         self._mirrorOutput = False
-    
+
     def startTest(self, test):
         "Called when the given test is about to be run"
         self.testsRun += 1
@@ -86,7 +89,7 @@ class TestResult(unittest.TestResult):
                     if not error.endswith('\n'):
                         error += '\n'
                     self._original_stderr.write(STDERR_LINE % error)
-                
+
             sys.stdout = self._original_stdout
             sys.stderr = self._original_stderr
             self._stdout_buffer.seek(0)
@@ -94,7 +97,6 @@ class TestResult(unittest.TestResult):
             self._stderr_buffer.seek(0)
             self._stderr_buffer.truncate()
         self._mirrorOutput = False
-        
 
     def stopTestRun(self):
         """Called once after all tests are executed.
@@ -155,10 +157,10 @@ class TestResult(unittest.TestResult):
             msgLines = traceback.format_exception(exctype, value, tb, length)
         else:
             msgLines = traceback.format_exception(exctype, value, tb)
-        
+
         if self.buffer:
             output = sys.stdout.getvalue()
-            error = sys.stderr.getvalue()            
+            error = sys.stderr.getvalue()
             if output:
                 if not output.endswith('\n'):
                     output += '\n'
diff --git a/tests/functional-tests/unittest2/runner.py b/tests/functional-tests/unittest2/runner.py
index 8dfce94..2769d14 100644
--- a/tests/functional-tests/unittest2/runner.py
+++ b/tests/functional-tests/unittest2/runner.py
@@ -13,27 +13,30 @@ try:
 except ImportError:
     def registerResult(_):
         pass
-    
+
 __unittest = True
 
 
 class _WritelnDecorator(object):
+
     """Used to decorate file-like objects with a handy 'writeln' method"""
-    def __init__(self,stream):
+
+    def __init__(self, stream):
         self.stream = stream
 
     def __getattr__(self, attr):
         if attr in ('stream', '__getstate__'):
             raise AttributeError(attr)
-        return getattr(self.stream,attr)
+        return getattr(self.stream, attr)
 
     def writeln(self, arg=None):
         if arg:
             self.write(arg)
-        self.write('\n') # text-mode streams translate to \r\n if needed
+        self.write('\n')  # text-mode streams translate to \r\n if needed
 
 
 class TextTestResult(result.TestResult):
+
     """A test result class that can print formatted text results to a stream.
 
     Used by TextTestRunner.
@@ -119,7 +122,8 @@ class TextTestResult(result.TestResult):
     def printErrorList(self, flavour, errors):
         for test, err in errors:
             self.stream.writeln(self.separator1)
-            self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
+            self.stream.writeln(
+                "%s: %s" % (flavour, self.getDescription(test)))
             self.stream.writeln(self.separator2)
             self.stream.writeln("%s" % err)
 
@@ -129,6 +133,7 @@ class TextTestResult(result.TestResult):
 
 
 class TextTestRunner(unittest.TextTestRunner):
+
     """A test runner class that displays results in textual form.
 
     It prints out the names of tests as they are run, errors as they
@@ -137,7 +142,7 @@ class TextTestRunner(unittest.TextTestRunner):
     resultclass = TextTestResult
 
     def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1,
-                    failfast=False, buffer=False, resultclass=None):
+                 failfast=False, buffer=False, resultclass=None):
         self.stream = _WritelnDecorator(stream)
         self.descriptions = descriptions
         self.verbosity = verbosity
@@ -155,7 +160,7 @@ class TextTestRunner(unittest.TextTestRunner):
         result.failfast = self.failfast
         result.buffer = self.buffer
         registerResult(result)
-        
+
         startTime = time.time()
         startTestRun = getattr(result, 'startTestRun', None)
         if startTestRun is not None:
@@ -176,7 +181,7 @@ class TextTestRunner(unittest.TextTestRunner):
         self.stream.writeln("Ran %d test%s in %.3fs" %
                             (run, run != 1 and "s" or "", timeTaken))
         self.stream.writeln()
-        
+
         expectedFails = unexpectedSuccesses = skipped = 0
         try:
             results = map(len, (result.expectedFailures,
diff --git a/tests/functional-tests/unittest2/signals.py b/tests/functional-tests/unittest2/signals.py
index bf2dacd..61ae011 100644
--- a/tests/functional-tests/unittest2/signals.py
+++ b/tests/functional-tests/unittest2/signals.py
@@ -9,6 +9,7 @@ __unittest = True
 
 
 class _InterruptHandler(object):
+
     def __init__(self, default_handler):
         self.called = False
         self.default_handler = default_handler
@@ -19,7 +20,7 @@ class _InterruptHandler(object):
             # if we aren't the installed handler, then delegate immediately
             # to the default handler
             self.default_handler(signum, frame)
-            
+
         if self.called:
             self.default_handler(signum, frame)
         self.called = True
@@ -27,13 +28,18 @@ class _InterruptHandler(object):
             result.stop()
 
 _results = weakref.WeakKeyDictionary()
+
+
 def registerResult(result):
     _results[result] = 1
 
+
 def removeResult(result):
     return bool(_results.pop(result, None))
 
 _interrupt_handler = None
+
+
 def installHandler():
     global _interrupt_handler
     if _interrupt_handler is None:
diff --git a/tests/functional-tests/unittest2/suite.py b/tests/functional-tests/unittest2/suite.py
index 9431f2b..0c76b74 100644
--- a/tests/functional-tests/unittest2/suite.py
+++ b/tests/functional-tests/unittest2/suite.py
@@ -10,8 +10,10 @@ __unittest = True
 
 
 class BaseTestSuite(unittest.TestSuite):
+
     """A simple test suite that doesn't provide class or module shared fixtures.
     """
+
     def __init__(self, tests=()):
         self._tests = []
         self.addTests(tests)
@@ -72,6 +74,7 @@ class BaseTestSuite(unittest.TestSuite):
 
 
 class TestSuite(BaseTestSuite):
+
     """A test suite is a composite test consisting of a number of TestCases.
 
     For use, create an instance of TestSuite, then add test case instances.
@@ -80,7 +83,6 @@ class TestSuite(BaseTestSuite):
     in the order in which they were added, aggregating the results. When
     subclassing, do not forget to call the base class constructor.
     """
-    
 
     def run(self, result):
         self._wrapped_run(result)
@@ -88,28 +90,28 @@ class TestSuite(BaseTestSuite):
         self._handleModuleTearDown(result)
         return result
 
-    ################################
+    #
     # private methods
     def _wrapped_run(self, result):
         for test in self:
             if result.shouldStop:
                 break
-            
+
             if _isnotsuite(test):
                 self._tearDownPreviousClass(test, result)
                 self._handleModuleFixture(test, result)
                 self._handleClassSetUp(test, result)
                 result._previousTestClass = test.__class__
-                
-                if (getattr(test.__class__, '_classSetupFailed', False) or 
-                    getattr(result, '_moduleSetUpFailed', False)):
+
+                if (getattr(test.__class__, '_classSetupFailed', False) or
+                        getattr(result, '_moduleSetUpFailed', False)):
                     continue
-            
+
             if hasattr(test, '_wrapped_run'):
                 test._wrapped_run(result)
             else:
                 test(result)
-    
+
     def _handleClassSetUp(self, test, result):
         previousClass = getattr(result, '_previousTestClass', None)
         currentClass = test.__class__
@@ -119,14 +121,14 @@ class TestSuite(BaseTestSuite):
             return
         if getattr(currentClass, "__unittest_skip__", False):
             return
-        
+
         try:
             currentClass._classSetupFailed = False
         except TypeError:
             # test may actually be a function
             # so its class will be a builtin-type
             pass
-            
+
         setUpClass = getattr(currentClass, 'setUpClass', None)
         if setUpClass is not None:
             try:
@@ -134,24 +136,22 @@ class TestSuite(BaseTestSuite):
             except:
                 currentClass._classSetupFailed = True
                 self._addClassSetUpError(result, currentClass)
-    
+
     def _get_previous_module(self, result):
         previousModule = None
         previousClass = getattr(result, '_previousTestClass', None)
         if previousClass is not None:
             previousModule = previousClass.__module__
         return previousModule
-        
-        
+
     def _handleModuleFixture(self, test, result):
         previousModule = self._get_previous_module(result)
         currentModule = test.__class__.__module__
         if currentModule == previousModule:
             return
-        
+
         self._handleModuleTearDown(result)
 
-        
         result._moduleSetUpFailed = False
         try:
             module = sys.modules[currentModule]
@@ -172,7 +172,7 @@ class TestSuite(BaseTestSuite):
             return
         if result._moduleSetUpFailed:
             return
-            
+
         try:
             module = sys.modules[previousModule]
         except KeyError:
@@ -185,7 +185,7 @@ class TestSuite(BaseTestSuite):
             except:
                 error = _ErrorHolder('tearDownModule (%s)' % previousModule)
                 result.addError(error, sys.exc_info())
-    
+
     def _tearDownPreviousClass(self, test, result):
         previousClass = getattr(result, '_previousTestClass', None)
         currentClass = test.__class__
@@ -197,14 +197,14 @@ class TestSuite(BaseTestSuite):
             return
         if getattr(previousClass, "__unittest_skip__", False):
             return
-        
+
         tearDownClass = getattr(previousClass, 'tearDownClass', None)
         if tearDownClass is not None:
             try:
                 tearDownClass()
             except:
                 self._addClassTearDownError(result)
-    
+
     def _addClassTearDownError(self, result):
         className = util.strclass(result._previousTestClass)
         error = _ErrorHolder('classTearDown (%s)' % className)
@@ -216,8 +216,8 @@ class TestSuite(BaseTestSuite):
         result.addError(error, sys.exc_info())
 
 
-
 class _ErrorHolder(object):
+
     """
     Placeholder for a TestCase inside a result. As far as a TestResult
     is concerned, this looks exactly like a unit test. Used to insert
@@ -255,6 +255,7 @@ class _ErrorHolder(object):
     def countTestCases(self):
         return 0
 
+
 def _isnotsuite(test):
     "A crude way to tell apart testcases and suites with duck-typing"
     try:
diff --git a/tests/functional-tests/unittest2/unit2.py b/tests/functional-tests/unittest2/unit2.py
index dc4f6e0..b13c21a 100644
--- a/tests/functional-tests/unittest2/unit2.py
+++ b/tests/functional-tests/unittest2/unit2.py
@@ -5,4 +5,4 @@ __unittest = True
 from unittest2.main import main, TestProgram, USAGE_AS_MAIN
 TestProgram.USAGE = USAGE_AS_MAIN
 
-main(module=None)
\ No newline at end of file
+main(module=None)
diff --git a/tests/functional-tests/unittest2/util.py b/tests/functional-tests/unittest2/util.py
index a7bffb6..85d1c87 100644
--- a/tests/functional-tests/unittest2/util.py
+++ b/tests/functional-tests/unittest2/util.py
@@ -2,21 +2,25 @@
 
 """Various utility functions."""
 
+
 def safe_repr(obj):
     try:
         return repr(obj)
     except Exception:
         return object.__repr__(obj)
 
+
 def safe_str(obj):
     try:
         return str(obj)
     except Exception:
         return object.__str__(obj)
 
+
 def strclass(cls):
     return "%s.%s" % (cls.__module__, cls.__name__)
 
+
 def sorted_list_difference(expected, actual):
     """Finds elements in only one or the other of two, sorted input lists.
 
@@ -57,6 +61,7 @@ def sorted_list_difference(expected, actual):
             break
     return missing, unexpected
 
+
 def unorderable_list_difference(expected, actual, ignore_duplicate=False):
     """Same behavior as sorted_list_difference but
     for lists of unorderable items (like dicts).
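
The two helpers touched above implement a small set-difference algorithm:
sorted_list_difference walks both sorted inputs with two indices and reports
items that appear in only one of them, while unorderable_list_difference does
the same job by repeated membership tests when the items cannot be sorted.
A minimal sketch of the two-pointer idea (a simplified illustration that
ignores the duplicate-skipping the real helper performs, not the unittest2
code itself):

    def sorted_difference_sketch(expected, actual):
        # Both inputs must already be sorted.
        i = j = 0
        missing, unexpected = [], []
        while i < len(expected) and j < len(actual):
            if expected[i] < actual[j]:
                missing.append(expected[i])    # only in 'expected'
                i += 1
            elif expected[i] > actual[j]:
                unexpected.append(actual[j])   # only in 'actual'
                j += 1
            else:
                i += 1                         # present in both, skip
                j += 1
        missing.extend(expected[i:])
        unexpected.extend(actual[j:])
        return missing, unexpected
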
diff --git a/tests/functional-tests/virtual-files-tc.py b/tests/functional-tests/virtual-files-tc.py
index e6290b6..5110401 100644
--- a/tests/functional-tests/virtual-files-tc.py
+++ b/tests/functional-tests/virtual-files-tc.py
@@ -18,7 +18,9 @@
 # 02110-1301, USA.
 #
 
-import sys,os,dbus
+import sys
+import os
+import dbus
 import unittest
 import time
 import random
@@ -30,138 +32,145 @@ import string
 TRACKER = "org.freedesktop.Tracker1"
 TRACKER_OBJ = "/org/freedesktop/Tracker1/Resources"
 RESOURCES_IFACE = "org.freedesktop.Tracker1.Resources"
-MINER="org.freedesktop.Tracker1.Miner.Files"
-MINER_OBJ="/org/freedesktop/Tracker1/Miner/Files"
-MINER_IFACE="org.freedesktop.Tracker1.Miner"
+MINER = "org.freedesktop.Tracker1.Miner.Files"
+MINER_OBJ = "/org/freedesktop/Tracker1/Miner/Files"
+MINER_IFACE = "org.freedesktop.Tracker1.Miner"
 
 
 target = configuration.check_target()
 
 if target == configuration.MAEMO6_HW:
-       """target is device """
-        dir_path = configuration.MYDOCS
-        dir_path_parent = '/home/user'
-       src = configuration.TEST_DATA_IMAGES + 'test-image-1.jpg'
+    """target is device """
+    dir_path = configuration.MYDOCS
+    dir_path_parent = '/home/user'
+    src = configuration.TEST_DATA_IMAGES + 'test-image-1.jpg'
 
 elif target == configuration.DESKTOP:
-        dir_path = os.path.expanduser("~")
-        dir_path_parent = os.path.expanduser("~") + "/" + "tmp"
-       if (not (os.path.exists(dir_path_parent) and os.path.isdir(dir_path_parent))):
-           os.mkdir (dir_path_parent)
-       src = configuration.VCS_TEST_DATA_IMAGES + 'test-image-1.jpg'
+    dir_path = os.path.expanduser("~")
+    dir_path_parent = os.path.expanduser("~") + "/" + "tmp"
+    if (not (os.path.exists(dir_path_parent) and os.path.isdir(dir_path_parent))):
+        os.mkdir(dir_path_parent)
+    src = configuration.VCS_TEST_DATA_IMAGES + 'test-image-1.jpg'
 
 print dir_path
 
 """ copy the test data to proper location. """
+
+
 def copy_file():
 
-        dest = dir_path
-        print 'Copying '+src+' to '+dest
-        commands.getoutput('cp '+src+ ' '+dest)
+    dest = dir_path
+    print 'Copying ' + src + ' to ' + dest
+    commands.getoutput('cp ' + src + ' ' + dest)
 
 copy_file()
 
+
 class TestVirtualFiles (unittest.TestCase):
-        def setUp(self):
-                bus = dbus.SessionBus()
-                tracker = bus.get_object(TRACKER, TRACKER_OBJ)
-                self.resources = dbus.Interface (tracker, dbus_interface=RESOURCES_IFACE)
-               miner_obj= bus.get_object(MINER,MINER_OBJ)
-               self.miner=dbus.Interface (miner_obj,dbus_interface=MINER_IFACE)
 
-        def sparql_update(self,query):
-                return self.resources.SparqlUpdate(query)
-        def query(self,query):
-                return self.resources.SparqlQuery(query)
-       def ignore(self,uri):
-               return self.miner.IgnoreNextUpdate(uri)
+    def setUp(self):
+        bus = dbus.SessionBus()
+        tracker = bus.get_object(TRACKER, TRACKER_OBJ)
+        self.resources = dbus.Interface(
+            tracker, dbus_interface=RESOURCES_IFACE)
+        miner_obj = bus.get_object(MINER, MINER_OBJ)
+        self.miner = dbus.Interface(miner_obj, dbus_interface=MINER_IFACE)
+
+    def sparql_update(self, query):
+        return self.resources.SparqlUpdate(query)
+
+    def query(self, query):
+        return self.resources.SparqlQuery(query)
+
+    def ignore(self, uri):
+        return self.miner.IgnoreNextUpdate(uri)
 
 
 class virtual_files(TestVirtualFiles):
 
-       def test_Virttual_01(self):
-                """
-                Test if the update is ignored until the creation of the file is completed.
-                1. Move the file to some other location.
-                2. Create resource in tracker , by making instance of nie:DataObject.
-                3. IgnoreNextUpdate on the files.
-                4. Copy the original file to the present directory.
-                5. Query for the title of the file.
-                """
+    def test_Virttual_01(self):
+        """
+        Test if the update is ignored until the creation of the file is completed.
+        1. Move the file to some other location.
+        2. Create resource in tracker , by making instance of nie:DataObject.
+        3. IgnoreNextUpdate on the files.
+        4. Copy the original file to the present directory.
+        5. Query for the title of the file.
+        """
 
-                test_file = 'test-image-1.jpg'
-                file= dir_path + '/' + test_file
-                uri='file://' + file
-               print uri
+        test_file = 'test-image-1.jpg'
+        file = dir_path + '/' + test_file
+        uri = 'file://' + file
+        print uri
 
-                commands.getoutput('mv  ' + file + ' ' + dir_path_parent)
+        commands.getoutput('mv  ' + file + ' ' + dir_path_parent)
 
-                Insert = """
+        Insert = """
                INSERT { _:x a nfo:Image, nie:DataObject ;
                 nie:url <%s> ;
-                nie:title 'title_test'. }""" %(uri)
-               print Insert
+                nie:title 'title_test'. }""" % (uri)
+        print Insert
 
-                self.sparql_update(Insert)
-               time.sleep(10)
+        self.sparql_update(Insert)
+        time.sleep(10)
 
-                self.miner.IgnoreNextUpdate([uri])
+        self.miner.IgnoreNextUpdate([uri])
 
-                commands.getoutput('cp ' + dir_path_parent + '/'+ test_file + ' ' + dir_path)
+        commands.getoutput(
+            'cp ' + dir_path_parent + '/' + test_file + ' ' + dir_path)
 
-                QUERY = """
+        QUERY = """
                 SELECT ?t WHERE { ?file a nfo:FileDataObject ;
                 nie:title ?t ;
                 nie:url <%s> .}
-                """ %(uri)
-               print QUERY
-
-                result=self.query(QUERY)
-               print result
+                """ % (uri)
+        print QUERY
 
-                self.assert_(result[0][0].find('title_test')!=-1 , "File is not ignored")
+        result = self.query(QUERY)
+        print result
 
+        self.assert_(
+            result[0][0].find('title_test') != -1, "File is not ignored")
 
-       def test_Virtual_02(self):
+    def test_Virtual_02(self):
+        """
+        1) Insert in tracker a "virtual" file (can be a text file) tagged as Favourite
+        2) Start writing the file (with some sleep to make the process long)
+        3) Close the file, wait for tracker to discover it
+        4) Check the metadata of the file AND that the tag (favourite) is there
+        """
 
-               """
-               1) Insert in tracker a "virtual" file (can be a text file) tagged as Favourite
-               2) Start writing the file (with some sleep to make the process long)
-               3) Close the file, wait for tracker to discover it
-               4) Check the metadata of the file AND that the tag (favourite) is there
-                """
+        test_file = 'testfilename.txt'
+        file = dir_path + '/' + test_file
+        url = 'file://' + file
+        print url
 
-                test_file = 'testfilename.txt'
-                file= dir_path + '/' + test_file
-                url='file://' + file
-               print url
-
-               insert="""
+        insert = """
                INSERT { _:x a nfo:Image, nie:DataObject ; \
                nie:url <%s> ; \
                nie:title 'title_test';
                nao:hasTag [a nao:Tag ; nao:prefLabel "Favorite"] ;
                nie:plainTextContent 'This is script to test virtual file support'.
                }
-               """ %url
-               self.sparql_update(insert)
+               """ % url
+        self.sparql_update(insert)
 
-               time.sleep(3)
+        time.sleep(3)
 
-               QUERY="""
+        QUERY = """
                 SELECT ?label ?content WHERE { ?file a nie:DataObject ;nao:hasTag[a nao:Tag ;nao:prefLabel ?label]; nie:url <%s> ;
                nie:plainTextContent ?content.
                 }
-                """ %url
-
-               result=self.query(QUERY)
+                """ % url
 
-               self.assert_(result[0][0].find('Favorite')!=-1 and result[1][1].find("This is script to test virtual file support")!=-1, "File is not monitored by tracker")
+        result = self.query(QUERY)
 
+        self.assert_(result[0][0].find('Favorite') != -1 and result[1][1].find(
+            "This is script to test virtual file support") != -1, "File is not monitored by tracker")
 
 
 if __name__ == "__main__":
 
-        unittest.main()
-       if (os.path.exists(dir_path_parent) and os.path.isdir(dir_path_parent)):
-           os.rmdir (dir_path_parent)
+    unittest.main()
+    if (os.path.exists(dir_path_parent) and os.path.isdir(dir_path_parent)):
+        os.rmdir(dir_path_parent)
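
For context, the reformatted test above talks to Tracker purely over D-Bus:
SPARQL updates and queries go through the Resources interface, and
IgnoreNextUpdate is a method on the Miner interface. A minimal standalone
sketch of that call pattern (Python 2, same bus names as the test; the query
string here is only an example):

    import dbus

    bus = dbus.SessionBus()
    tracker = bus.get_object("org.freedesktop.Tracker1",
                             "/org/freedesktop/Tracker1/Resources")
    resources = dbus.Interface(
        tracker, dbus_interface="org.freedesktop.Tracker1.Resources")

    # Queries return a list of rows; each row is a list of string values.
    rows = resources.SparqlQuery(
        "SELECT ?u WHERE { ?u a nfo:FileDataObject } LIMIT 5")
    for row in rows:
        print row[0]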


