[PATCH] Inline highlighting performance improvements
- From: Piotr Piastucki <the_leech@users.berlios.de>
- Subject: [PATCH] Inline highlighting performance improvements
- Date: Fri, 17 Jul 2009 17:29:29 +0200
---
filediff.py | 92 +++++++++++++++++++++++++++++++++++++----------------------
1 files changed, 58 insertions(+), 34 deletions(-)
diff --git a/filediff.py b/filediff.py
index acc1546..145d061 100644
--- a/filediff.py
+++ b/filediff.py
@@ -91,6 +91,7 @@ class FileDiff(melddoc.MeldDoc, gnomeglade.Component):
self._sync_vscroll_lock = False
self._sync_hscroll_lock = False
self.linediffer = diffutil.Differ()
+ self._inline_cache = []
for text in self.textview:
text.set_wrap_mode( self.prefs.edit_wrap_lines )
for buf in self.textbuffer:
@@ -622,47 +623,70 @@ class FileDiff(melddoc.MeldDoc, gnomeglade.Component):
mgr.clear()
def _update_highlighting(self):
- for b in self.textbuffer:
- taglist = ["delete line", "conflict line", "replace line", "inline line"]
- table = b.get_tag_table()
- for tagname in taglist:
- tag = table.lookup(tagname)
+ alltexts = [b for b in self._get_texts(raw=1)]
+ alltags = [b.get_tag_table().lookup("inline line") for b in self.textbuffer]
+ clearchunks = len(self._inline_cache) != 0
+ if not clearchunks:
+ for b, tag in zip(self.textbuffer, alltags):
b.remove_tag(tag, b.get_start_iter(), b.get_end_iter() )
+ newcache = []
+ startlines = [0] * 3
for chunk in self.linediffer.all_changes():
for i,c in enumerate(chunk):
if c and c[0] == "replace":
bufs = self.textbuffer[1], self.textbuffer[i*2]
- #tags = [b.get_tag_table().lookup("replace line") for b in bufs]
- starts = [b.get_iter_at_line(l) for b,l in zip(bufs, (c[1],c[3])) ]
- text1 = "\n".join( self._get_texts(raw=1)[1 ][c[1]:c[2]] ).encode("utf16")
- text1 = struct.unpack("%iH"%(len(text1)/2), text1)[1:]
- textn = "\n".join( self._get_texts(raw=1)[i*2][c[3]:c[4]] ).encode("utf16")
- textn = struct.unpack("%iH"%(len(textn)/2), textn)[1:]
-
- tags = [b.get_tag_table().lookup("inline line") for b in bufs]
- # For very long sequences, bail rather than trying a very slow comparison
- inline_limit = 8000 # arbitrary constant
- if len(text1) > inline_limit and len(textn) > inline_limit:
- ends = [b.get_iter_at_line(l) for b, l in zip(bufs, (c[2], c[4]))]
- for i in range(2):
- bufs[i].apply_tag(tags[i], starts[i], ends[i])
- continue
-
- matcher = difflib.SequenceMatcher(None, text1, textn)
- #print "<<<\n%s\n---\n%s\n>>>" % (text1, textn)
- back = (0,0)
- for o in matcher.get_opcodes():
- if o[0] == "equal":
- if (o[2]-o[1] < 3) or (o[4]-o[3] < 3):
- back = o[4]-o[3], o[2]-o[1]
+ text1 = alltexts[1][c[1]:c[2]]
+ textn = alltexts[i*2][c[3]:c[4]]
+ cacheitem = (i, c, text1, textn)
+ newcache.append(cacheitem)
+ notincache = not cacheitem in self._inline_cache
+ if clearchunks:
+ # clean all previous non-replace chunks + this chunk if it was not found in the cache
+ bufs[1].remove_tag(alltags[i*2], get_iter_at_line_or_eof(bufs[1], startlines[i*2]), get_iter_at_line_or_eof(bufs[1], c[3 + int(notincache)]))
+ bufs[0].remove_tag(alltags[1], get_iter_at_line_or_eof(bufs[0], startlines[1]), get_iter_at_line_or_eof(bufs[0], c[1 + int(notincache)]))
+ startlines[i*2] = c[4]
+ startlines[1] = c[2]
+
+ if notincache:
+ tags = alltags[1], alltags[i*2]
+ #tags = [b.get_tag_table().lookup("replace line") for b in bufs]
+ starts = [get_iter_at_line_or_eof(b, l) for b,l in zip(bufs, (c[1],c[3])) ]
+ text1 = "\n".join(alltexts[1][c[1]:c[2]] ).encode("utf16")
+ text1 = struct.unpack("%iH"%(len(text1)/2), text1)[1:]
+ textn = "\n".join(alltexts[i*2][c[3]:c[4]] ).encode("utf16")
+ textn = struct.unpack("%iH"%(len(textn)/2), textn)[1:]
+
+ # For very long sequences, bail rather than trying a very slow comparison
+ inline_limit = 8000 # arbitrary constant
+ if len(text1) > inline_limit and len(textn) > inline_limit:
+ ends = [get_iter_at_line_or_eof(b, l) for b, l in zip(bufs, (c[2], c[4]))]
+ for i in range(2):
+ bufs[i].apply_tag(tags[i], starts[i], ends[i])
continue
- for i in range(2):
- s,e = starts[i].copy(), starts[i].copy()
- s.forward_chars( o[1+2*i] - back[i] )
- e.forward_chars( o[2+2*i] )
- bufs[i].apply_tag(tags[i], s, e)
+
+ matcher = difflib.SequenceMatcher(None, text1, textn)
+ #print "<<<\n%s\n---\n%s\n>>>" % (text1, textn)
back = (0,0)
- yield 1
+ for o in matcher.get_opcodes():
+ if o[0] == "equal":
+ if (o[2]-o[1] < 3) or (o[4]-o[3] < 3):
+ back = o[4]-o[3], o[2]-o[1]
+ continue
+ for i in range(2):
+ s,e = starts[i].copy(), starts[i].copy()
+ s.forward_chars( o[1+2*i] - back[i] )
+ e.forward_chars( o[2+2*i] )
+ bufs[i].apply_tag(tags[i], s, e)
+ back = (0,0)
+ yield 1
+
+ # clean up all trailing lines
+ if clearchunks:
+ for b, tag, start in zip(self.textbuffer, alltags, startlines):
+ b.remove_tag(tag, get_iter_at_line_or_eof(b, start), b.get_end_iter() )
+ self._inline_cache = newcache
+ yield 1
+
def on_textview_expose_event(self, textview, event):
if self.num_panes == 1:
--
1.6.0.4
--------------010800060504040108030404--
[Date Prev][Date Next] [Thread Prev][Thread Next] [Thread Index] [Date Index] [Author Index]