[library-web] moved some generic (non library.gnome.org specific) stuff to app.py
- From: Frederic Peters <fpeters src gnome org>
- To: svn-commits-list gnome org
- Cc:
- Subject: [library-web] moved some generic (non library.gnome.org specific) stuff to app.py
- Date: Thu, 6 Aug 2009 20:10:50 +0000 (UTC)
commit 098715479e7c3f92071fcf1ceda23899eba62f9c
Author: Frédéric Péters <fpeters 0d be>
Date: Thu Aug 6 22:03:31 2009 +0200
moved some generic (non library.gnome.org specific) stuff to app.py
src/app.py | 208 ++++++++++++++++++++++++++++++++++++++++++++++++++
src/document.py | 4 +-
src/lgo.py | 168 +---------------------------------------
src/modtypes/base.py | 2 +-
src/overlay.py | 2 +-
src/utils.py | 22 -----
6 files changed, 217 insertions(+), 189 deletions(-)
---
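A note on the shape of this refactoring: the generic pieces (option parsing, sanity checks, static file copying, the download cache, overlay application and symbols generation) now live in an App base class in src/app.py, and the library.gnome.org driver Lgo subclasses it. App.__init__() also registers the instance as the `app` builtin (lgo.py used to do this at the bottom of the file), which is how document.py's RemoteDocument can call app.download() without importing lgo; data_dir is likewise injected into __builtin__ by lgo.py before app is imported. A minimal sketch of the resulting layout, assuming nothing beyond what the diff below shows (method bodies stubbed out):

    # illustrative skeleton only -- the real code is in src/app.py and src/lgo.py
    import __builtin__

    class App:
        def __init__(self):
            # publish the single instance as the `app` builtin so that other
            # modules (e.g. document.py) can simply call app.download(...)
            __builtin__.__dict__['app'] = self
            self.documents = []

        def download(self, url):
            # generic cache-backed download (full version in the diff below)
            pass

    class Lgo(App):
        '''library.gnome.org specific driver, now built on top of App'''

        def run(self):
            pass

    if __name__ == '__main__':
        app = Lgo()       # __init__ already registered it as the builtin
        app.run()
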
diff --git a/src/app.py b/src/app.py
new file mode 100644
index 0000000..1d65557
--- /dev/null
+++ b/src/app.py
@@ -0,0 +1,208 @@
+# libgo - script to build library.gnome.org
+# Copyright (C) 2007-2009 Frederic Peters
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301 USA
+
+import __builtin__
+import glob
+import logging
+import os
+from optparse import OptionParser
+import stat
+import subprocess
+import sys
+import urllib2
+import xml.etree.ElementTree as ET
+
+# sqlite is optional; generate_symbols_files() below only uses it when available
+try:
+    import sqlite3 as sqlite
+except ImportError:
+    sqlite = None
+
+from config import Config
+import utils
+
+class App:
+    indexes_xsl_file = os.path.join(data_dir, 'xslt', 'indexes.xsl')
+    javascript_dir = os.path.join(data_dir, 'js')
+    skin_dir = os.path.join(data_dir, 'skin')
+
+    rebuild_all = False
+    debug = False
+
+    def __init__(self):
+        __builtin__.__dict__['app'] = self
+        self.documents = []
+
+        parser = OptionParser()
+        parser.add_option('-c', '--config', dest = 'config')
+        parser.add_option('-v', '--verbose',
+                action = 'count', dest = 'verbose', default = 0,
+                help = 'verbosity level (more -v for more verbose)')
+        parser.add_option('--rebuild', dest = 'rebuild_module',
+                help = 'rebuild documentation from FILENAME', metavar = 'FILENAME')
+        parser.add_option('--rebuild-all',
+                action = 'store_true', dest = 'rebuild_all',
+                help = 'rebuild all documents (even those that were already built)')
+        parser.add_option('--rebuild-language', dest = 'rebuild_language',
+                help = 'rebuild all documents in LANGUAGE', metavar = 'LANGUAGE')
+        parser.add_option('--skip-extra-tarballs',
+                action = 'store_false', dest = 'skip_extra_tarballs',
+                help = "don't look for documentation extra tarballs")
+        self.options, args = parser.parse_args()
+
+        logging.basicConfig(level = 10 + logging.CRITICAL - self.options.verbose*10,
+                formatter = utils.LogFormatter())
+        logging.getLogger().handlers[0].setFormatter(utils.LogFormatter())
+
+        self.debug = (self.options.verbose >= 5)
+        self.rebuild_all = self.options.rebuild_all
+        self.rebuild_language = self.options.rebuild_language
+
+        if self.options.config:
+            self.config = Config(filename = self.options.config)
+        else:
+            self.config = Config()
+
+        self.check_sanity()
+
+    def check_sanity(self):
+        for filename in [os.path.join(data_dir, 'overlay.xml'),
+                os.path.join(data_dir, 'catalog.xml')]:
+            if not os.path.exists(filename):
+                print >> sys.stderr, '%s is missing, you should run make' % filename
+                sys.exit(1)
+
+        if not self.config.output_dir.endswith(os.path.sep):
+            logging.warning('output dir should end with slash')
+            self.config.output_dir += os.path.sep
+
+    def copy_static_files(self):
+        if not os.path.exists(os.path.join(self.config.output_dir, 'js')):
+            os.makedirs(os.path.join(self.config.output_dir, 'js'))
+        if not os.path.exists(os.path.join(self.config.output_dir, 'skin')):
+            os.makedirs(os.path.join(self.config.output_dir, 'skin'))
+        if not os.path.exists(os.path.join(self.config.output_dir, 'skin/icons')):
+            os.makedirs(os.path.join(self.config.output_dir, 'skin/icons'))
+
+        for src in glob.glob('%s/*.js' % self.javascript_dir):
+            dst = os.path.join(self.config.output_dir, 'js', os.path.basename(src))
+            if not os.path.exists(dst) or \
+                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
+                open(dst, 'w').write(open(src, 'r').read())
+
+        for src in glob.glob('%s/*.css' % self.skin_dir) + \
+                glob.glob('%s/*.png' % self.skin_dir) + \
+                glob.glob('%s/*.gif' % self.skin_dir):
+            dst = os.path.join(self.config.output_dir, 'skin', os.path.basename(src))
+            if not os.path.exists(dst) or \
+                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
+                open(dst, 'w').write(open(src, 'r').read())
+
+        for src in glob.glob('%s/icons/*.png' % self.skin_dir):
+            dst = os.path.join(self.config.output_dir, 'skin/icons', os.path.basename(src))
+            if not os.path.exists(dst) or \
+                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
+                open(dst, 'w').write(open(src, 'r').read())
+
+    def download(self, url):
+        parsed_url = urllib2.urlparse.urlparse(url)
+        if parsed_url[0] == 'file':
+            return parsed_url[2]
+        filename = '/'.join(parsed_url[1:3])
+        cache_filename = os.path.join(self.config.download_cache_dir, filename)
+        cache_dir = os.path.split(cache_filename)[0]
+        if not os.path.exists(cache_dir):
+            os.makedirs(cache_dir)
+        if not os.path.exists(cache_filename):
+            logging.info('downloading %s' % url)
+            try:
+                s = urllib2.urlopen(url).read()
+            except urllib2.HTTPError, e:
+                logging.warning('error %s downloading %s' % (e.code, url))
+                return None
+            except urllib2.URLError, e:
+                logging.warning('error (URLError) downloading %s' % url)
+                return None
+            open(cache_filename, 'w').write(s)
+        return cache_filename
+
+    def apply_overlay(self):
+        logging.info('Applying overlay')
+        for doc in self.documents:
+            self.overlay.apply(doc)
+        self.documents.extend(self.overlay.get_new_docs())
+
+    def generate_symbols_files(self):
+        if not (self.config.symbols_dbm_filepath or self.config.symbols_sqlite_filepath):
+            return
+        logging.info('getting all symbols')
+
+        if self.rebuild_all:
+            if self.config.symbols_dbm_filepath and os.path.exists(
+                    self.config.symbols_dbm_filepath):
+                os.unlink(self.config.symbols_dbm_filepath)
+
+            if self.config.symbols_sqlite_filepath and os.path.exists(
+                    self.config.symbols_sqlite_filepath):
+                os.unlink(self.config.symbols_sqlite_filepath)
+
+        if self.config.symbols_dbm_filepath:
+            cmd = [self.config.httxt2dbm_path, '-i', '-', '-o', self.config.symbols_dbm_filepath]
+            logging.debug('executing %s' % ' '.join(cmd))
+            try:
+                httxt2dbm = subprocess.Popen(cmd, stdin = subprocess.PIPE).stdin
+            except OSError:
+                logging.error('failed to generate dbm symbols file (OSError)')
+                return
+        else:
+            httxt2dbm = None
+
+        if sqlite and self.config.symbols_sqlite_filepath:
+            sqlcon = sqlite.connect(self.config.symbols_sqlite_filepath, isolation_level=None)
+            sqlcon.execute('create table symbols(symbol, path)')
+            sqlcur = sqlcon.cursor()
+        else:
+            sqlcon = None
+            sqlcur = None
+
+        def symbols_iterator():
+            for doc in self.documents:
+                if doc.category != 'api':
+                    continue
+                if not doc.module or not doc.path:
+                    continue
+
+                web_dir = os.path.join(app.config.output_dir, doc.path[1:])
+
+                devhelp_path = os.path.join(web_dir, '%s.devhelp2' % doc.module)
+                if os.path.exists(devhelp_path):
+                    tree = ET.parse(devhelp_path)
+                    for keyword in tree.findall('//{http://www.devhelp.net/book}keyword'):
+                        key = keyword.attrib.get('name').replace('()', '').strip()
+                        if not key or ' ' in key:
+                            # ignore keys with spaces in their name
+                            continue
+                        value = os.path.join(doc.path, keyword.attrib.get('link'))
+                        if httxt2dbm:
+                            print >> httxt2dbm, key, value
+                        yield (key, value)
+            return
+
+        # if active, the dbm symbol file will be generated while iterating for
+        # SQLite.
+        if sqlcur:
+            sqlcur.executemany('insert into symbols values (?, ?)', symbols_iterator())
+            sqlcur.execute('create index symbols_idx on symbols(symbol)')
+        else:
+            for x in symbols_iterator():
+                pass
+
+        if httxt2dbm:
+            httxt2dbm.close()
+
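A quick usage note on App.download() above (not part of the commit itself): cached files are laid out under download_cache_dir simply by joining the URL's host and path, so repeated builds reuse the same local copy. Roughly, with a hypothetical cache directory named 'downloads':

    # rough illustration of the cache-path mapping in App.download();
    # 'downloads' and the URL are made-up values for the example
    import os
    import urlparse   # same module App.download() reaches via urllib2.urlparse

    url = 'http://download.gnome.org/sources/glib/glib-2.20.tar.gz'
    parsed_url = urlparse.urlparse(url)
    filename = '/'.join(parsed_url[1:3])          # host + path
    print os.path.join('downloads', filename)
    # downloads/download.gnome.org//sources/glib/glib-2.20.tar.gz
    # (the doubled slash comes from the leading '/' of the path; harmless on POSIX)

file:// URLs short-circuit the cache entirely and just return the local path, and download errors are logged and turned into a None return value rather than raised.
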
diff --git a/src/document.py b/src/document.py
index 1ee0b82..9402d5e 100644
--- a/src/document.py
+++ b/src/document.py
@@ -31,7 +31,7 @@ try:
 except ImportError:
     html5lib = None
-from utils import version_cmp, download
+from utils import version_cmp
 def assert_elementtree_node(node):
@@ -246,5 +246,5 @@ class RemoteDocument(Document):
     def download(self, href):
         # TODO: add some support (think <local update="daily"/>) so the file
        # can be "watched" for changes
-        return download(href)
+        return app.download(href)
diff --git a/src/lgo.py b/src/lgo.py
index 62c4804..23eff85 100755
--- a/src/lgo.py
+++ b/src/lgo.py
@@ -30,8 +30,6 @@ try:
 except ImportError:
     import xml.etree.ElementTree as ET
 import tarfile
-import glob
-import tempfile
 import stat
 import subprocess
 import dbm
@@ -47,10 +45,9 @@ except ImportError:
 data_dir = os.path.join(os.path.dirname(__file__), '../data')
 __builtin__.__dict__['data_dir'] = data_dir
-from config import Config
 import errors
 import utils
-from utils import version_cmp, is_version_number, download
+from utils import version_cmp, is_version_number
 from document import Document
 from overlay import Overlay
@@ -59,8 +56,8 @@ from modtypes.gnomedocbook import GnomeDocbookModule
 from modtypes.gtkdoc import GtkDocModule
 from modtypes.htmlfiles import HtmlFilesModule
+from app import App
-app = None
 # timeout for downloads, so it doesn't hang on connecting to sourceforge
 socket.setdefaulttimeout(10)
@@ -131,51 +128,9 @@ class FtpDotGnomeDotOrg:
         return []
-class Lgo:
+class Lgo(App):
     '''Main Application Class'''
-    indexes_xsl_file = os.path.join(data_dir, 'xslt', 'indexes.xsl')
-    javascript_dir = os.path.join(data_dir, 'js')
-    skin_dir = os.path.join(data_dir, 'skin')
-
-    rebuild_all = False
-    debug = False
-
-    def __init__(self):
-        self.documents = []
-
-        parser = OptionParser()
-        parser.add_option('-c', '--config', dest = 'config')
-        parser.add_option('-v', '--verbose',
-                action = 'count', dest = 'verbose', default = 0,
-                help = 'verbosity level (more -v for more verbose)')
-        parser.add_option('--rebuild', dest = 'rebuild_module',
-                help = 'rebuild documentation from FILENAME', metavar = 'FILENAME')
-        parser.add_option('--rebuild-all',
-                action = 'store_true', dest = 'rebuild_all',
-                help = 'rebuild all documents (even those that were already built)')
-        parser.add_option('--rebuild-language', dest = 'rebuild_language',
-                help = 'rebuild all documents in LANGUAGE', metavar = 'LANGUAGE')
-        parser.add_option('--skip-extra-tarballs',
-                action = 'store_false', dest = 'skip_extra_tarballs',
-                help = "don't look for documentation extra tarballs")
-        self.options, args = parser.parse_args()
-
-        logging.basicConfig(level = 10 + logging.CRITICAL - self.options.verbose*10,
-                formatter = utils.LogFormatter())
-        logging.getLogger().handlers[0].setFormatter(utils.LogFormatter())
-
-        self.debug = (self.options.verbose >= 5)
-        self.rebuild_all = self.options.rebuild_all
-        self.rebuild_language = self.options.rebuild_language
-
-        if self.options.config:
-            self.config = Config(filename = self.options.config)
-        else:
-            self.config = Config()
-
-        self.check_sanity()
-
     def run(self):
         self.ftp_gnome_org = FtpDotGnomeDotOrg(self.config)
         self.overlay = Overlay(os.path.join(data_dir, 'overlay.xml'))
@@ -198,17 +153,6 @@ class Lgo:
         self.generate_symbols_files()
         self.generate_static_pages()
-    def check_sanity(self):
-        for filename in [os.path.join(data_dir, 'overlay.xml'),
-                os.path.join(data_dir, 'catalog.xml')]:
-            if not os.path.exists(filename):
-                print >> sys.stderr, '%s is missing, you should run make' % filename
-                sys.exit(1)
-
-        if not self.config.output_dir.endswith(os.path.sep):
-            logging.warning('output dir should end with slash')
-            self.config.output_dir += os.path.sep
-
     def get_yelp_categories(self):
         logging.info('Getting categories from Yelp')
@@ -243,38 +187,13 @@ class Lgo:
         self.toc_mapping[subject.attrib['category']] = sub_id
     def copy_static_files(self):
-        if not os.path.exists(os.path.join(self.config.output_dir, 'js')):
-            os.makedirs(os.path.join(self.config.output_dir, 'js'))
-        if not os.path.exists(os.path.join(self.config.output_dir, 'skin')):
-            os.makedirs(os.path.join(self.config.output_dir, 'skin'))
-        if not os.path.exists(os.path.join(self.config.output_dir, 'skin/icons')):
-            os.makedirs(os.path.join(self.config.output_dir, 'skin/icons'))
+        App.copy_static_files(self)
         src = os.path.join(data_dir, 'gnome-library-search.xml')
         dst = os.path.join(self.config.output_dir, 'gnome-library-search.xml')
         if not os.path.exists(dst) or \
                 os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
            open(dst, 'w').write(open(src, 'r').read())
-
-        for src in glob.glob('%s/*.js' % self.javascript_dir):
-            dst = os.path.join(self.config.output_dir, 'js', os.path.basename(src))
-            if not os.path.exists(dst) or \
-                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
-                open(dst, 'w').write(open(src, 'r').read())
-
-        for src in glob.glob('%s/*.css' % self.skin_dir) + \
-                glob.glob('%s/*.png' % self.skin_dir) + \
-                glob.glob('%s/*.gif' % self.skin_dir):
-            dst = os.path.join(self.config.output_dir, 'skin', os.path.basename(src))
-            if not os.path.exists(dst) or \
-                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
-                open(dst, 'w').write(open(src, 'r').read())
-
-        for src in glob.glob('%s/icons/*.png' % self.skin_dir):
-            dst = os.path.join(self.config.output_dir, 'skin/icons', os.path.basename(src))
-            if not os.path.exists(dst) or \
-                    os.stat(src)[stat.ST_MTIME] > os.stat(dst)[stat.ST_MTIME]:
-                open(dst, 'w').write(open(src, 'r').read())
     def process_releases(self):
         '''Download GNOME releases'''
@@ -383,7 +302,7 @@ class Lgo:
             logging.error('error downloading %s' % url)
             return
         else:
-            filename = download(url)
+            filename = App.download(self, url)
         return filename
@@ -562,14 +481,6 @@ class Lgo:
             for doc_module in self.extract_modules(filename, nightly = True):
                 doc_module.process()
-
-    def apply_overlay(self):
-        logging.info('Applying overlay')
-        for doc in self.documents:
-            self.overlay.apply(doc)
-
-        self.documents.extend(self.overlay.get_new_docs())
-
     def generate_indexes(self):
         logging.info('generating indexes')
         indexes = ET.Element('indexes')
@@ -744,78 +655,9 @@ class Lgo:
         if rc != 0:
             logging.warn('%s failed with error %d' % (' '.join(cmd), rc))
-    def generate_symbols_files(self):
-        if not (self.config.symbols_dbm_filepath or self.config.symbols_sqlite_filepath):
-            return
-        logging.info('getting all symbols')
-
-        if self.rebuild_all:
-            if self.config.symbols_dbm_filepath and os.path.exists(
-                    self.config.symbols_dbm_filepath):
-                os.unlink(self.config.symbols_dbm_filepath)
-
-            if self.config.symbols_sqlite_filepath and os.path.exists(
-                    self.config.symbols_sqlite_filepath):
-                os.unlink(self.config.symbols_sqlite_filepath)
-
-        if self.config.symbols_dbm_filepath:
-            cmd = [self.config.httxt2dbm_path, '-i', '-', '-o', self.config.symbols_dbm_filepath]
-            logging.debug('executing %s' % ' '.join(cmd))
-            try:
-                httxt2dbm = subprocess.Popen(cmd, stdin = subprocess.PIPE).stdin
-            except OSError:
-                logging.error('failed to generate dbm symbols file (OSError)')
-                return
-        else:
-            httxt2dbm = None
-
-        if sqlite and self.config.symbols_sqlite_filepath:
-            sqlcon = sqlite.connect(self.config.symbols_sqlite_filepath, isolation_level=None)
-            sqlcon.execute('create table symbols(symbol, path)')
-            sqlcur = sqlcon.cursor()
-        else:
-            sqlcon = None
-            sqlcur = None
-
-        def symbols_iterator():
-            for doc in self.documents:
-                if doc.category != 'api':
-                    continue
-                if not doc.module or not doc.path:
-                    continue
-
-                web_dir = os.path.join(app.config.output_dir, doc.path[1:])
-
-                devhelp_path = os.path.join(web_dir, '%s.devhelp2' % doc.module)
-                if os.path.exists(devhelp_path):
-                    tree = ET.parse(devhelp_path)
-                    for keyword in tree.findall('//{http://www.devhelp.net/book}keyword'):
-                        key = keyword.attrib.get('name').replace('()', '').strip()
-                        if not key or ' ' in key:
-                            # ignore keys with spaces in their name
-                            continue
-                        value = os.path.join(doc.path, keyword.attrib.get('link'))
-                        if httxt2dbm:
-                            print >> httxt2dbm, key, value
-                        yield (key, value)
-            return
-
-        # if active, the dbm symbol file will be generated while iterating for
-        # SQLite.
-        if sqlcur:
-            sqlcur.executemany('insert into symbols values (?, ?)', symbols_iterator())
-            sqlcur.execute('create index symbols_idx on symbols(symbol)')
-        else:
-            for x in symbols_iterator():
-                pass
-
-        if httxt2dbm:
-            httxt2dbm.close()
-
 if __name__ == '__main__':
     app = Lgo()
     app.Document = Document
-    __builtin__.__dict__['app'] = app
     app.run()
diff --git a/src/modtypes/base.py b/src/modtypes/base.py
index d6ee4c5..c43f29b 100644
--- a/src/modtypes/base.py
+++ b/src/modtypes/base.py
@@ -22,7 +22,7 @@ import re
 import stat
 import tarfile
-from utils import version_cmp, is_version_number, download
+from utils import version_cmp, is_version_number
 licence_modules = ['fdl', 'gpl', 'lgpl']
diff --git a/src/overlay.py b/src/overlay.py
index 29ef9c7..b3ad6c5 100644
--- a/src/overlay.py
+++ b/src/overlay.py
@@ -22,7 +22,7 @@ except ImportError:
     import xml.etree.ElementTree as ET
 from document import RemoteDocument
-from utils import version_cmp, is_version_number, download
+from utils import version_cmp, is_version_number
 class SubIndex:
diff --git a/src/utils.py b/src/utils.py
index 5f8db39..dfbde93 100644
--- a/src/utils.py
+++ b/src/utils.py
@@ -79,28 +79,6 @@ def version_cmp(x, y):
 def is_version_number(v):
     return re.match('\d+\.\d+', v) is not None
-def download(href):
-    parsed_url = urllib2.urlparse.urlparse(href)
-    if parsed_url[0] == 'file':
-        return parsed_url[2]
-    filename = '/'.join(parsed_url[1:3])
-    cache_filename = os.path.join(app.config.download_cache_dir, filename)
-    cache_dir = os.path.split(cache_filename)[0]
-    if not os.path.exists(cache_dir):
-        os.makedirs(cache_dir)
-    if not os.path.exists(cache_filename):
-        logging.info('downloading %s' % href)
-        try:
-            s = urllib2.urlopen(href).read()
-        except urllib2.HTTPError, e:
-            logging.warning('error %s downloading %s' % (e.code, href))
-            return None
-        except urllib2.URLError, e:
-            logging.warning('error (URLError) downloading %s' % href)
-            return None
-        open(cache_filename, 'w').write(s)
-    return cache_filename
-
 class LogFormatter(logging.Formatter):
     '''Class used for formatting log messages'''
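
One design note on generate_symbols_files(), now in App: a single generator walks the *.devhelp2 files once and feeds both back ends at the same time. It writes each symbol to the httxt2dbm pipe as a side effect while yielding the same (symbol, path) row for sqlite's executemany(); when sqlite is unavailable the generator is simply drained in a for loop so the dbm file still gets written. A stripped-down, standalone sketch of that one-pass/two-sinks pattern (symbols and file names below are made up, and a plain file stands in for the httxt2dbm stdin pipe):

    # minimal sketch of the pattern used by App.generate_symbols_files();
    # everything here (symbols, file names) is illustrative only
    import sqlite3 as sqlite

    symbols = [('g_free', '/devel/glib/stable/glib-Memory-Allocation.html#g-free'),
               ('gtk_init', '/devel/gtk/stable/gtk-General.html#gtk-init')]
    dbm_input = open('symbols.txt', 'w')    # stands in for the httxt2dbm stdin pipe

    def symbols_iterator():
        for key, value in symbols:
            print >> dbm_input, key, value  # side effect: flat text file for httxt2dbm
            yield (key, value)              # row for the sqlite database

    sqlcon = sqlite.connect('symbols.sqlite', isolation_level=None)
    sqlcon.execute('create table symbols(symbol, path)')
    # executemany() drains the generator, so both outputs are built in one pass
    sqlcon.cursor().executemany('insert into symbols values (?, ?)', symbols_iterator())
    sqlcon.execute('create index symbols_idx on symbols(symbol)')
    dbm_input.close()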