#!/usr/bin/env python
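"""Build the static HTML pages of the site.

One template file is filled - via python string formatting - with the
snippets from the placeholder directory, the per-page content files and
the latest items of the development timeline RSS feed. One HTML file is
written per content directory.
"""
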
import os, re, sys

try:
    import feedparser
except ImportError:
    sys.stderr.write("Could not load python module 'feedparser'!\n")
    sys.stderr.write("Maybe you should run 'apt-get install python-feedparser'.\n")
    sys.exit(1)


class TemplateWriter:
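    """fill the page template with placeholder values and write the result

    Every file below the placeholder directory is read into a substitution
    dictionary under its filename; the special keys "entries" (the
    concatenated content files of one page) and "rss_content" (the rendered
    RSS feed) are added as well. The template references these values via
    python string formatting, e.g. a (purely hypothetical) template line
    could look like:

        <div id="news">%(rss_content)s</div>
    """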

    templatefile = "template.tmpl"
    tmpldir = "placeholder"
    contentdir = "content"
    tmplfileext = ".tmpl"
    outfileext = ".html"
    output_directory = "html"
    max_rss_items = 5
    ## regular expressions for unwanted file/directory names
    ## for now: no svn, no vi swap files, no backup files
    ignore_items = [ r'\.svn', r'\.swp$', r'~$' ]
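    ## note: all paths above are relative to the current working directory,
    ## so the script should be started from the directory that contains them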

    def __init__(self):
        self.placeholder = self.__get_placeholder_dict()
        self.template = open(self.templatefile).read()

    def get_sorted(self, flist):
        result = flist[:]
        result.sort()
        return result

    def get_filtered(self, flist):
        ## return only the items that do not match any of the ignore patterns
        result = []
        for item in flist:
            found = False
            for expression in self.ignore_items:
                if re.search(expression, item):
                    found = True
                    break
            if not found:
                result.append(item)
        return result

    def __get_placeholder_dict(self):
        """returns the common placeholder dictionary shared by all pages

        It contains one key per file in the placeholder directory plus
        'rss_content' - everything except the per-page 'entries' key.
        """
        placeholder = {}
        for tmpl in self.get_sorted(self.get_filtered(os.listdir(self.tmpldir))):
            tmplfile = os.path.join(self.tmpldir, tmpl)
            if not os.path.isfile(tmplfile):
                print " str.repl: skipping %s - not a file" % tmplfile
            else:
                placeholder[tmpl] = open(tmplfile).read().strip()
        placeholder["rss_content"] = self.get_rss_info()
        return placeholder

    def get_entries(self, html_name):
        """reads all content files of the given page (sorted by name) into one string
        """
        entries = ""
        for entry in self.get_sorted(self.get_filtered(os.listdir(
                os.path.join(self.contentdir, html_name)))):
            entries += open(os.path.join(self.contentdir, html_name, entry)).read()
        return entries

    def build_sites_from_template(self):
        print "Building:"
        for html in self.get_sorted(self.get_filtered(os.listdir(self.contentdir))):
            print " %s%s" % (html, self.outfileext)
            self.placeholder["entries"] = self.get_entries(html)
            ## start with the content of the template
            text = self.template
            ## repeat the substitution five times to resolve nested placeholders
            for _ in range(5):
                text = text % self.placeholder
            ## write the result (the output directory must already exist)
            outfile = open(os.path.join(self.output_directory, html + self.outfileext), "w")
            outfile.write(text)
            outfile.close()
        return

    def get_rss_info(self):
        """retrieve the rss feed from http://devel.cryptobox.org/timeline and render it as an html list"""
        timeline_url = r'http://devel.cryptobox.org/timeline?max=%d&wiki=off&ticket=on&changeset=on&milestone=off&format=rss' % self.max_rss_items
        ## feedparser entries act as dictionaries, so they can be used directly for string formatting
        entry_html = r'<li><p class="date">%(updated)s</p><a href="%(link)s">%(title)s</a>%(summary)s</li>'
        feed = feedparser.parse(timeline_url)
        if feed["entries"]:
            html_items = [ entry_html % e for e in feed["entries"] ]
            return '<ul class="recent_changes">\n' + '\n'.join(html_items) + '</ul>\n'
        else:
            return '<p>The latest development changes are temporarily unavailable. Sorry!</p>'


if __name__ == "__main__":
    TemplateWriter().build_sites_from_template()