import datetime
import subprocess
from subprocess import PIPE
+from urllib2 import URLError
import feedparser
import lxml.html
# Too early!
return
- updates = self._poll(self.limit, self.max_age)
- cache.get_cache('spline-frontpage')[self.cache_key()] = updates
+ try:
+ updates = self._poll(self.limit, self.max_age)
+ cache.get_cache('spline-frontpage')[self.cache_key()] = updates
+ except Exception:
+        # Polling failed.  Be conservative: leave the cache untouched, since
+        # the previously cached data is likely still usable
+ pass
return
def _poll(self, limit, max_age):
feed = feedparser.parse(self.feed_url)
+ if feed.bozo and isinstance(feed.bozo_exception, URLError):
+ # Feed is DOWN. Bail here; otherwise, old entries might be lost
+ # just because, say, Bulbanews is down yet again
+ raise feed.bozo_exception
+
if not self.title:
self.title = feed.feed.title
FrontPageGit = namedtuple('FrontPageGit', ['source', 'time', 'log', 'tag'])
FrontPageGitCommit = namedtuple('FrontPageGitCommit',
- ['hash', 'author', 'time', 'subject', 'repo'])
+ ['hash', 'author', 'email', 'time', 'subject', 'repo'])
class GitSource(CachedSource):
"""Represents a git repository.
'git',
'--git-dir=' + repo_path,
'log',
- '--pretty=%h%x00%an%x00%at%x00%s',
+ '--pretty=%h%x00%an%x00%aE%x00%at%x00%s',
"{0}..{1}".format(since_tag, tag),
]
proc = subprocess.Popen(git_log_args, stdout=PIPE)
for line in proc.stdout:
- hash, author, time, subject = line.strip().split('\x00')
+ hash, author, email, time, subject = line.strip().split('\x00')
commits.append(
FrontPageGitCommit(
hash = hash,
author = author,
+ email = email,
time = datetime.datetime.fromtimestamp(int(time)),
subject = subject,
repo = repo_name,