diff --git a/append.py b/append.py index 4953c42..b5079b5 100755 --- a/append.py +++ b/append.py @@ -8,9 +8,9 @@ import re from BeautifulSoup import BeautifulSoup import shutil import time +import datetime + -base_urls = { 'itunes' : 'http://traffic.libsyn.com/cmdln', - 'other' : 'http://cmdln.evenflow.nl/mp3' } def __fetch_feed(url): try: @@ -23,15 +23,18 @@ def __fetch_feed(url): logging.debug('Network failure reason, %s.' % e.reason) return None -def __append(feed, suffix, append_fn, args=None): + +def __append(feed, suffix, append_fn): latest = __fetch_feed('cmdln_%s.xml' % suffix).entries[0] entry = feed.entries[0] if latest.title.find(entry.title) != -1: logging.info('%s is up to date.' % suffix) return + base_url = 'http://www.archive.org/download/%s' % __archive_slug(entry.title) filename = 'cmdln_%s.xml' % suffix - backup = 'cmdln_%s.xml.bak' % suffix + today = datetime.date.today() + backup = '%s.%s' % (filename, today.strftime('%Y-%m-%d')) shutil.copy(filename, backup) f = open(backup) o = open(filename, 'w') @@ -40,7 +43,7 @@ def __append(feed, suffix, append_fn, args=None): updated = time.strftime('%a, %d %b %Y %X +0000', feed.updated) for line in f: if line.find('') != -1 and not firstItem: - append_fn(entry, o, suffix, args) + append_fn(entry, o, suffix, base_url) firstItem = True if line.startswith(' '): line = ' %s\n' % updated @@ -52,8 +55,8 @@ def __append(feed, suffix, append_fn, args=None): o.close() -def __append_non_itunes(entry, output, suffix, args): - (url, mime_type, size) = __enclosure(entry.enclosures, base_urls['other'], suffix) +def __append_non_itunes(entry, output, suffix, base_url): + (url, mime_type, size) = __enclosure(entry.enclosures, base_url, suffix) output.write(""" %(title)s (Comment Line 240-949-2638) %(link)s @@ -73,11 +76,13 @@ def __append_non_itunes(entry, output, suffix, args): logging.info('Inserted new %s item.' 
% suffix) -def __append_itunes(entry, output, suffix, args): +def __append_itunes(entry, output, suffix, base_url): description = __description(entry.content) soup = BeautifulSoup(description) summary = '\n\n'.join([''.join(p.findAll(text=True)) for p in soup.findAll('p')]) - (url, mime_type, size) = __enclosure(entry.enclosures, base_urls['itunes'], suffix) + (url, mime_type, size) = __enclosure(entry.enclosures, base_url, suffix) + if size == 0: + raise Exception('Could not find media, %s.' % base_url) output.write(""" %(title)s (Comment Line 240-949-2638) %(link)s @@ -101,7 +106,7 @@ def __append_itunes(entry, output, suffix, args): 'size' : size, 'subtitle' : ''.join(soup.contents[0].findAll(text = True)), 'summary' : summary, - 'duration' : args[1] }) + 'duration' : entry.itunes_duration }) logging.info('Inserted new %s item.' % suffix) @@ -134,6 +139,16 @@ def __enclosure(enclosures, base_url, suffix): return (url, mime_type, size) +def __archive_slug(title): + slug = re.sub(r'([^0-9])-([^0-9])', r'\1\2', title) + slug = re.sub('[^A-Za-z0-9\-]', ' ', slug) + slug = re.sub(' {2,}', ' ', slug) + tokens = slug.split(' ') + tokens = [t.capitalize() for t in tokens] + slug = ''.join(tokens) + return slug + + def main(): logging.basicConfig(level=logging.INFO, format='%(message)s') @@ -142,12 +157,9 @@ def main(): logging.error('Failed to fetch feed.') sys.exit(1) - if len(sys.argv) > 1: - base_urls['itunes'] = 'http://www.archive.org/download/%s' % sys.argv[2] - base_urls['other'] = 'http://www.archive.org/download/%s' % sys.argv[2] __append(feed, 'mp3', __append_non_itunes) __append(feed, 'ogg', __append_non_itunes) - __append(feed, 'm4a', __append_itunes, sys.argv) + __append(feed, 'm4a', __append_itunes) if __name__ == "__main__": diff --git a/encode.bash b/encode.bash index b596b07..f697151 100755 --- a/encode.bash +++ b/encode.bash @@ -72,7 +72,7 @@ lame -b 112 \ --tc "${comment}" \ --tg "${genre}" \ --id3v2-only \ ---noreplaygain i\ +--noreplaygain \ 
cmdln.net_${date}.wav \ cmdln.net_${date}.mp3 diff --git a/relink.py b/relink.py index 2483ea6..4f4f894 100755 --- a/relink.py +++ b/relink.py @@ -5,13 +5,20 @@ import urllib2 from urllib2 import HTTPError, URLError import os.path import logging +import shutil +import datetime def __repoint(): logging.basicConfig(level=logging.INFO, format='%(message)s') - f = open("cmdln_m4a.xml") + today = datetime.date.today() + filename = 'cmdln_m4a.xml' + backup = '%s.%s' % (filename, today.strftime('%Y-%m-%d')) + shutil.copy(filename, backup) + f = open(backup) + o = open(filename, 'w') try: soup = BeautifulStoneSoup(f) enclosures = soup.findAll('enclosure') @@ -29,9 +36,10 @@ def __repoint(): enclosure['url'] = rewritten enclosure['type'] = mime_type enclosure['length'] = length - print soup + o.write(str(soup)) finally: f.close() + o.close() def __archive_slug(title):