Skip to content

Instantly share code, notes, and snippets.

@philipn
Created May 15, 2014 00:51
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save philipn/20f24bea0b58575f7668 to your computer and use it in GitHub Desktop.
Test script (no longer works) to export a LocalWiki site to static HTML.
import slumber
import requests
import os
import lxml.html
import urllib
from urlparse import urljoin
from utils import all, quote_fs
# Wiki to export and its REST API root.
SITE = 'https://trianglewiki.org'
api = slumber.API(urljoin(SITE, '/api/'))

# Everything is written beneath a 'dump' directory next to this script.
dump_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'dump')
if not os.path.exists(dump_path):
    os.mkdir(dump_path)

# Link schemes considered external; such links are left untouched.
PROTOCOLS = ['http://', 'https://', 'mailto:']
def process_content(s):
    """Rewrite a page's HTML links for the static dump layout and wrap it
    in a minimal HTML document.

    Each page lives in its own directory, so internal page links are
    rewritten to '../<fs-quoted-name>'.  Returns UTF-8 encoded bytes, or
    '' when the content cannot be parsed as HTML.
    """
    def _page_relative(link):
        # External links (http/https/mailto) pass through unchanged.
        if any(link.startswith(p) for p in PROTOCOLS):
            return link
        # Links into the page's own attachment directory are already local.
        if link.startswith('_files/'):
            return link
        # Internal page link: re-encode the page name the same way the
        # dump directories are named, and go up one level to reach it.
        enc_link = quote_fs(urllib.unquote(link).decode('utf-8'))
        return (u"../%s" % enc_link)

    try:
        html = lxml.html.fromstring(s)
    except lxml.etree.ParserError:
        # Empty/unparseable page content: emit nothing rather than crash.
        return ''
    html.rewrite_links(_page_relative)
    # NOTE(review): tostring() defaults to ASCII serialization with
    # character references, so interpolating into a unicode template is safe.
    content = lxml.html.tostring(html)
    content = u"""<html>
<head>
<link href="../_static/css/base.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="../_static/js/base.js"></script>
</head>
<body>
%s
</body>
</html>""" % content
    return content.encode('utf-8')
def save_page_content(page):
    """Write one page's processed HTML to dump/<fs-name>/index.html.

    *page* is an API page record; its 'name' and 'content' keys are used.
    """
    fsname = quote_fs(page['name'])
    page_dir = os.path.join(dump_path, fsname)
    if not os.path.exists(page_dir):
        os.mkdir(page_dir)
    content = process_content(page['content'])
    # 'with' guarantees the handle is closed even if the write fails.
    with open(os.path.join(page_dir, 'index.html'), 'w') as f:
        f.write(content)
def save_files(page):
fsname = quote_fs(page['name'])
page_dir = os.path.join(dump_path, fsname)
files_dir = os.path.join(page_dir, '_files')
if not os.path.exists(files_dir):
os.mkdir(files_dir)
# Get files on page
for file in all(api.file.get, slug=page['slug']):
print " getting file %s" % file['name']
r = requests.get(urljoin(SITE, file['file']))
f = open(os.path.join(files_dir, file['name']), 'w')
f.write(r.content)
f.close()
for page in all(api.page.get):
print 'Getting page %s' % page['name'].encode('utf-8')
save_page_content(page)
save_files(page)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment