My static website generator, built with poole. https://www.xythobuz.de

macros.py 13KB

import re
import itertools
import email.utils
import os.path
import time
import codecs
from datetime import datetime

DEFAULT_LANG = "en"
BASE_URL = "https://www.xythobuz.de"
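# Note: names such as pages, options, output, Page and htmlspecialchars are not
# defined in this file. They appear to be provided by poole itself when it loads
# macros.py at build time; this is stated here as an assumption about how poole
# wires up its macro module, not something defined below.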
# -----------------------------------------------------------------------------
# lightgallery helper macro
# -----------------------------------------------------------------------------

# call this macro like this:
# lightgallery([
#     [ "image-link", "description" ],
#     [ "image-link", "thumbnail-link", "description" ],
#     [ "youtube-link", "thumbnail-link", "description" ],
#     [ "video-link", "mime", "thumbnail-link", "image-link", "description" ]
# ])
def lightgallery(links):
    videos = [l for l in links if len(l) == 5]
    v_i = 0
    for v in videos:
        link, mime, thumb, poster, alt = v
        v_i += 1
        print '<div style="display:none;" id="video' + str(v_i) + '">'
        print '<video class="lg-video-object lg-html5" controls preload="none">'
        print '<source src="' + link + '" type="' + mime + '">'
        print 'Your browser does not support HTML5 video.'
        print '</video>'
        print '</div>'
    print '<div class="lightgallery">'
    v_i = 0
    for l in links:
        if (len(l) == 3) or (len(l) == 2):
            link = img = alt = ""
            if len(l) == 3:
                link, img, alt = l
            else:
                link, alt = l
                x = link.rfind('.')
                img = link[:x] + '_small' + link[x:]
            print '<div class="border" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"></a></div>'
        elif len(l) == 5:
            v_i += 1
            link, mime, thumb, poster, alt = l
            print '<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>'
        else:
            raise NameError('Invalid number of arguments for lightgallery')
    print '</div>'
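# Usage sketch for lightgallery, following the call convention documented above.
# All file names here are made-up placeholders, not files from this site; note
# that the two-element form derives its thumbnail by inserting '_small' before
# the extension (img/board.jpg -> img/board_small.jpg):
#
# lightgallery([
#     [ "img/board.jpg", "Assembled board" ],
#     [ "img/case.jpg", "img/case_small.jpg", "3D printed case" ],
#     [ "vid/demo.mp4", "video/mp4", "img/demo_thumb.jpg", "img/demo_poster.jpg", "Demo video" ]
# ])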
# -----------------------------------------------------------------------------
# github helper macros
# -----------------------------------------------------------------------------

import urllib, json

def restRequest(url):
    response = urllib.urlopen(url)
    data = json.loads(response.read())
    return data

def restReleases(user, repo):
    s = "https://api.github.com/repos/"
    s += user
    s += "/"
    s += repo
    s += "/releases"
    return restRequest(s)

def printLatestRelease(user, repo):
    repo_url = "https://github.com/" + user + "/" + repo
    print("<div class=\"releasecard\">")
    print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")
    releases = restReleases(user, repo)
    if len(releases) <= 0:
        print("No release has been published on GitHub yet.")
        print("</div>")
        return
    releases.sort(key=lambda x: x["published_at"], reverse=True)
    r = releases[0]
    release_url = r["html_url"]
    print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")
    if len(r["assets"]) <= 0:
        print("<br>No release assets have been published on GitHub for that.")
        print("</div>")
        return
    print("<ul>")
    print("Release Assets:")
    for a in r["assets"]:
        size = int(a["size"])
        ss = " "
        if size >= (1024 * 1024):
            ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
        elif size >= 1024:
            ss += "(%d KiB)" % (size // 1024)
        else:
            ss += "(%d Byte)" % (size)
        print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
    print("</ul></div>")
# -----------------------------------------------------------------------------
# preconvert hooks
# -----------------------------------------------------------------------------

def hook_preconvert_anotherlang():
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = [] # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG # Default language
        langs = [] # List of languages for the current page
        page_vpages = {} # Set of virtual pages for the current page
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
                [lang.strip() for lang in text_lang[1::2]], \
                text_lang[::2]))
        for lang, text in text_grouped.iteritems():
            spath = p.fname.split(os.path.sep)
            langs.append(lang)
            if lang == "en":
                filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]
            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not vp.has_key(attr):
                    vp[attr] = p[attr]
            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                    if p.has_key("title_%s" % lang) \
                    else p["title"]
            # Keep track of the current lang of the virtual page
            vp["lang"] = lang
            # Fix post name if exists
            if vp.has_key("post"):
                if lang == "en":
                    vp["post"] = vp["post"][:]
                else:
                    vp["post"] = vp["post"][:-len(lang) - 1]
            page_vpages[lang] = vp
        # Each virtual page has to know about its sister vpages
        for lang, vpage in page_vpages.iteritems():
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in page_vpages.iteritems()])
            vpage["other_lang"] = langs # set other langs and link
        vpages += page_vpages.values()
    pages[:] = vpages
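# Sketch of the split this hook performs, with made-up page content. A line
# starting with "lang: de" (or "lang=de") inside a page begins the German
# variant of that page:
#
#     English text of the post ...
#
#     lang: de
#
#     German text of the post ...
#
# For a source file named example.md this should yield the virtual pages
# example.md (English) and example.de.md (German); a title_de page attribute,
# if present, becomes the title of the German variant.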
_COMPAT = """        case "%s":
            $loc = "%s/%s";
            break;
"""

_COMPAT_404 = """        default:
            $loc = "%s";
            break;
"""

def hook_preconvert_compat():
    fp = open(os.path.join(options.project, "output", "index.php"), 'w')
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("    if (isset($_GET['lang'])) {\n")
    fp.write("        $_GET['p'] .= 'EN';\n")
    fp.write("    }\n")
    fp.write("    switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT % (tmp, "https://www.xythobuz.de", p.url))
    fp.write("\n")
    fp.write(_COMPAT_404 % "/404.html")
    fp.write("    }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("    if (php_sapi_name() == 'cgi') {\n")
    fp.write("        header('Status: 301 Moved Permanently');\n")
    fp.write("    } else {\n")
    fp.write("        header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("    }\n")
    fp.write("}\n");
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()
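# Sketch of one switch entry this hook would emit, assuming a hypothetical
# English page with the attribute compat: about whose output URL is about.html
# (values made up for illustration):
#
#         case "aboutEN":
#             $loc = "https://www.xythobuz.de/about.html";
#             break;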
_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""

_SITEMAP_URL = """
    <url>
        <loc>%s/%s</loc>
        <lastmod>%s</lastmod>
        <changefreq>%s</changefreq>
        <priority>%s</priority>
    </url>
"""

def hook_preconvert_sitemap():
    date = datetime.strftime(datetime.now(), "%Y-%m-%d")
    urls = []
    for p in pages:
        urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
    fname = os.path.join(options.project, "output", "sitemap.xml")
    fp = open(fname, 'w')
    fp.write(_SITEMAP % "".join(urls))
    fp.close()
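# Note: every page is stamped with the build date as <lastmod>. Individual
# pages can override the defaults above by setting changefreq and priority
# page attributes (for example changefreq: weekly and priority: 0.8, values
# shown here purely as an illustration).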
# -----------------------------------------------------------------------------
# postconvert hooks
# -----------------------------------------------------------------------------

_RSS = """<?xml version="1.0"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
    <title>%s</title>
    <link>%s</link>
    <atom:link href="%s" rel="self" type="application/rss+xml" />
    <description>%s</description>
    <language>en-us</language>
    <pubDate>%s</pubDate>
    <lastBuildDate>%s</lastBuildDate>
    <docs>http://blogs.law.harvard.edu/tech/rss</docs>
    <generator>Poole</generator>
    %s
</channel>
</rss>
"""

_RSS_ITEM = """
    <item>
        <title>%s</title>
        <link>%s</link>
        <description>%s</description>
        <pubDate>%s</pubDate>
        <guid>%s</guid>
    </item>
"""

def hook_postconvert_rss():
    items = []
    posts = [p for p in pages if "date" in p]
    posts.sort(key=lambda p: p.date, reverse=True)
    posts = posts[:10]
    for p in posts:
        title = p.title
        if "post" in p:
            title = p.post
        link = "%s/%s" % (BASE_URL, p.url)
        desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = htmlspecialchars(desc)
        date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
        date = email.utils.formatdate(date)
        items.append(_RSS_ITEM % (title, link, desc, date, link))
    items = "".join(items)
    title = "xythobuz.de Blog"
    link = "%s" % BASE_URL
    feed = "%s/rss.xml" % BASE_URL
    desc = htmlspecialchars("xythobuz Electronics & Software Projects")
    date = email.utils.formatdate()
    rss = _RSS % (title, link, feed, desc, date, date, items)
    fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
    fp.write(rss)
    fp.close()
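# Note: this hook expects each post's date attribute in the form YYYY-MM-DD and
# pins every feed entry's pubDate to 12:00 on that day, since the source pages
# carry no time of day. Only the ten most recent posts end up in rss.xml.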
_COMPAT_MOB = """        case "%s":
            $loc = "%s/%s";
            break;
"""

_COMPAT_404_MOB = """        default:
            $loc = "%s";
            break;
"""

def hook_postconvert_mobilecompat():
    directory = os.path.join(output, "mobile")
    if not os.path.exists(directory):
        os.makedirs(directory)
    fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("    if (isset($_GET['lang'])) {\n")
    fp.write("        $_GET['p'] .= 'EN';\n")
    fp.write("    }\n")
    fp.write("    switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT_MOB % (tmp, "https://www.xythobuz.de", re.sub(".html", ".html", p.url)))
    fp.write("\n")
    fp.write(_COMPAT_404_MOB % "/404.mob.html")
    fp.write("    }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("    if (php_sapi_name() == 'cgi') {\n")
    fp.write("        header('Status: 301 Moved Permanently');\n")
    fp.write("    } else {\n")
    fp.write("        header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("    }\n")
    fp.write("}\n");
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()
def hook_postconvert_size():
    file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'mp3', 'mp4', 'plist'])
    def matched_link(matchobj):
        try:
            path = matchobj.group(1)
            if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
                return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
            elif path.startswith("/"):
                path = path.strip("/")
            path = os.path.join("static/", path)
            size = os.path.getsize(path)
            if size >= (1024 * 1024):
                return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
            elif size >= 1024:
                return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
            else:
                return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
        except:
            print "Unable to estimate file size for %s" % matchobj.group(1)
            return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
    _re_url = '<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
    for p in pages:
        p.html = re.sub(_re_url, matched_link, p.html)
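# Example of the rewrite this hook performs, with a made-up file and size:
# a local download link such as <a href="/files/manual.pdf">Manual</a> in the
# converted HTML would become <a href="/files/manual.pdf">Manual</a>&nbsp;(1.2 MiB),
# with the size read from static/files/manual.pdf; external http/ftp links are
# left untouched.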