My static website generator using poole https://www.xythobuz.de
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

macros.py 20KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548
  1. # -*- coding: utf-8 -*-
  2. import re
  3. import itertools
  4. import email.utils
  5. import os.path
  6. import time
  7. import codecs
  8. from datetime import datetime
  9. DEFAULT_LANG = "en"
  10. BASE_URL = "https://www.xythobuz.de"
  11. # -----------------------------------------------------------------------------
  12. # sub page helper macro
  13. # -----------------------------------------------------------------------------
  14. def backToParent():
  15. url = page.get("parent", "") + ".html"
  16. posts = [p for p in pages if p.url == url]
  17. if len(posts) > 0:
  18. p = posts[0]
  19. print '<span class="listdesc">[...back to ' + p.title + ' overview](' + p.url + ')</span>'
  20. # -----------------------------------------------------------------------------
  21. # table helper macro
  22. # -----------------------------------------------------------------------------
  23. def tableHelper(style, header, content):
  24. print "<table>"
  25. if (header != None) and (len(header) == len(style)):
  26. print "<tr>"
  27. for h in header:
  28. print "<th>" + h + "</th>"
  29. print "</tr>"
  30. for ci in range(0, len(content)):
  31. if len(content[ci]) != len(style):
  32. # invalid call of table helper!
  33. continue
  34. print "<tr>"
  35. for i in range(0, len(style)):
  36. s = style[i]
  37. td_style = ""
  38. if "monospaced" in s:
  39. td_style += " font-family: monospace;"
  40. if "align-last-right" in s:
  41. if ci == (len(content) - 1):
  42. td_style += " text-align: right;"
  43. else:
  44. if "align-center" in s:
  45. td_style += " text-align: center;"
  46. elif "align-right" in s:
  47. td_style += " text-align: right;"
  48. elif "align-center" in s:
  49. td_style += " text-align: center;"
  50. td_args = ""
  51. if td_style != "":
  52. td_args = " style=\"" + td_style + "\""
  53. print "<td" + td_args + ">"
  54. if isinstance(content[ci][i], tuple):
  55. text, link = content[ci][i]
  56. print "<a href=\"" + link + "\">" + text + "</a>"
  57. else:
  58. text = content[ci][i]
  59. print text
  60. print "</td>"
  61. print "</tr>"
  62. print "</table>"
  63. # -----------------------------------------------------------------------------
  64. # menu helper macro
  65. # -----------------------------------------------------------------------------
  66. def githubCommitBadge(p):
  67. if p.get("github", "") != "":
  68. link = p.get("git", p.github)
  69. linkParts = p.github.split("/")
  70. if len(linkParts) >= 5:
  71. return "<a href=\"" + link + "\"><img src=\"https://img.shields.io/github/last-commit/" + linkParts[3] + "/" + linkParts[4] + ".svg?logo=git&style=flat\" /></a>"
  72. return ""
  73. def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = "", showLastCommit = True):
  74. title = p.title
  75. if lang != "":
  76. if p.get("title_" + lang, "") != "":
  77. title = p.get("title_" + lang, "")
  78. if p.title == "Blog":
  79. title = p.post
  80. year = p.get("date", "")[0:4]
  81. if year != lastyear:
  82. lastyear = year
  83. if yearsAsHeading:
  84. print "\n\n#### %s\n" % (year)
  85. dateto = ""
  86. if p.get("date", "" != ""):
  87. year = p.get("date", "")[0:4]
  88. if showOnlyStartDate:
  89. dateto = " (%s)" % (year)
  90. if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
  91. if showDateSpan:
  92. dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
  93. if nicelyFormatFullDate:
  94. dateto = " - " + datetime.strptime(p.get("update", p.date), "%Y-%m-%d").strftime("%B %d, %Y")
  95. print " * **[%s](%s)**%s" % (title, p.url, dateto)
  96. if p.get("description", "") != "":
  97. description = p.get("description", "")
  98. if lang != "":
  99. if p.get("description_" + lang, "") != "":
  100. description = p.get("description_" + lang, "")
  101. print "<br><span class=\"listdesc\">" + description + "</span>"
  102. if showLastCommit:
  103. link = githubCommitBadge(p)
  104. if len(link) > 0:
  105. print "<br>" + link
  106. return lastyear
  107. def printRecentMenu(count = 5):
  108. posts = [p for p in pages if "date" in p]
  109. posts.sort(key=lambda p: p.get("update", p.get("date")), reverse=True)
  110. for p in posts[0:count]:
  111. printMenuItem(p, False, False, False, True, "0", "", False)
  112. def printBlogMenu():
  113. posts = [p for p in pages if "post" in p]
  114. posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
  115. lastyear = "0"
  116. for p in posts:
  117. lastyear = printMenuItem(p, True, False, False, True, lastyear)
  118. def printProjectsMenu():
  119. # prints all pages with parent 'projects' or 'stuff'.
  120. # first the ones without date, sorted by position.
  121. # then afterwards those with date, split by year.
  122. # also supports blog posts with parent.
  123. enpages = [p for p in pages if p.lang == "en"]
  124. dpages = [p for p in enpages if p.get("date", "") == ""]
  125. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  126. mpages.sort(key=lambda p: [int(p.get("position", "999"))])
  127. for p in mpages:
  128. printMenuItem(p)
  129. dpages = [p for p in enpages if p.get("date", "") != ""]
  130. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  131. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  132. lastyear = "0"
  133. for p in mpages:
  134. lastyear = printMenuItem(p, True, True, False, False, lastyear)
  135. def print3DPrintingMenu():
  136. mpages = [p for p in pages if p.get("parent", "") == "3d-printing" and p.lang == "en"]
  137. mpages.sort(key=lambda p: int(p["position"]))
  138. for p in mpages:
  139. printMenuItem(p, False, True, True)
  140. def printQuadcopterMenu():
  141. mpages = [p for p in pages if p.get("parent", "") == "quadcopters" and p.lang == "en"]
  142. mpages.sort(key=lambda p: int(p["position"]))
  143. for p in mpages:
  144. printMenuItem(p, False, True, True)
  145. def printQuadcopterRelatedMenu():
  146. mpages = [p for p in pages if p.get("show_in_quadcopters", "false") == "true"]
  147. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  148. for p in mpages:
  149. printMenuItem(p, False, True, True)
  150. def printRobotMenuEnglish():
  151. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
  152. mpages.sort(key=lambda p: int(p["position"]))
  153. for p in mpages:
  154. printMenuItem(p)
  155. def printRobotMenuDeutsch():
  156. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
  157. mpages.sort(key=lambda p: int(p["position"]))
  158. for p in mpages:
  159. printMenuItem(p, False, False, False, False, "0", "de")
  160. # -----------------------------------------------------------------------------
  161. # lightgallery helper macro
  162. # -----------------------------------------------------------------------------
  163. # call this macro like this
  164. # lightgallery([
  165. # [ "image-link", "description" ],
  166. # [ "image-link", "thumbnail-link", "description" ],
  167. # [ "youtube-link", "thumbnail-link", "description" ],
  168. # [ "video-link", "mime", "thumbnail-link", "image-link", "description" ]
  169. # ])
  170. def lightgallery(links):
  171. videos = [l for l in links if len(l) == 5]
  172. v_i = -1
  173. for v in videos:
  174. link, mime, thumb, poster, alt = v
  175. v_i += 1
  176. print '<div style="display:none;" id="video' + str(v_i) + '">'
  177. print '<video class="lg-video-object lg-html5" controls preload="none">'
  178. print '<source src="' + link + '" type="' + mime + '">'
  179. print 'Your browser does not support HTML5 video.'
  180. print '</video>'
  181. print '</div>'
  182. print '<div class="lightgallery">'
  183. v_i = -1
  184. for l in links:
  185. if (len(l) == 3) or (len(l) == 2):
  186. link = img = alt = ""
  187. if len(l) == 3:
  188. link, img, alt = l
  189. else:
  190. link, alt = l
  191. x = link.rfind('.')
  192. img = link[:x] + '_small' + link[x:]
  193. print '<div class="border" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"></a></div>'
  194. elif len(l) == 5:
  195. v_i += 1
  196. link, mime, thumb, poster, alt = videos[v_i]
  197. print '<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>'
  198. else:
  199. raise NameError('Invalid number of arguments for lightgallery')
  200. print '</div>'
  201. # -----------------------------------------------------------------------------
  202. # github helper macros
  203. # -----------------------------------------------------------------------------
  204. import urllib, json
  205. def restRequest(url):
  206. response = urllib.urlopen(url)
  207. data = json.loads(response.read())
  208. return data
  209. def restReleases(user, repo):
  210. s = "https://api.github.com/repos/"
  211. s += user
  212. s += "/"
  213. s += repo
  214. s += "/releases"
  215. return restRequest(s)
  216. def printLatestRelease(user, repo):
  217. repo_url = "https://github.com/" + user + "/" + repo
  218. print("<div class=\"releasecard\">")
  219. print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")
  220. releases = restReleases(user, repo)
  221. if len(releases) <= 0:
  222. print("No release has been published on GitHub yet.")
  223. print("</div>")
  224. return
  225. releases.sort(key=lambda x: x["published_at"], reverse=True)
  226. r = releases[0]
  227. release_url = r["html_url"]
  228. print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")
  229. if len(r["assets"]) <= 0:
  230. print("<br>No release assets have been published on GitHub for that.")
  231. print("</div>")
  232. return
  233. print("<ul>")
  234. print("Release Assets:")
  235. for a in r["assets"]:
  236. size = int(a["size"])
  237. ss = " "
  238. if size >= (1024 * 1024):
  239. ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
  240. elif size >= 1024:
  241. ss += "(%d KiB)" % (size // 1024)
  242. else:
  243. ss += "(%d Byte)" % (size)
  244. print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
  245. print("</ul></div>")
  246. # -----------------------------------------------------------------------------
  247. # preconvert hooks
  248. # -----------------------------------------------------------------------------
def hook_preconvert_anotherlang():
    """Split multi-language source pages into per-language virtual pages.

    Page sources may contain 'lang: xx' markers; the text before the
    first marker is the default language. For every language found, a
    virtual Page is created (English keeps the plain filename, others
    get a '.xx' filename infix), attributes are copied over, and each
    virtual page learns the URLs of its sister translations.
    Replaces the global `pages` list in place.
    """
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = [] # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG # Default language
        langs = [] # List of languages for the current page
        page_vpages = {} # Set of virtual pages for the current page
        # re.split with a capturing group yields [text, lang, text, lang, ...]:
        # even indices are bodies, odd indices are the language codes.
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
            [lang.strip() for lang in text_lang[1::2]], \
            text_lang[::2]))
        for lang, text in text_grouped.iteritems():
            spath = p.fname.split(os.path.sep)
            langs.append(lang)
            if lang == "en":
                # NOTE: "%s\g<0>" % "" is just "\g<0>" -- the English
                # virtual page keeps the original markdown filename.
                filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                # Other languages get a ".xx" infix before the extension.
                filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]
            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not vp.has_key(attr):
                    vp[attr] = p[attr]
            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                if p.has_key("title_%s" % lang) \
                else p["title"]
            # Keep track of the current lang of the virtual page
            vp["lang"] = lang
            # Fix post name if exists
            if vp.has_key("post"):
                if lang == "en":
                    vp["post"] = vp["post"][:]
                else:
                    # Strip the ".xx" language suffix from the post name.
                    vp["post"] = vp["post"][:-len(lang) - 1]
            page_vpages[lang] = vp
        # Each virtual page has to know about its sister vpages
        for lang, vpage in page_vpages.iteritems():
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in page_vpages.iteritems()])
            vpage["other_lang"] = langs # set other langs and link
        vpages += page_vpages.values()
    pages[:] = vpages
# PHP switch-case template mapping one old xyCMS page id (%s) to its
# new absolute URL (%s/%s). Used by hook_preconvert_compat.
_COMPAT = """ case "%s":
 $loc = "%s/%s";
 break;
"""
# PHP default-case template: redirect unknown page ids to the 404 page.
_COMPAT_404 = """ default:
 $loc = "%s";
 break;
"""
  300. def hook_preconvert_compat():
  301. fp = open(os.path.join(options.project, "output", "index.php"), 'w')
  302. fp.write("<?\n")
  303. fp.write("// Auto generated xyCMS compatibility index.php\n")
  304. fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
  305. fp.write("if (isset($_GET['p'])) {\n")
  306. fp.write(" if (isset($_GET['lang'])) {\n")
  307. fp.write(" $_GET['p'] .= 'EN';\n")
  308. fp.write(" }\n")
  309. fp.write(" switch($_GET['p']) {\n")
  310. for p in pages:
  311. if p.get("compat", "") != "":
  312. tmp = p["compat"]
  313. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  314. tmp = tmp + "EN"
  315. fp.write(_COMPAT % (tmp, "https://www.xythobuz.de", p.url))
  316. fp.write("\n")
  317. fp.write(_COMPAT_404 % "/404.html")
  318. fp.write(" }\n")
  319. fp.write("}\n")
  320. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  321. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  322. fp.write(" header('Status: 301 Moved Permanently');\n")
  323. fp.write(" } else {\n")
  324. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  325. fp.write(" }\n")
  326. fp.write("}\n");
  327. fp.write("header('Location: '.$loc);\n")
  328. fp.write("?>")
  329. fp.close()
# XML skeleton of the sitemap; %s receives the concatenated <url> entries.
_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""
# One sitemap <url> entry: base URL, page URL, last-modified date,
# change frequency and priority, in that order.
_SITEMAP_URL = """
<url>
<loc>%s/%s</loc>
<lastmod>%s</lastmod>
<changefreq>%s</changefreq>
<priority>%s</priority>
</url>
"""
  343. def hook_preconvert_sitemap():
  344. date = datetime.strftime(datetime.now(), "%Y-%m-%d")
  345. urls = []
  346. for p in pages:
  347. urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
  348. fname = os.path.join(options.project, "output", "sitemap.xml")
  349. fp = open(fname, 'w')
  350. fp.write(_SITEMAP % "".join(urls))
  351. fp.close()
  352. # -----------------------------------------------------------------------------
  353. # postconvert hooks
  354. # -----------------------------------------------------------------------------
# RSS 2.0 channel skeleton; placeholders are: title, site link, feed
# URL (atom:link), description, pubDate, lastBuildDate, joined items.
_RSS = """<?xml version="1.0"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>%s</title>
<link>%s</link>
<atom:link href="%s" rel="self" type="application/rss+xml" />
<description>%s</description>
<language>en-us</language>
<pubDate>%s</pubDate>
<lastBuildDate>%s</lastBuildDate>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
<generator>Poole</generator>
%s
</channel>
</rss>
"""
# One RSS <item>: title, link, description, pubDate, guid (same as link).
_RSS_ITEM = """
<item>
<title>%s</title>
<link>%s</link>
<description>%s</description>
<pubDate>%s</pubDate>
<guid>%s</guid>
</item>
"""
  380. def hook_postconvert_rss():
  381. items = []
  382. posts = [p for p in pages if "date" in p]
  383. posts.sort(key=lambda p: p.date, reverse=True)
  384. posts = posts[:10]
  385. for p in posts:
  386. title = p.title
  387. if "post" in p:
  388. title = p.post
  389. link = "%s/%s" % (BASE_URL, p.url)
  390. desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  391. desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  392. desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  393. desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  394. desc = htmlspecialchars(desc)
  395. date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
  396. date = email.utils.formatdate(date)
  397. items.append(_RSS_ITEM % (title, link, desc, date, link))
  398. items = "".join(items)
  399. title = "xythobuz.de Blog"
  400. link = "%s" % BASE_URL
  401. feed = "%s/rss.xml" % BASE_URL
  402. desc = htmlspecialchars("xythobuz Electronics & Software Projects")
  403. date = email.utils.formatdate()
  404. rss = _RSS % (title, link, feed, desc, date, date, items)
  405. fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
  406. fp.write(rss)
  407. fp.close()
# PHP switch-case template for the mobile compatibility redirect
# (same shape as _COMPAT above).
_COMPAT_MOB = """ case "%s":
 $loc = "%s/%s";
 break;
"""
# PHP default-case template: unknown mobile page ids go to the mobile 404.
_COMPAT_404_MOB = """ default:
 $loc = "%s";
 break;
"""
  416. def hook_postconvert_mobilecompat():
  417. directory = os.path.join(output, "mobile")
  418. if not os.path.exists(directory):
  419. os.makedirs(directory)
  420. fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
  421. fp.write("<?\n")
  422. fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
  423. fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
  424. fp.write("if (isset($_GET['p'])) {\n")
  425. fp.write(" if (isset($_GET['lang'])) {\n")
  426. fp.write(" $_GET['p'] .= 'EN';\n")
  427. fp.write(" }\n")
  428. fp.write(" switch($_GET['p']) {\n")
  429. for p in pages:
  430. if p.get("compat", "") != "":
  431. tmp = p["compat"]
  432. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  433. tmp = tmp + "EN"
  434. fp.write(_COMPAT_MOB % (tmp, "https://www.xythobuz.de", re.sub(".html", ".html", p.url)))
  435. fp.write("\n")
  436. fp.write(_COMPAT_404_MOB % "/404.mob.html")
  437. fp.write(" }\n")
  438. fp.write("}\n")
  439. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  440. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  441. fp.write(" header('Status: 301 Moved Permanently');\n")
  442. fp.write(" } else {\n")
  443. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  444. fp.write(" }\n")
  445. fp.write("}\n");
  446. fp.write("header('Location: '.$loc);\n")
  447. fp.write("?>")
  448. fp.close()
  449. def hook_postconvert_size():
  450. file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'plist'])
  451. def matched_link(matchobj):
  452. try:
  453. path = matchobj.group(1)
  454. if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
  455. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  456. elif path.startswith("/"):
  457. path = path.strip("/")
  458. path = os.path.join("static/", path)
  459. size = os.path.getsize(path)
  460. if size >= (1024 * 1024):
  461. return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
  462. elif size >= 1024:
  463. return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
  464. else:
  465. return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
  466. except:
  467. print "Unable to estimate file size for %s" % matchobj.group(1)
  468. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  469. _re_url = '<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
  470. for p in pages:
  471. p.html = re.sub(_re_url, matched_link, p.html)