My static website generator using poole https://www.xythobuz.de
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

macros.py 19KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516
  1. # -*- coding: utf-8 -*-
  2. import re
  3. import itertools
  4. import email.utils
  5. import os.path
  6. import time
  7. import codecs
  8. from datetime import datetime
# Default language code: pages without an explicit 'lang' marker are English.
DEFAULT_LANG = "en"
# Absolute base URL of the generated site; used for sitemap, RSS and
# rewriting relative links to absolute ones.
BASE_URL = "https://www.xythobuz.de"
  11. # -----------------------------------------------------------------------------
  12. # table helper macro
  13. # -----------------------------------------------------------------------------
  14. def tableHelper(style, header, content):
  15. print "<table>"
  16. if (header != None) and (len(header) == len(style)):
  17. print "<tr>"
  18. for h in header:
  19. print "<th>" + h + "</th>"
  20. print "</tr>"
  21. for ci in range(0, len(content)):
  22. if len(content[ci]) != len(style):
  23. # invalid call of table helper!
  24. continue
  25. print "<tr>"
  26. for i in range(0, len(style)):
  27. s = style[i]
  28. if "align-last-right" in s:
  29. if ci == (len(content) - 1):
  30. print "<td style=\"text-align: right;\">"
  31. else:
  32. if "align-center" in s:
  33. print "<td style=\"text-align: center;\">"
  34. else:
  35. print "<td>"
  36. elif "align-right" in s:
  37. print "<td style=\"text-align: right;\">"
  38. elif "align-center" in s:
  39. print "<td style=\"text-align: center;\">"
  40. else:
  41. print "<td>"
  42. if isinstance(content[ci][i], tuple):
  43. text, link = content[ci][i]
  44. print "<a href=\"" + link + "\">" + text + "</a>"
  45. else:
  46. text = content[ci][i]
  47. print text
  48. print "</td>"
  49. print "</tr>"
  50. print "</table>"
  51. # -----------------------------------------------------------------------------
  52. # menu helper macro
  53. # -----------------------------------------------------------------------------
  54. def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = ""):
  55. title = p.title
  56. if lang != "":
  57. if p.get("title_" + lang, "") != "":
  58. title = p.get("title_" + lang, "")
  59. if p.title == "Blog":
  60. title = p.post
  61. year = p.get("date", "")[0:4]
  62. if year != lastyear:
  63. lastyear = year
  64. if yearsAsHeading:
  65. print "\n\n#### %s\n" % (year)
  66. dateto = ""
  67. if p.get("date", "" != ""):
  68. year = p.get("date", "")[0:4]
  69. if showOnlyStartDate:
  70. dateto = " (%s)" % (year)
  71. if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
  72. if showDateSpan:
  73. dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
  74. if nicelyFormatFullDate:
  75. dateto = " - " + datetime.strptime(p.date, "%Y-%m-%d").strftime("%B %d, %Y")
  76. print " * **[%s](%s)**%s" % (title, p.url, dateto)
  77. if p.get("description", "") != "":
  78. description = p.get("description", "")
  79. if lang != "":
  80. if p.get("description_" + lang, "") != "":
  81. description = p.get("description_" + lang, "")
  82. print "<br><span class=\"listdesc\">" + description + "</span>"
  83. return lastyear
  84. def printRecentMenu(count = 5):
  85. posts = [p for p in pages if "date" in p]
  86. posts.sort(key=lambda p: p.get("date"), reverse=True)
  87. for p in posts[0:count]:
  88. printMenuItem(p, False, False, False, True)
  89. def printBlogMenu():
  90. posts = [p for p in pages if "post" in p]
  91. posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
  92. lastyear = "0"
  93. for p in posts:
  94. lastyear = printMenuItem(p, True, False, False, True, lastyear)
  95. def printProjectsMenu():
  96. # prints all pages with parent 'projects' or 'stuff'.
  97. # first the ones without date, sorted by position.
  98. # then afterwards those with date, split by year.
  99. # also supports blog posts with parent.
  100. enpages = [p for p in pages if p.lang == "en"]
  101. dpages = [p for p in enpages if p.get("date", "") == ""]
  102. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  103. mpages.sort(key=lambda p: [int(p.get("position", "999"))])
  104. for p in mpages:
  105. printMenuItem(p)
  106. dpages = [p for p in enpages if p.get("date", "") != ""]
  107. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  108. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  109. lastyear = "0"
  110. for p in mpages:
  111. lastyear = printMenuItem(p, True, True, False, False, lastyear)
  112. def print3DPrintingMenu():
  113. mpages = [p for p in pages if p.get("parent", "") == "3d-printing" and p.lang == "en"]
  114. mpages.sort(key=lambda p: int(p["position"]))
  115. for p in mpages:
  116. printMenuItem(p, False, True, True)
  117. def printQuadcopterMenu():
  118. mpages = [p for p in pages if p.get("parent", "") == "quadcopters" and p.lang == "en"]
  119. mpages.sort(key=lambda p: int(p["position"]))
  120. for p in mpages:
  121. printMenuItem(p, False, True, True)
  122. def printQuadcopterRelatedMenu():
  123. mpages = [p for p in pages if p.get("show_in_quadcopters", "false") == "true"]
  124. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  125. for p in mpages:
  126. printMenuItem(p, False, True, True)
  127. def printRobotMenuEnglish():
  128. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
  129. mpages.sort(key=lambda p: int(p["position"]))
  130. for p in mpages:
  131. printMenuItem(p)
  132. def printRobotMenuDeutsch():
  133. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
  134. mpages.sort(key=lambda p: int(p["position"]))
  135. for p in mpages:
  136. printMenuItem(p, False, False, False, False, "0", "de")
  137. # -----------------------------------------------------------------------------
  138. # lightgallery helper macro
  139. # -----------------------------------------------------------------------------
  140. # call this macro like this
  141. # lightgallery([
  142. # [ "image-link", "description" ],
  143. # [ "image-link", "thumbnail-link", "description" ],
  144. # [ "youtube-link", "thumbnail-link", "description" ],
  145. # [ "video-link", "mime", "thumbnail-link", "image-link", "description" ]
  146. # ])
  147. def lightgallery(links):
  148. videos = [l for l in links if len(l) == 5]
  149. v_i = 0
  150. for v in videos:
  151. link, mime, thumb, poster, alt = v
  152. v_i += 1
  153. print '<div style="display:none;" id="video' + str(v_i) + '">'
  154. print '<video class="lg-video-object lg-html5" controls preload="none">'
  155. print '<source src="' + link + '" type="' + mime + '">'
  156. print 'Your browser does not support HTML5 video.'
  157. print '</video>'
  158. print '</div>'
  159. print '<div class="lightgallery">'
  160. v_i = 0
  161. for l in links:
  162. if (len(l) == 3) or (len(l) == 2):
  163. link = img = alt = ""
  164. if len(l) == 3:
  165. link, img, alt = l
  166. else:
  167. link, alt = l
  168. x = link.rfind('.')
  169. img = link[:x] + '_small' + link[x:]
  170. print '<div class="border" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"></a></div>'
  171. elif len(l) == 5:
  172. v_i += 1
  173. link, mime, thumb, poster, alt = v
  174. print '<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>'
  175. else:
  176. raise NameError('Invalid number of arguments for lightgallery')
  177. print '</div>'
  178. # -----------------------------------------------------------------------------
  179. # github helper macros
  180. # -----------------------------------------------------------------------------
  181. import urllib, json
  182. def restRequest(url):
  183. response = urllib.urlopen(url)
  184. data = json.loads(response.read())
  185. return data
  186. def restReleases(user, repo):
  187. s = "https://api.github.com/repos/"
  188. s += user
  189. s += "/"
  190. s += repo
  191. s += "/releases"
  192. return restRequest(s)
  193. def printLatestRelease(user, repo):
  194. repo_url = "https://github.com/" + user + "/" + repo
  195. print("<div class=\"releasecard\">")
  196. print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")
  197. releases = restReleases(user, repo)
  198. if len(releases) <= 0:
  199. print("No release has been published on GitHub yet.")
  200. print("</div>")
  201. return
  202. releases.sort(key=lambda x: x["published_at"], reverse=True)
  203. r = releases[0]
  204. release_url = r["html_url"]
  205. print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")
  206. if len(r["assets"]) <= 0:
  207. print("<br>No release assets have been published on GitHub for that.")
  208. print("</div>")
  209. return
  210. print("<ul>")
  211. print("Release Assets:")
  212. for a in r["assets"]:
  213. size = int(a["size"])
  214. ss = " "
  215. if size >= (1024 * 1024):
  216. ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
  217. elif size >= 1024:
  218. ss += "(%d KiB)" % (size // 1024)
  219. else:
  220. ss += "(%d Byte)" % (size)
  221. print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
  222. print("</ul></div>")
  223. # -----------------------------------------------------------------------------
  224. # preconvert hooks
  225. # -----------------------------------------------------------------------------
  226. def hook_preconvert_anotherlang():
  227. MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
  228. _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
  229. vpages = [] # Set of all virtual pages
  230. for p in pages:
  231. current_lang = DEFAULT_LANG # Default language
  232. langs = [] # List of languages for the current page
  233. page_vpages = {} # Set of virtual pages for the current page
  234. text_lang = re.split(_re_lang, p.source)
  235. text_grouped = dict(zip([current_lang,] + \
  236. [lang.strip() for lang in text_lang[1::2]], \
  237. text_lang[::2]))
  238. for lang, text in text_grouped.iteritems():
  239. spath = p.fname.split(os.path.sep)
  240. langs.append(lang)
  241. if lang == "en":
  242. filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
  243. else:
  244. filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]
  245. vp = Page(filename, virtual=text)
  246. # Copy real page attributes to the virtual page
  247. for attr in p:
  248. if not vp.has_key(attr):
  249. vp[attr] = p[attr]
  250. # Define a title in the proper language
  251. vp["title"] = p["title_%s" % lang] \
  252. if p.has_key("title_%s" % lang) \
  253. else p["title"]
  254. # Keep track of the current lang of the virtual page
  255. vp["lang"] = lang
  256. # Fix post name if exists
  257. if vp.has_key("post"):
  258. if lang == "en":
  259. vp["post"] = vp["post"][:]
  260. else:
  261. vp["post"] = vp["post"][:-len(lang) - 1]
  262. page_vpages[lang] = vp
  263. # Each virtual page has to know about its sister vpages
  264. for lang, vpage in page_vpages.iteritems():
  265. vpage["lang_links"] = dict([(l, v["url"]) for l, v in page_vpages.iteritems()])
  266. vpage["other_lang"] = langs # set other langs and link
  267. vpages += page_vpages.values()
  268. pages[:] = vpages
# PHP 'case' block emitted into the compatibility index.php: maps an old
# xyCMS page id to its new absolute URL. Placeholders: old id, base URL,
# page URL. (Leading whitespace inside the generated PHP is cosmetic;
# the original indentation was lost in extraction — reconstructed.)
_COMPAT = """        case "%s":
            $loc = "%s/%s";
            break;
"""
# Fallback 'default' case: redirect unknown page ids to the 404 page.
_COMPAT_404 = """        default:
            $loc = "%s";
            break;
"""
  277. def hook_preconvert_compat():
  278. fp = open(os.path.join(options.project, "output", "index.php"), 'w')
  279. fp.write("<?\n")
  280. fp.write("// Auto generated xyCMS compatibility index.php\n")
  281. fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
  282. fp.write("if (isset($_GET['p'])) {\n")
  283. fp.write(" if (isset($_GET['lang'])) {\n")
  284. fp.write(" $_GET['p'] .= 'EN';\n")
  285. fp.write(" }\n")
  286. fp.write(" switch($_GET['p']) {\n")
  287. for p in pages:
  288. if p.get("compat", "") != "":
  289. tmp = p["compat"]
  290. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  291. tmp = tmp + "EN"
  292. fp.write(_COMPAT % (tmp, "https://www.xythobuz.de", p.url))
  293. fp.write("\n")
  294. fp.write(_COMPAT_404 % "/404.html")
  295. fp.write(" }\n")
  296. fp.write("}\n")
  297. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  298. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  299. fp.write(" header('Status: 301 Moved Permanently');\n")
  300. fp.write(" } else {\n")
  301. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  302. fp.write(" }\n")
  303. fp.write("}\n");
  304. fp.write("header('Location: '.$loc);\n")
  305. fp.write("?>")
  306. fp.close()
# Sitemap XML skeleton; '%s' receives the concatenated <url> entries.
_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""
# One sitemap entry. Placeholders, in order: base URL, page URL,
# last-modified date, change frequency, priority.
# (Inner indentation reconstructed — lost in extraction; cosmetic only.)
_SITEMAP_URL = """
    <url>
        <loc>%s/%s</loc>
        <lastmod>%s</lastmod>
        <changefreq>%s</changefreq>
        <priority>%s</priority>
    </url>
"""
  320. def hook_preconvert_sitemap():
  321. date = datetime.strftime(datetime.now(), "%Y-%m-%d")
  322. urls = []
  323. for p in pages:
  324. urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
  325. fname = os.path.join(options.project, "output", "sitemap.xml")
  326. fp = open(fname, 'w')
  327. fp.write(_SITEMAP % "".join(urls))
  328. fp.close()
  329. # -----------------------------------------------------------------------------
  330. # postconvert hooks
  331. # -----------------------------------------------------------------------------
# RSS 2.0 channel skeleton. Placeholders, in order: channel title,
# site link, atom self link, description, pubDate, lastBuildDate,
# concatenated <item> XML.
_RSS = """<?xml version="1.0"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>%s</title>
<link>%s</link>
<atom:link href="%s" rel="self" type="application/rss+xml" />
<description>%s</description>
<language>en-us</language>
<pubDate>%s</pubDate>
<lastBuildDate>%s</lastBuildDate>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
<generator>Poole</generator>
%s
</channel>
</rss>
"""
# One RSS <item>. Placeholders: title, link, HTML-escaped description,
# publication date, GUID (the link doubles as the GUID).
_RSS_ITEM = """
<item>
<title>%s</title>
<link>%s</link>
<description>%s</description>
<pubDate>%s</pubDate>
<guid>%s</guid>
</item>
"""
  357. def hook_postconvert_rss():
  358. items = []
  359. posts = [p for p in pages if "date" in p]
  360. posts.sort(key=lambda p: p.date, reverse=True)
  361. posts = posts[:10]
  362. for p in posts:
  363. title = p.title
  364. if "post" in p:
  365. title = p.post
  366. link = "%s/%s" % (BASE_URL, p.url)
  367. desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  368. desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  369. desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  370. desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  371. desc = htmlspecialchars(desc)
  372. date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
  373. date = email.utils.formatdate(date)
  374. items.append(_RSS_ITEM % (title, link, desc, date, link))
  375. items = "".join(items)
  376. title = "xythobuz.de Blog"
  377. link = "%s" % BASE_URL
  378. feed = "%s/rss.xml" % BASE_URL
  379. desc = htmlspecialchars("xythobuz Electronics & Software Projects")
  380. date = email.utils.formatdate()
  381. rss = _RSS % (title, link, feed, desc, date, date, items)
  382. fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
  383. fp.write(rss)
  384. fp.close()
# PHP 'case' block for the mobile compatibility shim: maps an old
# xyCMS page id to its new absolute URL. (Leading whitespace inside the
# generated PHP is cosmetic; original indentation lost in extraction.)
_COMPAT_MOB = """        case "%s":
            $loc = "%s/%s";
            break;
"""
# Fallback 'default' case for the mobile shim: unknown ids go to the
# mobile 404 page.
_COMPAT_404_MOB = """        default:
            $loc = "%s";
            break;
"""
  393. def hook_postconvert_mobilecompat():
  394. directory = os.path.join(output, "mobile")
  395. if not os.path.exists(directory):
  396. os.makedirs(directory)
  397. fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
  398. fp.write("<?\n")
  399. fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
  400. fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
  401. fp.write("if (isset($_GET['p'])) {\n")
  402. fp.write(" if (isset($_GET['lang'])) {\n")
  403. fp.write(" $_GET['p'] .= 'EN';\n")
  404. fp.write(" }\n")
  405. fp.write(" switch($_GET['p']) {\n")
  406. for p in pages:
  407. if p.get("compat", "") != "":
  408. tmp = p["compat"]
  409. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  410. tmp = tmp + "EN"
  411. fp.write(_COMPAT_MOB % (tmp, "https://www.xythobuz.de", re.sub(".html", ".html", p.url)))
  412. fp.write("\n")
  413. fp.write(_COMPAT_404_MOB % "/404.mob.html")
  414. fp.write(" }\n")
  415. fp.write("}\n")
  416. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  417. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  418. fp.write(" header('Status: 301 Moved Permanently');\n")
  419. fp.write(" } else {\n")
  420. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  421. fp.write(" }\n")
  422. fp.write("}\n");
  423. fp.write("header('Location: '.$loc);\n")
  424. fp.write("?>")
  425. fp.close()
  426. def hook_postconvert_size():
  427. file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'mp3', 'mp4', 'plist'])
  428. def matched_link(matchobj):
  429. try:
  430. path = matchobj.group(1)
  431. if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
  432. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  433. elif path.startswith("/"):
  434. path = path.strip("/")
  435. path = os.path.join("static/", path)
  436. size = os.path.getsize(path)
  437. if size >= (1024 * 1024):
  438. return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
  439. elif size >= 1024:
  440. return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
  441. else:
  442. return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
  443. except:
  444. print "Unable to estimate file size for %s" % matchobj.group(1)
  445. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  446. _re_url = '<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
  447. for p in pages:
  448. p.html = re.sub(_re_url, matched_link, p.html)