My static website generator, built with Poole: https://www.xythobuz.de

macros.py

# -*- coding: utf-8 -*-

from __future__ import print_function

import sys
import re
import itertools
import email.utils
import os.path
import time
import codecs
from datetime import datetime

DEFAULT_LANG = "en"
BASE_URL = "https://www.xythobuz.de"
# =============================================================================
# Python 2/3 hacks
# =============================================================================

PY3 = sys.version_info[0] == 3

if PY3:
    import urllib
    import urllib.request

    def urlparse_foo(link):
        return urllib.parse.parse_qs(urllib.parse.urlparse(link).query)['v'][0]
else:
    import urllib
    import urlparse

    def urlparse_foo(link):
        return urlparse.parse_qs(urlparse.urlparse(link).query)['v'][0]
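# Note: urlparse_foo() pulls the 'v' query parameter (the video id) out of a
# YouTube watch URL; it is used further down to build preview thumbnail links.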
# -----------------------------------------------------------------------------
# sub page helper macro
# -----------------------------------------------------------------------------

def backToParent():
    # check for special parent cases
    posts = []
    if page.get("show_in_quadcopters", "false") == "true":
        posts = [p for p in pages if p.url == "quadcopters.html"]

    # if not, check for actual parent
    if len(posts) == 0:
        url = page.get("parent", "") + ".html"
        posts = [p for p in pages if p.url == url]

    # print if any parent link found
    if len(posts) > 0:
        p = posts[0]
        print('<span class="listdesc">[...back to ' + p.title + ' overview](' + p.url + ')</span>')
# -----------------------------------------------------------------------------
# table helper macro
# -----------------------------------------------------------------------------

def tableHelper(style, header, content):
    print("<table>")
    if (header != None) and (len(header) == len(style)):
        print("<tr>")
        for h in header:
            print("<th>" + h + "</th>")
        print("</tr>")
    for ci in range(0, len(content)):
        if len(content[ci]) != len(style):
            # invalid call of table helper!
            continue
        print("<tr>")
        for i in range(0, len(style)):
            s = style[i]
            td_style = ""
            if "monospaced" in s:
                td_style += " font-family: monospace;"
            if "align-last-right" in s:
                if ci == (len(content) - 1):
                    td_style += " text-align: right;"
                else:
                    if "align-center" in s:
                        td_style += " text-align: center;"
                    elif "align-right" in s:
                        td_style += " text-align: right;"
            elif "align-center" in s:
                td_style += " text-align: center;"
            td_args = ""
            if td_style != "":
                td_args = " style=\"" + td_style + "\""
            print("<td" + td_args + ">")
            if isinstance(content[ci][i], tuple):
                text, link = content[ci][i]
                print("<a href=\"" + link + "\">" + text + "</a>")
            else:
                text = content[ci][i]
                print(text)
            print("</td>")
        print("</tr>")
    print("</table>")
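# Example invocation (illustrative only, file names and sizes are made up):
# one style string per column, header of the same length, rows matching the
# style length, and a (text, link) tuple rendering as an anchor:
#
# tableHelper(
#     [ "align-center", "monospaced" ],
#     [ "File", "Size" ],
#     [
#         [ ("demo.zip", "demo.zip"), "1234" ],
#         [ "readme.txt", "567" ],
#     ]
# )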
# -----------------------------------------------------------------------------
# menu helper macro
# -----------------------------------------------------------------------------

def githubCommitBadge(p, showInline = False):
    ret = ""
    if p.get("github", "") != "":
        link = p.get("git", p.github)
        linkParts = p.github.split("/")
        if len(linkParts) >= 5:
            ret += "<a href=\"" + link + "\"><img "
            if showInline:
                ret += "style=\"vertical-align: middle; padding-bottom: 0.25em;\" "
            ret += "src=\"https://img.shields.io/github/last-commit/"
            ret += linkParts[3] + "/" + linkParts[4]
            ret += ".svg?logo=git&style=flat\" /></a>"
    return ret
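# Note: linkParts[3] and linkParts[4] assume a "github" page attribute of the
# form "https://github.com/<user>/<repo>", so splitting on "/" yields the user
# at index 3 and the repository name at index 4.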
def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = "", showLastCommit = True):
    title = p.title
    if lang != "":
        if p.get("title_" + lang, "") != "":
            title = p.get("title_" + lang, "")
    if title == "Blog":
        title = p.post

    year = p.get("date", "")[0:4]
    if year != lastyear:
        lastyear = year
        if yearsAsHeading:
            print("\n\n#### %s\n" % (year))

    dateto = ""
    if p.get("date", "") != "":
        year = p.get("date", "")[0:4]
        if showOnlyStartDate:
            dateto = " (%s)" % (year)
        if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
            if showDateSpan:
                dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
        if nicelyFormatFullDate:
            dateto = " - " + datetime.strptime(p.get("update", p.date), "%Y-%m-%d").strftime("%B %d, %Y")

    print(" * **[%s](%s)**%s" % (title, p.url, dateto))
    if p.get("description", "") != "":
        description = p.get("description", "")
        if lang != "":
            if p.get("description_" + lang, "") != "":
                description = p.get("description_" + lang, "")
        print("<br><span class=\"listdesc\">" + description + "</span>")
    if showLastCommit:
        link = githubCommitBadge(p)
        if len(link) > 0:
            print("<br>" + link)

    return lastyear
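# Example (illustrative): print year-grouped entries with a date span, carrying
# the last printed year across calls so the heading is only emitted once:
#
# lastyear = "0"
# for p in mpages:
#     lastyear = printMenuItem(p, yearsAsHeading=True, showDateSpan=True, lastyear=lastyear)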
def printRecentMenu(count = 5):
    posts = [p for p in pages if "date" in p and p.lang == "en"]
    posts.sort(key=lambda p: p.get("update", p.get("date")), reverse=True)
    if count > 0:
        posts = posts[0:count]
    for p in posts:
        printMenuItem(p, False, False, False, True, "0", "", False)

def printBlogMenu(year_min=None, year_max=None):
    posts = [p for p in pages if "post" in p and p.lang == "en"]
    posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
    if year_min != None:
        posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) >= int(year_min)]
    if year_max != None:
        posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) <= int(year_max)]
    lastyear = "0"
    for p in posts:
        lastyear = printMenuItem(p, True, False, False, True, lastyear)

def printProjectsMenu():
    # prints all pages with parent 'projects' or 'stuff'.
    # first the ones without date, sorted by position.
    # then afterwards those with date, split by year.
    # also supports blog posts with parent.
    enpages = [p for p in pages if p.lang == "en"]

    dpages = [p for p in enpages if p.get("date", "") == ""]
    mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
    mpages.sort(key=lambda p: [int(p.get("position", "999"))])
    for p in mpages:
        printMenuItem(p)

    dpages = [p for p in enpages if p.get("date", "") != ""]
    mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    lastyear = "0"
    for p in mpages:
        lastyear = printMenuItem(p, True, True, False, False, lastyear)

def print3DPrintingMenu():
    mpages = [p for p in pages if p.get("parent", "") == "3d-printing" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printInputDevicesMenu():
    mpages = [p for p in pages if p.get("parent", "") == "input_devices" and p.lang == "en"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printInputDevicesRelatedMenu():
    mpages = [p for p in pages if p.get("show_in_input_devices", "false") == "true"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printSmarthomeMenu():
    mpages = [p for p in pages if p.get("parent", "") == "smarthome" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printQuadcopterMenu():
    mpages = [p for p in pages if p.get("parent", "") == "quadcopters" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printQuadcopterRelatedMenu():
    mpages = [p for p in pages if p.get("show_in_quadcopters", "false") == "true"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printRobotMenuEnglish():
    mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p)

def printRobotMenuDeutsch():
    mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, False, False, False, "0", "de")

def printSteamMenuEnglish():
    mpages = [p for p in pages if p.get("parent", "") == "steam" and p.lang == "en"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, False, False, True)

def printSteamMenuDeutsch():
    # TODO show german pages, or english pages when german not available
    printSteamMenuEnglish()
# -----------------------------------------------------------------------------
# lightgallery helper macro
# -----------------------------------------------------------------------------

# call this macro like this:
#
# lightgallery([
#     [ "image-link", "description" ],
#     [ "image-link", "thumbnail-link", "description" ],
#     [ "youtube-link", "thumbnail-link", "description" ],
#     [ "video-link", "mime", "thumbnail-link", "image-link", "description" ],
#     [ "video-link", "mime", "", "", "description" ],
# ])
#
# it will also auto-generate thumbnails, and resize and strip EXIF from images,
# using the included web-image-resize script.
# it can also generate video thumbnails and posters with the video-thumb script.
def lightgallery_check_thumbnail(link, thumb):
    # only check local image links
    if not link.startswith('img/'):
        return

    # generate thumbnail filename web-image-resize will create
    x = link.rfind('.')
    img = link[:x] + '_small' + link[x:]

    # only run when desired thumb path matches calculated one
    if thumb != img:
        return

    # generate fs path to images
    path = os.path.join(os.getcwd(), 'static', link)
    img = os.path.join(os.getcwd(), 'static', thumb)

    # no need to generate thumb again
    if os.path.exists(img):
        return

    # run web-image-resize to generate thumbnail
    script = os.path.join(os.getcwd(), 'web-image-resize')
    os.system(script + ' ' + path)

def lightgallery_check_thumbnail_video(link, thumb, poster):
    # only check local image links
    if not link.startswith('img/'):
        return

    # generate thumbnail filenames video-thumb will create
    x = link.rfind('.')
    thumb_l = link[:x] + '_thumb.png'
    poster_l = link[:x] + '_poster.png'

    # only run when desired thumb paths match calculated ones
    if (thumb_l != thumb) or (poster_l != poster):
        return

    # generate fs path to images
    path = os.path.join(os.getcwd(), 'static', link)
    thumb_p = os.path.join(os.getcwd(), 'static', thumb)
    poster_p = os.path.join(os.getcwd(), 'static', poster)

    # no need to generate thumb again
    if os.path.exists(thumb_p) or os.path.exists(poster_p):
        return

    # run video-thumb to generate thumbnail
    script = os.path.join(os.getcwd(), 'video-thumb')
    os.system(script + ' ' + path)
def lightgallery(links):
    global v_ii
    try:
        v_ii += 1
    except NameError:
        v_ii = 0

    videos = [l for l in links if len(l) == 5]
    v_i = -1
    for v in videos:
        link, mime, thumb, poster, alt = v
        v_i += 1
        print('<div style="display:none;" id="video' + str(v_i) + '_' + str(v_ii) + '">')
        print('<video class="lg-video-object lg-html5" controls preload="none">')
        print('<source src="' + link + '" type="' + mime + '">')
        print('<a href="' + link + '">' + alt + '</a>')
        print('</video>')
        print('</div>')

    print('<div class="lightgallery">')
    v_i = -1
    for l in links:
        if (len(l) == 3) or (len(l) == 2):
            link = img = alt = ""
            style = img2 = ""
            if len(l) == 3:
                link, img, alt = l
            else:
                link, alt = l
                if "youtube.com" in link:
                    img = "https://img.youtube.com/vi/"
                    img += urlparse_foo(link)
                    img += "/0.jpg" # full size preview
                    #img += "/default.jpg" # default thumbnail
                    style = ' style="width:300px;"'
                    img2 = '<img src="lg/video-play.png" class="picthumb">'
                else:
                    x = link.rfind('.')
                    img = link[:x] + '_small' + link[x:]
                    lightgallery_check_thumbnail(link, img)
            print('<div class="border" style="position:relative;" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"' + style + '>' + img2 + '</a></div>')
        elif len(l) == 5:
            v_i += 1
            link, mime, thumb, poster, alt = videos[v_i]
            if len(thumb) <= 0:
                x = link.rfind('.')
                thumb = link[:x] + '_thumb.png'
            if len(poster) <= 0:
                x = link.rfind('.')
                poster = link[:x] + '_poster.png'
            lightgallery_check_thumbnail_video(link, thumb, poster)
            print('<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '_' + str(v_ii) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>')
        else:
            raise NameError('Invalid number of arguments for lightgallery')

    print('</div>')
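# Note on lightgallery() above: v_ii is a module-level counter so the hidden
# <div id="videoX_Y"> elements stay unique when more than one gallery appears
# on the same page; v_i numbers the videos within a single call.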
# -----------------------------------------------------------------------------
# github helper macros
# -----------------------------------------------------------------------------

import json, sys

def restRequest(url):
    response = urllib.request.urlopen(url) if PY3 else urllib.urlopen(url)
    if response.getcode() != 200:
        sys.stderr.write("\n")
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
        sys.stderr.write("!!!!!!! WARNING !!!!!\n")
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
        sys.stderr.write("invalid response code: " + str(response.getcode()) + "\n")
        sys.stderr.write("url: \"" + url + "\"\n")
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
        sys.stderr.write("!!!!!!! WARNING !!!!!\n")
        sys.stderr.write("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
        sys.stderr.write("\n")
        return ""
    data = json.loads(response.read().decode("utf-8"))
    return data
def restReleases(user, repo):
    s = "https://api.github.com/repos/"
    s += user
    s += "/"
    s += repo
    s += "/releases"
    return restRequest(s)
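# For example (repository name is hypothetical), restReleases("xythobuz", "some-repo")
# requests https://api.github.com/repos/xythobuz/some-repo/releases and returns
# the decoded JSON list of releases, or "" on a non-200 response.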
def printLatestRelease(user, repo):
    repo_url = "https://github.com/" + user + "/" + repo
    print("<div class=\"releasecard\">")
    print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")

    releases = restReleases(user, repo)
    if len(releases) <= 0:
        print("No release has been published on GitHub yet.")
        print("</div>")
        return

    releases.sort(key=lambda x: x["published_at"], reverse=True)
    r = releases[0]
    release_url = r["html_url"]
    print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")

    if len(r["assets"]) <= 0:
        print("<br>No release assets have been published on GitHub for that.")
        print("</div>")
        return

    print("<ul>")
    print("Release Assets:")
    for a in r["assets"]:
        size = int(a["size"])
        ss = " "
        if size >= (1024 * 1024):
            ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
        elif size >= 1024:
            ss += "(%d KiB)" % (size // 1024)
        else:
            ss += "(%d Byte)" % (size)
        print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
    print("</ul></div>")
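# Typical use from a page (repository name is hypothetical):
#
# printLatestRelease("xythobuz", "some-repo")
#
# This renders a "releasecard" div linking to the newest release and listing
# its assets with human-readable sizes.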
def include_url(url):
    response = urllib.request.urlopen(url) if PY3 else urllib.urlopen(url)
    if response.getcode() != 200:
        raise Exception("invalid response code", response.getcode())
    data = response.read().decode("utf-8")
    print(data, end="")
# -----------------------------------------------------------------------------
# preconvert hooks
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# multi language support
# -----------------------------------------------------------------------------

def hook_preconvert_anotherlang():
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = [] # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG # Default language
        langs = [] # List of languages for the current page
        page_vpages = {} # Set of virtual pages for the current page
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
                                [lang.strip() for lang in text_lang[1::2]], \
                                text_lang[::2]))

        for lang, text in (iter(text_grouped.items()) if PY3 else text_grouped.iteritems()):
            spath = p.fname.split(os.path.sep)
            langs.append(lang)

            if lang == "en":
                filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]

            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not ((attr in vp) if PY3 else vp.has_key(attr)):
                    vp[attr] = p[attr]

            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                if ((("title_%s" % lang) in p) if PY3 else p.has_key("title_%s" % lang)) \
                else p["title"]

            # Keep track of the current lang of the virtual page
            vp["lang"] = lang
            page_vpages[lang] = vp

        # Each virtual page has to know about its sister vpages
        for lang, vpage in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems()):
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems())])
            vpage["other_lang"] = langs # set other langs and link

        vpages += page_vpages.values()

    pages[:] = vpages
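# The hook above splits each page source on the "lang" markers matched by
# _re_lang. Text before the first marker is treated as DEFAULT_LANG ("en");
# every "lang: xx" (or "lang=xx") line starts the section for language xx, and
# each section becomes its own virtual page (e.g. foo.md and foo.de.md).
# Sketch of a page body (content is illustrative):
#
#   English text of the page ...
#
#   lang: de
#
#   Deutscher Text der Seite ...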
# -----------------------------------------------------------------------------
# compatibility redirect for old website URLs
# -----------------------------------------------------------------------------

_COMPAT = """    case "%s":
        $loc = "%s/%s";
        break;
"""

_COMPAT_404 = """    default:
        $loc = "%s";
        break;
"""

def hook_preconvert_compat():
    fp = open(os.path.join(options.project, "output", "index.php"), 'w')
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("  if (isset($_GET['lang'])) {\n")
    fp.write("    $_GET['p'] .= 'EN';\n")
    fp.write("  }\n")
    fp.write("  switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT % (tmp, "https://www.xythobuz.de", p.url))
            fp.write("\n")
    fp.write(_COMPAT_404 % "/404.html")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("  if (php_sapi_name() == 'cgi') {\n")
    fp.write("    header('Status: 301 Moved Permanently');\n")
    fp.write("  } else {\n")
    fp.write("    header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()
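# A page carrying e.g. "compat: about" (attribute value is hypothetical) in the
# default language ends up in the generated output/index.php roughly as:
#
#     case "aboutEN":
#         $loc = "https://www.xythobuz.de/<page-url>";
#         break;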
# -----------------------------------------------------------------------------
# sitemap generation
# -----------------------------------------------------------------------------

_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""

_SITEMAP_URL = """
    <url>
        <loc>%s/%s</loc>
        <lastmod>%s</lastmod>
        <changefreq>%s</changefreq>
        <priority>%s</priority>
    </url>
"""

def hook_preconvert_sitemap():
    date = datetime.strftime(datetime.now(), "%Y-%m-%d")
    urls = []
    for p in pages:
        urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
    fname = os.path.join(options.project, "output", "sitemap.xml")
    fp = open(fname, 'w')
    fp.write(_SITEMAP % "".join(urls))
    fp.close()
# -----------------------------------------------------------------------------
# postconvert hooks
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# rss feed generation
# -----------------------------------------------------------------------------

_RSS = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet href="%s" type="text/xsl"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
    <title>%s</title>
    <link>%s</link>
    <atom:link href="%s" rel="self" type="application/rss+xml" />
    <description>%s</description>
    <language>en-us</language>
    <pubDate>%s</pubDate>
    <lastBuildDate>%s</lastBuildDate>
    <docs>http://blogs.law.harvard.edu/tech/rss</docs>
    <generator>Poole</generator>
    <ttl>720</ttl>
%s
</channel>
</rss>
"""

_RSS_ITEM = """
    <item>
        <title>%s</title>
        <link>%s</link>
        <description>%s</description>
        <pubDate>%s</pubDate>
        <atom:updated>%s</atom:updated>
        <guid>%s</guid>
    </item>
"""

def hook_postconvert_rss():
    items = []
    # all pages with "date" get put into feed
    posts = [p for p in pages if "date" in p]
    # sort by update if available, date otherwise
    posts.sort(key=lambda p: p.get("update", p.date), reverse=True)
    # only put the 20 most recent items in the feed
    posts = posts[:20]
    for p in posts:
        title = p.title
        if "post" in p:
            title = p.post
        link = "%s/%s" % (BASE_URL, p.url)
        desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = htmlspecialchars(desc)
        date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
        date = email.utils.formatdate(date)
        update = time.mktime(time.strptime("%s 12" % p.get("update", p.date), "%Y-%m-%d %H"))
        update = email.utils.formatdate(update)
        items.append(_RSS_ITEM % (title, link, desc, date, update, link))
    items = "".join(items)
    style = "/css/rss.xsl"
    title = "xythobuz.de Blog"
    link = "%s" % BASE_URL
    feed = "%s/rss.xml" % BASE_URL
    desc = htmlspecialchars("xythobuz Electronics & Software Projects")
    date = email.utils.formatdate()
    rss = _RSS % (style, title, link, feed, desc, date, date, items)
    fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
    fp.write(rss)
    fp.close()
# -----------------------------------------------------------------------------
# compatibility redirect for old mobile pages
# -----------------------------------------------------------------------------

_COMPAT_MOB = """    case "%s":
        $loc = "%s/%s";
        break;
"""

_COMPAT_404_MOB = """    default:
        $loc = "%s";
        break;
"""

def hook_postconvert_mobilecompat():
    directory = os.path.join(output, "mobile")
    if not os.path.exists(directory):
        os.makedirs(directory)
    fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("  if (isset($_GET['lang'])) {\n")
    fp.write("    $_GET['p'] .= 'EN';\n")
    fp.write("  }\n")
    fp.write("  switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT_MOB % (tmp, "https://www.xythobuz.de", re.sub(".html", ".html", p.url)))
            fp.write("\n")
    fp.write(_COMPAT_404_MOB % "/404.mob.html")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("  if (php_sapi_name() == 'cgi') {\n")
    fp.write("    header('Status: 301 Moved Permanently');\n")
    fp.write("  } else {\n")
    fp.write("    header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()
# -----------------------------------------------------------------------------
# displaying filesize for download links
# -----------------------------------------------------------------------------

def hook_postconvert_size():
    file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'plist'])

    def matched_link(matchobj):
        try:
            path = matchobj.group(1)
            if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
                return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
            elif path.startswith("/"):
                path = path.strip("/")
            path = os.path.join("static/", path)
            size = os.path.getsize(path)
            if size >= (1024 * 1024):
                return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
            elif size >= 1024:
                return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
            else:
                return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
        except:
            print("Unable to estimate file size for %s" % matchobj.group(1))
            return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))

    _re_url = '<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
    for p in pages:
        p.html = re.sub(_re_url, matched_link, p.html)
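# Effect of the hook above (file name and size are hypothetical): a converted
# link such as
#   <a href="/data/manual.pdf">Manual</a>
# is rewritten to
#   <a href="/data/manual.pdf">Manual</a>&nbsp;(1.4 MiB)
# as long as static/data/manual.pdf exists locally; external http/ftp links are
# left unchanged.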