My static website, generated using poole: https://www.xythobuz.de

macros.py 25KB

# -*- coding: utf-8 -*-

import re
import itertools
import email.utils
import os.path
import time
import codecs
from datetime import datetime

import urlparse

DEFAULT_LANG = "en"
BASE_URL = "https://www.xythobuz.de"

# -----------------------------------------------------------------------------
# sub page helper macro
# -----------------------------------------------------------------------------

def backToParent():
    # check for special parent cases
    posts = []
    if page.get("show_in_quadcopters", "false") == "true":
        posts = [p for p in pages if p.url == "quadcopters.html"]

    # if not, check for actual parent
    if len(posts) == 0:
        url = page.get("parent", "") + ".html"
        posts = [p for p in pages if p.url == url]

    # print if any parent link found
    if len(posts) > 0:
        p = posts[0]
        print '<span class="listdesc">[...back to ' + p.title + ' overview](' + p.url + ')</span>'
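
# Illustrative only: backToParent() uses poole's global 'page'/'pages' objects
# at macro evaluation time. A hypothetical sub page that should link back to
# its overview would set, in its page header:
#
#   parent: projects
#
# which makes this macro emit a markdown back-link to projects.html, provided
# a page with that URL exists. Pages flagged with 'show_in_quadcopters: true'
# link back to quadcopters.html instead.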
# -----------------------------------------------------------------------------
# table helper macro
# -----------------------------------------------------------------------------

def tableHelper(style, header, content):
    print "<table>"
    if (header != None) and (len(header) == len(style)):
        print "<tr>"
        for h in header:
            print "<th>" + h + "</th>"
        print "</tr>"
    for ci in range(0, len(content)):
        if len(content[ci]) != len(style):
            # invalid call of table helper!
            continue
        print "<tr>"
        for i in range(0, len(style)):
            s = style[i]
            td_style = ""
            if "monospaced" in s:
                td_style += " font-family: monospace;"
            if "align-last-right" in s:
                if ci == (len(content) - 1):
                    td_style += " text-align: right;"
                else:
                    if "align-center" in s:
                        td_style += " text-align: center;"
                    elif "align-right" in s:
                        td_style += " text-align: right;"
            elif "align-center" in s:
                td_style += " text-align: center;"
            td_args = ""
            if td_style != "":
                td_args = " style=\"" + td_style + "\""
            print "<td" + td_args + ">"
            if isinstance(content[ci][i], tuple):
                text, link = content[ci][i]
                print "<a href=\"" + link + "\">" + text + "</a>"
            else:
                text = content[ci][i]
                print text
            print "</td>"
        print "</tr>"
    print "</table>"
# -----------------------------------------------------------------------------
# menu helper macro
# -----------------------------------------------------------------------------

def githubCommitBadge(p, showInline = False):
    ret = ""
    if p.get("github", "") != "":
        link = p.get("git", p.github)
        linkParts = p.github.split("/")
        if len(linkParts) >= 5:
            ret += "<a href=\"" + link + "\"><img "
            if showInline:
                ret += "style =\"vertical-align: top;\" "
            ret += "src=\"https://img.shields.io/github/last-commit/"
            ret += linkParts[3] + "/" + linkParts[4]
            ret += ".svg?logo=git&style=flat\" /></a>"
    return ret

def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = "", showLastCommit = True):
    title = p.title
    if lang != "":
        if p.get("title_" + lang, "") != "":
            title = p.get("title_" + lang, "")
    if title == "Blog":
        title = p.post

    year = p.get("date", "")[0:4]
    if year != lastyear:
        lastyear = year
        if yearsAsHeading:
            print "\n\n#### %s\n" % (year)

    dateto = ""
    if p.get("date", "") != "":
        year = p.get("date", "")[0:4]
        if showOnlyStartDate:
            dateto = " (%s)" % (year)
        if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
            if showDateSpan:
                dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
        if nicelyFormatFullDate:
            dateto = " - " + datetime.strptime(p.get("update", p.date), "%Y-%m-%d").strftime("%B %d, %Y")

    print " * **[%s](%s)**%s" % (title, p.url, dateto)

    if p.get("description", "") != "":
        description = p.get("description", "")
        if lang != "":
            if p.get("description_" + lang, "") != "":
                description = p.get("description_" + lang, "")
        print "<br><span class=\"listdesc\">" + description + "</span>"

    if showLastCommit:
        link = githubCommitBadge(p)
        if len(link) > 0:
            print "<br>" + link

    return lastyear
def printRecentMenu(count = 5):
    posts = [p for p in pages if "date" in p and p.lang == "en"]
    posts.sort(key=lambda p: p.get("update", p.get("date")), reverse=True)
    if count > 0:
        posts = posts[0:count]
    for p in posts:
        printMenuItem(p, False, False, False, True, "0", "", False)

def printBlogMenu():
    posts = [p for p in pages if "post" in p and p.lang == "en"]
    posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
    lastyear = "0"
    for p in posts:
        lastyear = printMenuItem(p, True, False, False, True, lastyear)

def printProjectsMenu():
    # prints all pages with parent 'projects' or 'stuff'.
    # first the ones without date, sorted by position.
    # then afterwards those with date, split by year.
    # also supports blog posts with parent.
    enpages = [p for p in pages if p.lang == "en"]

    dpages = [p for p in enpages if p.get("date", "") == ""]
    mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
    mpages.sort(key=lambda p: [int(p.get("position", "999"))])
    for p in mpages:
        printMenuItem(p)

    dpages = [p for p in enpages if p.get("date", "") != ""]
    mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    lastyear = "0"
    for p in mpages:
        lastyear = printMenuItem(p, True, True, False, False, lastyear)

def print3DPrintingMenu():
    mpages = [p for p in pages if p.get("parent", "") == "3d-printing" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printInputDevicesMenu():
    mpages = [p for p in pages if p.get("parent", "") == "input_devices" and p.lang == "en"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printInputDevicesRelatedMenu():
    mpages = [p for p in pages if p.get("show_in_input_devices", "false") == "true"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printSmarthomeMenu():
    mpages = [p for p in pages if p.get("parent", "") == "smarthome" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printQuadcopterMenu():
    mpages = [p for p in pages if p.get("parent", "") == "quadcopters" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, True, True)

def printQuadcopterRelatedMenu():
    mpages = [p for p in pages if p.get("show_in_quadcopters", "false") == "true"]
    mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
    for p in mpages:
        printMenuItem(p, False, True, True)

def printRobotMenuEnglish():
    mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p)

def printRobotMenuDeutsch():
    mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
    mpages.sort(key=lambda p: int(p["position"]))
    for p in mpages:
        printMenuItem(p, False, False, False, False, "0", "de")

# -----------------------------------------------------------------------------
# lightgallery helper macro
# -----------------------------------------------------------------------------

# call this macro like this:
# lightgallery([
#     [ "image-link", "description" ],
#     [ "image-link", "thumbnail-link", "description" ],
#     [ "youtube-link", "thumbnail-link", "description" ],
#     [ "video-link", "mime", "thumbnail-link", "image-link", "description" ],
#     [ "video-link", "mime", "", "", "description" ],
# ])
# it will also auto-generate thumbnails and resize and strip EXIF from images
# using the included web-image-resize script.
# and it can generate video thumbnails and posters with the video-thumb script.

def lightgallery_check_thumbnail(link, thumb):
    # only check local image links
    if not link.startswith('img/'):
        return

    # generate thumbnail filename web-image-resize will create
    x = link.rfind('.')
    img = link[:x] + '_small' + link[x:]

    # only run when desired thumb path matches calculated ones
    if thumb != img:
        return

    # generate fs path to images
    path = os.path.join(os.getcwd(), 'static', link)
    img = os.path.join(os.getcwd(), 'static', thumb)

    # no need to generate thumb again
    if os.path.exists(img):
        return

    # run web-image-resize to generate thumbnail
    script = os.path.join(os.getcwd(), 'web-image-resize')
    os.system(script + ' ' + path)
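
# Illustrative only: the thumbnail name is always derived from the image name
# by inserting '_small' before the extension. For a hypothetical local image
# 'img/example.jpg' the only accepted thumb path is 'img/example_small.jpg';
# any other value makes the guard above return without running web-image-resize:
#
#   lightgallery_check_thumbnail('img/example.jpg', 'img/example_small.jpg')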
def lightgallery_check_thumbnail_video(link, thumb, poster):
    # only check local image links
    if not link.startswith('img/'):
        return

    # generate thumbnail filenames video-thumb will create
    x = link.rfind('.')
    thumb_l = link[:x] + '_thumb.png'
    poster_l = link[:x] + '_poster.png'

    # only run when desired thumb path matches calculated ones
    if (thumb_l != thumb) or (poster_l != poster):
        return

    # generate fs path to images
    path = os.path.join(os.getcwd(), 'static', link)
    thumb_p = os.path.join(os.getcwd(), 'static', thumb)
    poster_p = os.path.join(os.getcwd(), 'static', poster)

    # no need to generate thumb again
    if os.path.exists(thumb_p) or os.path.exists(poster_p):
        return

    # run video-thumb to generate thumbnail
    script = os.path.join(os.getcwd(), 'video-thumb')
    os.system(script + ' ' + path)

def lightgallery(links):
    videos = [l for l in links if len(l) == 5]
    v_i = -1
    for v in videos:
        link, mime, thumb, poster, alt = v
        v_i += 1
        print '<div style="display:none;" id="video' + str(v_i) + '">'
        print '<video class="lg-video-object lg-html5" controls preload="none">'
        print '<source src="' + link + '" type="' + mime + '">'
        print 'Your browser does not support HTML5 video.'
        print '</video>'
        print '</div>'

    print '<div class="lightgallery">'
    v_i = -1
    for l in links:
        if (len(l) == 3) or (len(l) == 2):
            link = img = alt = ""
            style = img2 = ""
            if len(l) == 3:
                link, img, alt = l
            else:
                link, alt = l
                if "youtube.com" in link:
                    img = "https://img.youtube.com/vi/"
                    img += urlparse.parse_qs(urlparse.urlparse(link).query)['v'][0]
                    img += "/0.jpg" # full size preview
                    #img += "/default.jpg" # default thumbnail
                    style = ' style="width:300px;"'
                    img2 = '<img src="lg/video-play.png" class="picthumb">'
                else:
                    x = link.rfind('.')
                    img = link[:x] + '_small' + link[x:]
                    lightgallery_check_thumbnail(link, img)
            print '<div class="border" style="position:relative;" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"' + style + '>' + img2 + '</a></div>'
        elif len(l) == 5:
            v_i += 1
            link, mime, thumb, poster, alt = videos[v_i]
            if len(thumb) <= 0:
                x = link.rfind('.')
                thumb = link[:x] + '_thumb.png'
            if len(poster) <= 0:
                x = link.rfind('.')
                poster = link[:x] + '_poster.png'
            lightgallery_check_thumbnail_video(link, thumb, poster)
            print '<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>'
        else:
            raise NameError('Invalid number of arguments for lightgallery')

    print '</div>'

# -----------------------------------------------------------------------------
# github helper macros
# -----------------------------------------------------------------------------

import urllib, json

def restRequest(url):
    response = urllib.urlopen(url)
    if response.getcode() != 200:
        raise Exception("invalid response code", response.getcode())
    data = json.loads(response.read())
    return data

def restReleases(user, repo):
    s = "https://api.github.com/repos/"
    s += user
    s += "/"
    s += repo
    s += "/releases"
    return restRequest(s)

def printLatestRelease(user, repo):
    repo_url = "https://github.com/" + user + "/" + repo
    print("<div class=\"releasecard\">")
    print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")

    releases = restReleases(user, repo)
    if len(releases) <= 0:
        print("No release has been published on GitHub yet.")
        print("</div>")
        return

    releases.sort(key=lambda x: x["published_at"], reverse=True)
    r = releases[0]
    release_url = r["html_url"]
    print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")

    if len(r["assets"]) <= 0:
        print("<br>No release assets have been published on GitHub for that.")
        print("</div>")
        return

    print("<ul>")
    print("Release Assets:")
    for a in r["assets"]:
        size = int(a["size"])
        ss = " "
        if size >= (1024 * 1024):
            ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
        elif size >= 1024:
            ss += "(%d KiB)" % (size // 1024)
        else:
            ss += "(%d Byte)" % (size)
        print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
    print("</ul></div>")
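
# Illustrative only: a concrete gallery with one local image, one YouTube link
# and one local video (all file names below are hypothetical). The image entry
# gets an auto-generated 'img/photo_small.jpg' thumbnail, the YouTube entry a
# preview image from img.youtube.com, and the video entry an auto-generated
# '_thumb.png'/'_poster.png' pair:
#
# lightgallery([
#     [ "img/photo.jpg", "A photo" ],
#     [ "https://www.youtube.com/watch?v=dQw4w9WgXcQ", "A video on YouTube" ],
#     [ "img/clip.mp4", "video/mp4", "", "", "A local video clip" ],
# ])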
def include_url(url):
    response = urllib.urlopen(url)
    if response.getcode() != 200:
        raise Exception("invalid response code", response.getcode())
    data = response.read()
    print data,

# -----------------------------------------------------------------------------
# preconvert hooks
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# multi language support
# -----------------------------------------------------------------------------

def hook_preconvert_anotherlang():
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = [] # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG # Default language
        langs = [] # List of languages for the current page
        page_vpages = {} # Set of virtual pages for the current page
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
                                [lang.strip() for lang in text_lang[1::2]], \
                                text_lang[::2]))

        for lang, text in text_grouped.iteritems():
            spath = p.fname.split(os.path.sep)
            langs.append(lang)

            if lang == "en":
                filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]

            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not vp.has_key(attr):
                    vp[attr] = p[attr]

            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                          if p.has_key("title_%s" % lang) \
                          else p["title"]
            # Keep track of the current lang of the virtual page
            vp["lang"] = lang

            page_vpages[lang] = vp

        # Each virtual page has to know about its sister vpages
        for lang, vpage in page_vpages.iteritems():
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in page_vpages.iteritems()])
            vpage["other_lang"] = langs # set other langs and link

        vpages += page_vpages.values()

    pages[:] = vpages
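
# Illustrative only: a page source that this hook splits into per-language
# virtual pages looks roughly like this (hypothetical content; the page header
# is whatever poole normally expects). Everything before the first 'lang:'
# marker is treated as the DEFAULT_LANG ("en") version, and each 'lang: xx'
# line starts the text for another language:
#
#   (page header with title, title_de, ...)
#
#   English text of the page.
#
#   lang: de
#
#   Deutscher Text der Seite.
#
# For a source file 'example.md' the English variant keeps its name while the
# German variant becomes 'example.de.md', and both virtual pages receive
# "lang_links" pointing at their sibling languages.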
# -----------------------------------------------------------------------------
# compatibility redirect for old website URLs
# -----------------------------------------------------------------------------

_COMPAT = """    case "%s":
      $loc = "%s/%s";
      break;
"""

_COMPAT_404 = """    default:
      $loc = "%s";
      break;
"""

def hook_preconvert_compat():
    fp = open(os.path.join(options.project, "output", "index.php"), 'w')
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("  if (isset($_GET['lang'])) {\n")
    fp.write("    $_GET['p'] .= 'EN';\n")
    fp.write("  }\n")
    fp.write("  switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT % (tmp, "https://www.xythobuz.de", p.url))
    fp.write("\n")
    fp.write(_COMPAT_404 % "/404.html")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("  if (php_sapi_name() == 'cgi') {\n")
    fp.write("    header('Status: 301 Moved Permanently');\n")
    fp.write("  } else {\n")
    fp.write("    header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("  }\n")
    fp.write("}\n");
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()
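
# Illustrative only: a page whose header sets a hypothetical 'compat: 123' and
# which renders to 'example.html' ends up as one switch case in the generated
# index.php (old xyCMS URLs used ?p= and ?lang= parameters):
#
#   case "123EN":
#     $loc = "https://www.xythobuz.de/example.html";
#     break;
#
# A request with the lang parameter set (e.g. /?p=123&lang=en) then matches
# this case and is answered with a 301 redirect to the new page URL.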
# -----------------------------------------------------------------------------
# sitemap generation
# -----------------------------------------------------------------------------

_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""

_SITEMAP_URL = """
  <url>
    <loc>%s/%s</loc>
    <lastmod>%s</lastmod>
    <changefreq>%s</changefreq>
    <priority>%s</priority>
  </url>
"""

def hook_preconvert_sitemap():
    date = datetime.strftime(datetime.now(), "%Y-%m-%d")
    urls = []
    for p in pages:
        urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
    fname = os.path.join(options.project, "output", "sitemap.xml")
    fp = open(fname, 'w')
    fp.write(_SITEMAP % "".join(urls))
    fp.close()

# -----------------------------------------------------------------------------
# postconvert hooks
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# rss feed generation
# -----------------------------------------------------------------------------

_RSS = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet href="%s" type="text/xsl"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
  <title>%s</title>
  <link>%s</link>
  <atom:link href="%s" rel="self" type="application/rss+xml" />
  <description>%s</description>
  <language>en-us</language>
  <pubDate>%s</pubDate>
  <lastBuildDate>%s</lastBuildDate>
  <docs>http://blogs.law.harvard.edu/tech/rss</docs>
  <generator>Poole</generator>
  <ttl>720</ttl>
%s
</channel>
</rss>
"""

_RSS_ITEM = """
  <item>
    <title>%s</title>
    <link>%s</link>
    <description>%s</description>
    <pubDate>%s</pubDate>
    <atom:updated>%s</atom:updated>
    <guid>%s</guid>
  </item>
"""

def hook_postconvert_rss():
    items = []
    # all pages with "date" get put into feed
    posts = [p for p in pages if "date" in p]
    # sort by update if available, date else
    posts.sort(key=lambda p: p.get("update", p.date), reverse=True)
    # only put 20 most recent items in feed
    posts = posts[:20]
    for p in posts:
        title = p.title
        if "post" in p:
            title = p.post
        link = "%s/%s" % (BASE_URL, p.url)
        desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = htmlspecialchars(desc)
        date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
        date = email.utils.formatdate(date)
        update = time.mktime(time.strptime("%s 12" % p.get("update", p.date), "%Y-%m-%d %H"))
        update = email.utils.formatdate(update)
        items.append(_RSS_ITEM % (title, link, desc, date, update, link))
    items = "".join(items)
    style = "/css/rss.xsl"
    title = "xythobuz.de Blog"
    link = "%s" % BASE_URL
    feed = "%s/rss.xml" % BASE_URL
    desc = htmlspecialchars("xythobuz Electronics & Software Projects")
    date = email.utils.formatdate()
    rss = _RSS % (style, title, link, feed, desc, date, date, items)
    fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
    fp.write(rss)
    fp.close()

# -----------------------------------------------------------------------------
# compatibility redirect for old mobile pages
# -----------------------------------------------------------------------------

_COMPAT_MOB = """    case "%s":
      $loc = "%s/%s";
      break;
"""

_COMPAT_404_MOB = """    default:
      $loc = "%s";
      break;
"""

def hook_postconvert_mobilecompat():
    directory = os.path.join(output, "mobile")
    if not os.path.exists(directory):
        os.makedirs(directory)
    fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
    fp.write("$loc = 'https://www.xythobuz.de/index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write("  if (isset($_GET['lang'])) {\n")
    fp.write("    $_GET['p'] .= 'EN';\n")
    fp.write("  }\n")
    fp.write("  switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT_MOB % (tmp, "https://www.xythobuz.de", re.sub(".html", ".html", p.url)))
    fp.write("\n")
    fp.write(_COMPAT_404_MOB % "/404.mob.html")
    fp.write("  }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write("  if (php_sapi_name() == 'cgi') {\n")
    fp.write("    header('Status: 301 Moved Permanently');\n")
    fp.write("  } else {\n")
    fp.write("    header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write("  }\n")
    fp.write("}\n");
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
    fp.close()

# -----------------------------------------------------------------------------
# displaying filesize for download links
# -----------------------------------------------------------------------------

def hook_postconvert_size():
    file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'plist'])

    def matched_link(matchobj):
        try:
            path = matchobj.group(1)
            if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
                return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
            elif path.startswith("/"):
                path = path.strip("/")
            path = os.path.join("static/", path)
            size = os.path.getsize(path)
            if size >= (1024 * 1024):
                return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
            elif size >= 1024:
                return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
            else:
                return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
        except:
            print "Unable to estimate file size for %s" % matchobj.group(1)
            return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))

    _re_url = '<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
    for p in pages:
        p.html = re.sub(_re_url, matched_link, p.html)
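
# Illustrative only: assuming a hypothetical 1.5 MiB file at
# static/stuff/manual.pdf, this hook rewrites a converted link
#
#   <a href="stuff/manual.pdf">Manual</a>
#
# into
#
#   <a href="stuff/manual.pdf">Manual</a>&nbsp;(1.5 MiB)
#
# External links (http/https, //, ftp) are left unchanged, and links whose
# file cannot be found locally keep their original form.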