My static website generator using poole: https://www.xythobuz.de

macros.py 6.2KB

import re
import itertools
import email.utils
import os.path
import time
import codecs

from datetime import datetime

DEFAULT_LANG = "en"
BASE_URL = "http://www.xythobuz.de"

# -----------------------------------------------------------------------------
# preconvert hooks
# -----------------------------------------------------------------------------

def hook_preconvert_anotherlang():
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = []  # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG  # Default language
        langs = []  # List of languages for the current page
        page_vpages = {}  # Set of virtual pages for the current page
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
                                [lang.strip() for lang in text_lang[1::2]], \
                                text_lang[::2]))

        for lang, text in text_grouped.iteritems():
            spath = p.fname.split(os.path.sep)
            langs.append(lang)

            if lang == "en":
                filename = re.sub(MKD_PATT, "%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                filename = re.sub(MKD_PATT, ".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]

            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not vp.has_key(attr):
                    vp[attr] = p[attr]
            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                          if p.has_key("title_%s" % lang) \
                          else p["title"]
            # Keep track of the current lang of the virtual page
            vp["lang"] = lang
            # Fix post name if exists
            if vp.has_key("post"):
                if lang == "en":
                    vp["post"] = vp["post"][:]
                else:
                    vp["post"] = vp["post"][:-len(lang) - 1]
            page_vpages[lang] = vp

        # Each virtual page has to know about its sister vpages
        for lang, vpage in page_vpages.iteritems():
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in page_vpages.iteritems()])
            vpage["other_lang"] = langs  # set other langs and link

        vpages += page_vpages.values()

    pages[:] = vpages
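
# A quick sketch of what the splitting above does (hypothetical input, not
# part of this repository):
#
#     >>> src = "English text.\nlang: de\nDeutscher Text.\n"
#     >>> re.split(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', src, flags=re.MULTILINE)
#     ['English text.\n', ' de', '\nDeutscher Text.\n']
#
# The chunks at even indices are the page bodies and the stripped captures at
# odd indices are the extra language codes, so such a page yields an "en" and
# a "de" virtual page (e.g. about.md and about.de.md before conversion).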
  56. _COMPAT = """ case "%s":
  57. $loc = "%s/%s";
  58. break;
  59. """
  60. _COMPAT_404 = """ default:
  61. $loc = "%s";
  62. break;
  63. """

def hook_preconvert_compat():
    fp = open(os.path.join(options.project, "output", "index.php"), 'w')
    fp.write("<?\n")
    fp.write("// Auto generated xyCMS compatibility index.php\n")
    fp.write("$loc = 'index.de.html';\n")
    fp.write("if (isset($_GET['p'])) {\n")
    fp.write(" if (isset($_GET['lang'])) {\n")
    fp.write(" $_GET['p'] .= EN;\n")
    fp.write(" }\n")
    fp.write(" switch($_GET['p']) {\n")
    for p in pages:
        if p.get("compat", "") != "":
            tmp = p["compat"]
            if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
                tmp = tmp + "EN"
            fp.write(_COMPAT % (tmp, options.base_url.rstrip('/'), p.url))
    fp.write("\n")
    fp.write(_COMPAT_404 % "/404.html")
    fp.write(" }\n")
    fp.write("}\n")
    fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
    fp.write(" if (php_sapi_name() == 'cgi') {\n")
    fp.write(" header('Status: 301 Moved Permanently');\n")
    fp.write(" } else {\n")
    fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
    fp.write(" }\n")
    fp.write("}\n")
    fp.write("header('Location: '.$loc);\n")
    fp.write("?>")
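
# Sketch of the legacy mapping this hook generates (hypothetical page, not
# part of this repository): a page with "compat: start" in the default
# language gets the case label "startEN", so an old xyCMS request such as
#
#     index.php?p=start&lang=en
#
# is answered by the generated index.php with a redirect (301 for HTTP/1.1
# clients) to <base_url>/start.html; requests with an unknown "p" fall
# through to the _COMPAT_404 default and are sent to /404.html.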

_SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
%s
</urlset>
"""

_SITEMAP_URL = """
<url>
<loc>%s/%s</loc>
<lastmod>%s</lastmod>
<changefreq>%s</changefreq>
<priority>%s</priority>
</url>
"""

def hook_preconvert_sitemap():
    date = datetime.strftime(datetime.now(), "%Y-%m-%d")
    urls = []
    for p in pages:
        urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
    fname = os.path.join(options.project, "output", "sitemap.xml")
    fp = open(fname, 'w')
    fp.write(_SITEMAP % "".join(urls))
    fp.close()
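
# Sketch of one generated entry (hypothetical page, not part of this
# repository): a page "contact.html" that sets neither "changefreq" nor
# "priority" ends up in output/sitemap.xml roughly as
#
#     <url>
#     <loc>http://www.xythobuz.de/contact.html</loc>
#     <lastmod>YYYY-MM-DD</lastmod>            (the build date)
#     <changefreq>monthly</changefreq>
#     <priority>0.5</priority>
#     </url>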

# -----------------------------------------------------------------------------
# postconvert hooks
# -----------------------------------------------------------------------------

_RSS = """<?xml version="1.0"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>%s</title>
<link>%s</link>
<atom:link href="%s" rel="self" type="application/rss+xml" />
<description>%s</description>
<language>en-us</language>
<pubDate>%s</pubDate>
<lastBuildDate>%s</lastBuildDate>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
<generator>Poole</generator>
%s
</channel>
</rss>
"""

_RSS_ITEM = """
<item>
<title>%s</title>
<link>%s</link>
<description>%s</description>
<pubDate>%s</pubDate>
<guid>%s</guid>
</item>
"""

def hook_postconvert_rss():
    items = []
    posts = [p for p in pages if "post" in p]  # get all blog post pages
    posts.sort(key=lambda p: p.date, reverse=True)
    posts = posts[:10]
    for p in posts:
        title = p.post
        link = "%s/%s" % (BASE_URL, p.url)
        desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
        desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
        desc = htmlspecialchars(desc)
        date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
        date = email.utils.formatdate(date)
        items.append(_RSS_ITEM % (title, link, desc, date, link))
    items = "".join(items)
    title = "xythobuz.de Blog"
    link = "%s/blog.html" % BASE_URL
    feed = "%s/rss.xml" % BASE_URL
    desc = htmlspecialchars("xythobuz Electronics & Software Projects")
    date = email.utils.formatdate()
    rss = _RSS % (title, link, feed, desc, date, date, items)
    fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
    fp.write(rss)
    fp.close()
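
# Note: htmlspecialchars() is referenced above but not defined in this part of
# the file; it is presumably defined further down or provided, like the pages,
# options, Page and output names, by the poole environment that loads these
# macros. A minimal stand-in with the same intent would be:
#
#     def htmlspecialchars(text):
#         return text.replace("&", "&amp;") \
#                    .replace("<", "&lt;") \
#                    .replace(">", "&gt;") \
#                    .replace('"', "&quot;")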