My static website generator, built using poole — https://www.xythobuz.de
You can not select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899
  1. # -*- coding: utf-8 -*-
  2. from __future__ import print_function
  3. import sys
  4. import re
  5. import itertools
  6. import email.utils
  7. import os.path
  8. import time
  9. import codecs
  10. from datetime import datetime
  11. # -----------------------------------------------------------------------------
  12. # Python 2/3 hacks
  13. # -----------------------------------------------------------------------------
  14. PY3 = sys.version_info[0] == 3
  15. if PY3:
  16. import html
  17. import urllib
  18. import urllib.request
  19. from urllib.error import HTTPError, URLError
  20. def urlparse_foo(link):
  21. return urllib.parse.parse_qs(urllib.parse.urlparse(link).query)['v'][0]
  22. else:
  23. import cgi
  24. import urllib
  25. import urlparse
  26. def urlparse_foo(link):
  27. return urlparse.parse_qs(urlparse.urlparse(link).query)['v'][0]
  28. # -----------------------------------------------------------------------------
  29. # config "system"
  30. # -----------------------------------------------------------------------------
  31. conf = {
  32. "default_lang": "en",
  33. "base_url": "https://www.xythobuz.de",
  34. "birthday": datetime(1994, 1, 22, 0, 0),
  35. "blog_years_back": 6,
  36. }
  37. def get_conf(name):
  38. return conf[name]
  39. # -----------------------------------------------------------------------------
  40. # local vars for compatibility
  41. # -----------------------------------------------------------------------------
  42. DEFAULT_LANG = get_conf("default_lang")
  43. BASE_URL = get_conf("base_url")
  44. # -----------------------------------------------------------------------------
  45. # birthday calculation
  46. # -----------------------------------------------------------------------------
  47. from datetime import timedelta
  48. from calendar import isleap
  49. size_of_day = 1. / 366.
  50. size_of_second = size_of_day / (24. * 60. * 60.)
  51. def date_as_float(dt):
  52. days_from_jan1 = dt - datetime(dt.year, 1, 1)
  53. if not isleap(dt.year) and days_from_jan1.days >= 31+28:
  54. days_from_jan1 += timedelta(1)
  55. return dt.year + days_from_jan1.days * size_of_day + days_from_jan1.seconds * size_of_second
  56. def difference_in_years(start_date, end_date):
  57. return int(date_as_float(end_date) - date_as_float(start_date))
  58. def own_age():
  59. age_dec = difference_in_years(get_conf("birthday"), datetime.now())
  60. age_hex = '0x%X' % age_dec
  61. return '<abbr title="' + str(age_dec) + '">' + str(age_hex) + '</abbr>'
  62. # -----------------------------------------------------------------------------
  63. # sub page helper macro
  64. # -----------------------------------------------------------------------------
  65. def backToParent():
  66. # check for special parent cases
  67. posts = []
  68. if page.get("show_in_quadcopters", "false") == "true":
  69. posts = [p for p in pages if p.url == "quadcopters.html"]
  70. # if not, check for actual parent
  71. if len(posts) == 0:
  72. url = page.get("parent", "") + ".html"
  73. posts = [p for p in pages if p.url == url]
  74. # print if any parent link found
  75. if len(posts) > 0:
  76. p = posts[0]
  77. print('<span class="listdesc">[...back to ' + p.title + ' overview](' + p.url + ')</span>')
  78. # -----------------------------------------------------------------------------
  79. # table helper macro
  80. # -----------------------------------------------------------------------------
  81. def tableHelper(style, header, content):
  82. print("<table>")
  83. if (header != None) and (len(header) == len(style)):
  84. print("<tr>")
  85. for h in header:
  86. print("<th>" + h + "</th>")
  87. print("</tr>")
  88. for ci in range(0, len(content)):
  89. if len(content[ci]) != len(style):
  90. # invalid call of table helper!
  91. continue
  92. print("<tr>")
  93. for i in range(0, len(style)):
  94. s = style[i]
  95. td_style = ""
  96. if "monospaced" in s:
  97. td_style += " font-family: monospace;"
  98. if "align-last-right" in s:
  99. if ci == (len(content) - 1):
  100. td_style += " text-align: right;"
  101. else:
  102. if "align-center" in s:
  103. td_style += " text-align: center;"
  104. elif "align-right" in s:
  105. td_style += " text-align: right;"
  106. elif "align-center" in s:
  107. td_style += " text-align: center;"
  108. td_args = ""
  109. if td_style != "":
  110. td_args = " style=\"" + td_style + "\""
  111. print("<td" + td_args + ">")
  112. if isinstance(content[ci][i], tuple):
  113. text, link = content[ci][i]
  114. print("<a href=\"" + link + "\">" + text + "</a>")
  115. else:
  116. text = content[ci][i]
  117. print(text)
  118. print("</td>")
  119. print("</tr>")
  120. print("</table>")
  121. # -----------------------------------------------------------------------------
  122. # menu helper macro
  123. # -----------------------------------------------------------------------------
  124. def githubCommitBadge(p, showInline = False):
  125. ret = ""
  126. if p.get("github", "") != "":
  127. link = p.get("git", p.github)
  128. linkParts = p.github.split("/")
  129. if len(linkParts) >= 5:
  130. ret += "<a href=\"" + link + "\"><img "
  131. if showInline:
  132. ret += "style =\"vertical-align: middle; padding-bottom: 0.25em;\" "
  133. ret += "src=\"https://img.shields.io/github/last-commit/"
  134. ret += linkParts[3] + "/" + linkParts[4]
  135. ret += ".svg?logo=git&style=flat\" /></a>"
  136. return ret
  137. def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = "", showLastCommit = True, hide_description = False, updates_as_heading = False, desc_has_collapse = False):
  138. title = p.title
  139. if lang != "":
  140. if p.get("title_" + lang, "") != "":
  141. title = p.get("title_" + lang, "")
  142. if title == "Blog":
  143. title = p.post
  144. if updates_as_heading:
  145. year = p.get("update", p.get("date", ""))[0:4]
  146. else:
  147. year = p.get("date", "")[0:4]
  148. if year != lastyear:
  149. lastyear = year
  150. if yearsAsHeading:
  151. print("<h4>" + str(year) + "</h4>")
  152. dateto = ""
  153. if p.get("date", "" != ""):
  154. year = p.get("date", "")[0:4]
  155. if showOnlyStartDate:
  156. dateto = " (%s)" % (year)
  157. if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
  158. if showDateSpan:
  159. dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
  160. if nicelyFormatFullDate:
  161. dateto = " - " + datetime.strptime(p.get("update", p.date), "%Y-%m-%d").strftime("%B %d, %Y")
  162. print("<li>")
  163. print("<a href=\"" + p.url + "\"><b>" + title + "</b></a>" + dateto)
  164. if hide_description == False:
  165. if p.get("description", "") != "":
  166. description = p.get("description", "")
  167. if lang != "":
  168. description = p.get("description_" + lang, description)
  169. if desc_has_collapse:
  170. print("<br><span class=\"listdesc collapse_menu\">" + description + "</span>")
  171. else:
  172. print("<br><span class=\"listdesc\">" + description + "</span>")
  173. if showLastCommit:
  174. link = githubCommitBadge(p)
  175. if len(link) > 0:
  176. print("<br>" + link)
  177. print("</li>")
  178. return lastyear
  179. def printRecentMenu(count = 5):
  180. posts = [p for p in pages if "date" in p and p.lang == "en"]
  181. posts.sort(key=lambda p: p.get("update", p.get("date")), reverse=True)
  182. if count > 0:
  183. posts = posts[0:count]
  184. print("<ul id='menulist'>")
  185. lastyear = "0"
  186. for p in posts:
  187. lastyear = printMenuItem(p, count == 0, False, False, True, lastyear, "", False, False, True)
  188. print("</ul>")
  189. def printBlogMenu(year_min=None, year_max=None):
  190. posts = [p for p in pages if "post" in p and p.lang == "en"]
  191. posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
  192. if year_min != None:
  193. posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) >= int(year_min)]
  194. if year_max != None:
  195. posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) <= int(year_max)]
  196. print("<ul id='menulist'>")
  197. lastyear = "0"
  198. for p in posts:
  199. lastyear = printMenuItem(p, True, False, False, True, lastyear)
  200. print("</ul>")
  201. def printProjectsMenu():
  202. # prints all pages with parent 'projects' or 'stuff'.
  203. # first the ones without date, sorted by position.
  204. # this first section includes sub-headings for children
  205. # in a hidden div, expanding when clicking the description.
  206. # then afterwards those with date, split by year.
  207. # also supports blog posts with parent.
  208. enpages = [p for p in pages if p.lang == "en"]
  209. # select pages without date
  210. dpages = [p for p in enpages if p.get("date", "") == ""]
  211. # only those that have a parent in ['projects', 'stuff']
  212. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  213. # sort by position
  214. mpages.sort(key=lambda p: [int(p.get("position", "999"))])
  215. print("<ul id='menulist'>")
  216. # print all pages
  217. for p in mpages:
  218. # fetch subpages for these top-level items
  219. subpages = [sub for sub in enpages if sub.get("parent", "none") == p.get("child-id", "unknown")]
  220. order = p.get("sort-order", "date")
  221. if order == "position":
  222. subpages.sort(key=lambda p: p["position"])
  223. else:
  224. subpages.sort(key=lambda p: p["date"], reverse = True)
  225. printMenuItem(p, False, False, False, False, "0", "", True, False, False, len(subpages) > 0)
  226. # print subpages
  227. if len(subpages) > 0:
  228. print("<div class='collapsecontent_menu'>")
  229. print("<ul>")
  230. for sp in subpages:
  231. printMenuItem(sp, False, True, True, False, "0", "", False, True)
  232. print("</ul>")
  233. print("</div>")
  234. # slect pages with a date
  235. dpages = [p for p in enpages if p.get("date", "") != ""]
  236. # only those that have a parent in ['projects', 'stuff']
  237. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  238. # sort by date
  239. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  240. # print all pages
  241. lastyear = "0"
  242. for p in mpages:
  243. # fetch subpages for these top-level items
  244. subpages = [sub for sub in enpages if sub.get("parent", "none") == p.get("child-id", "unknown")]
  245. order = p.get("sort-order", "date")
  246. if order == "position":
  247. subpages.sort(key=lambda p: p["position"])
  248. else:
  249. subpages.sort(key=lambda p: p["date"], reverse = True)
  250. lastyear = printMenuItem(p, True, True, False, False, lastyear, "", True, False, False, len(subpages) > 0)
  251. # print subpages
  252. if len(subpages) > 0:
  253. print("<div class='collapsecontent_menu'>")
  254. print("<ul>")
  255. for sp in subpages:
  256. printMenuItem(sp, False, True, True, False, "0", "", False, True)
  257. print("</ul>")
  258. print("</div>")
  259. print("</ul>")
  260. def printMenuGeneric(mpages = None, sortKey = None, sortReverse = True):
  261. if mpages == None:
  262. mpages = [p for p in pages if p.get("parent", "__none__") == page["child-id"] and p.lang == "en"]
  263. if sortKey != None:
  264. mpages.sort(key = sortKey, reverse = sortReverse)
  265. if len(mpages) > 0:
  266. print("<ul id='menulist'>")
  267. for p in mpages:
  268. printMenuItem(p, False, True, True)
  269. print("</ul>")
  270. def printMenuDate(mpages = None, sortReverse = True):
  271. sortKey = lambda p: p["date"]
  272. printMenuGeneric(mpages, sortKey, sortReverse)
  273. def printMenuPositional(mpages = None):
  274. printMenuGeneric(mpages, lambda p: int(p["position"]), False)
  275. def printMenu(mpages = None):
  276. order = page.get("sort-order", "date")
  277. if order == "position":
  278. printMenuPositional(mpages)
  279. else:
  280. printMenuDate(mpages)
  281. def printRobotMenuEnglish():
  282. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
  283. mpages.sort(key=lambda p: int(p["position"]))
  284. print("<ul id='menulist'>")
  285. for p in mpages:
  286. printMenuItem(p)
  287. print("</ul>")
  288. def printRobotMenuDeutsch():
  289. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
  290. mpages.sort(key=lambda p: int(p["position"]))
  291. print("<ul id='menulist'>")
  292. for p in mpages:
  293. printMenuItem(p, False, False, False, False, "0", "de")
  294. print("</ul>")
  295. def printSteamMenuEnglish():
  296. mpages = [p for p in pages if p.get("parent", "") == "steam" and p.lang == "en"]
  297. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  298. print("<ul id='menulist'>")
  299. for p in mpages:
  300. printMenuItem(p, False, False, False, True)
  301. print("</ul>")
  302. def printSteamMenuDeutsch():
  303. # TODO show german pages, or english pages when german not available
  304. printSteamMenuEnglish()
  305. # -----------------------------------------------------------------------------
  306. # lightgallery helper macro
  307. # -----------------------------------------------------------------------------
  308. # call this macro like this:
  309. # lightgallery([
  310. # [ "image-link", "description" ],
  311. # [ "image-link", "thumbnail-link", "description" ],
  312. # [ "youtube-link", "thumbnail-link", "description" ],
  313. # [ "video-link", "mime", "thumbnail-link", "image-link", "description" ],
  314. # [ "video-link", "mime", "", "", "description" ],
  315. # ])
  316. # it will also auto-generate thumbnails and resize and strip EXIF from images
  317. # using the included web-image-resize script.
  318. # and it can generate video thumbnails and posters with the video-thumb script.
  319. def lightgallery_check_thumbnail(link, thumb):
  320. # only check local image links
  321. if not link.startswith('img/'):
  322. return
  323. # generate thumbnail filename web-image-resize will create
  324. x = link.rfind('.')
  325. img = link[:x] + '_small' + link[x:]
  326. # only run when desired thumb path matches calculated ones
  327. if thumb != img:
  328. return
  329. # generate fs path to images
  330. path = os.path.join(os.getcwd(), 'static', link)
  331. img = os.path.join(os.getcwd(), 'static', thumb)
  332. # no need to generate thumb again
  333. if os.path.exists(img):
  334. return
  335. # run web-image-resize to generate thumbnail
  336. script = os.path.join(os.getcwd(), 'web-image-resize')
  337. os.system(script + ' ' + path)
  338. def lightgallery_check_thumbnail_video(link, thumb, poster):
  339. # only check local image links
  340. if not link.startswith('img/'):
  341. return
  342. # generate thumbnail filenames video-thumb will create
  343. x = link.rfind('.')
  344. thumb_l = link[:x] + '_thumb.png'
  345. poster_l = link[:x] + '_poster.png'
  346. # only run when desired thumb path matches calculated ones
  347. if (thumb_l != thumb) or (poster_l != poster):
  348. return
  349. # generate fs path to images
  350. path = os.path.join(os.getcwd(), 'static', link)
  351. thumb_p = os.path.join(os.getcwd(), 'static', thumb)
  352. poster_p = os.path.join(os.getcwd(), 'static', poster)
  353. # no need to generate thumb again
  354. if os.path.exists(thumb_p) or os.path.exists(poster_p):
  355. return
  356. # run video-thumb to generate thumbnail
  357. script = os.path.join(os.getcwd(), 'video-thumb')
  358. os.system(script + ' ' + path)
  359. def lightgallery(links):
  360. global v_ii
  361. try:
  362. v_ii += 1
  363. except NameError:
  364. v_ii = 0
  365. videos = [l for l in links if len(l) == 5]
  366. v_i = -1
  367. for v in videos:
  368. link, mime, thumb, poster, alt = v
  369. v_i += 1
  370. print('<div style="display:none;" id="video' + str(v_i) + '_' + str(v_ii) + '">')
  371. print('<video class="lg-video-object lg-html5" controls preload="none">')
  372. print('<source src="' + link + '" type="' + mime + '">')
  373. print('<a href="' + link + '">' + alt + '</a>')
  374. print('</video>')
  375. print('</div>')
  376. print('<div class="lightgallery">')
  377. v_i = -1
  378. for l in links:
  379. if (len(l) == 3) or (len(l) == 2):
  380. link = img = alt = ""
  381. style = img2 = ""
  382. if len(l) == 3:
  383. link, img, alt = l
  384. else:
  385. link, alt = l
  386. if "youtube.com" in link:
  387. img = "https://img.youtube.com/vi/"
  388. img += urlparse_foo(link)
  389. img += "/0.jpg" # full size preview
  390. #img += "/default.jpg" # default thumbnail
  391. style = ' style="width:300px;"'
  392. img2 = '<img src="lg/video-play.png" class="picthumb">'
  393. else:
  394. x = link.rfind('.')
  395. img = link[:x] + '_small' + link[x:]
  396. lightgallery_check_thumbnail(link, img)
  397. print('<div class="border" style="position:relative;" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"' + style + '>' + img2 + '</a></div>')
  398. elif len(l) == 5:
  399. v_i += 1
  400. link, mime, thumb, poster, alt = videos[v_i]
  401. if len(thumb) <= 0:
  402. x = link.rfind('.')
  403. thumb = link[:x] + '_thumb.png'
  404. if len(poster) <= 0:
  405. x = link.rfind('.')
  406. poster = link[:x] + '_poster.png'
  407. lightgallery_check_thumbnail_video(link, thumb, poster)
  408. print('<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '_' + str(v_ii) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>')
  409. else:
  410. raise NameError('Invalid number of arguments for lightgallery')
  411. print('</div>')
  412. # -----------------------------------------------------------------------------
  413. # github helper macros
  414. # -----------------------------------------------------------------------------
  415. import json, sys
  416. def print_cnsl_error(s, url):
  417. sys.stderr.write("\n")
  418. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  419. sys.stderr.write("warning: !!!!!!! WARNING !!!!!\n")
  420. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  421. sys.stderr.write("warning: " + s + "\n")
  422. sys.stderr.write("warning: URL: \"" + url + "\"\n")
  423. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  424. sys.stderr.write("warning: !!!!!!! WARNING !!!!!\n")
  425. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  426. sys.stderr.write("\n")
  427. def http_request(url, timeout = 5):
  428. if PY3:
  429. response = urllib.request.urlopen(url, timeout = timeout)
  430. else:
  431. response = urllib.urlopen(url)
  432. if response.getcode() != 200:
  433. raise RuntimeError("invalid response code: " + str(response.getcode()))
  434. data = response.read().decode("utf-8")
  435. return data
  436. def include_url(url, fallback = None, timeout = 2):
  437. sys.stderr.write('sub : fetching page "%s"\n' % url)
  438. if fallback == None:
  439. print_cnsl_error("include_url() without fallback option", url)
  440. timeout = timeout * 3
  441. try:
  442. data = http_request(url, timeout)
  443. except Exception as e:
  444. if fallback != None:
  445. sys.stderr.write('sub : fetching fallback page "%s"\n' % fallback)
  446. try:
  447. data = http_request(fallback, timeout * 3)
  448. except Exception as e:
  449. print_cnsl_error(str(e), fallback)
  450. return
  451. else:
  452. print_cnsl_error(str(e), url)
  453. return
  454. if PY3:
  455. encoded = html.escape(data)
  456. else:
  457. encoded = cgi.escape(data)
  458. print(encoded, end="")
  459. def restRequest(url):
  460. sys.stderr.write('sub : fetching REST "%s"\n' % url)
  461. data = json.loads(http_request(url))
  462. return data
  463. def restReleases(user, repo):
  464. s = "https://api.github.com/repos/"
  465. s += user
  466. s += "/"
  467. s += repo
  468. s += "/releases"
  469. return restRequest(s)
  470. def printLatestRelease(user, repo):
  471. repo_url = "https://github.com/" + user + "/" + repo
  472. print("<div class=\"releasecard\">")
  473. print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")
  474. releases = restReleases(user, repo)
  475. if len(releases) <= 0:
  476. print("No release has been published on GitHub yet.")
  477. print("</div>")
  478. return
  479. releases.sort(key=lambda x: x["published_at"], reverse=True)
  480. r = releases[0]
  481. release_url = r["html_url"]
  482. print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")
  483. if len(r["assets"]) <= 0:
  484. print("<br>No release assets have been published on GitHub for that.")
  485. print("</div>")
  486. return
  487. print("<ul>")
  488. print("Release Assets:")
  489. for a in r["assets"]:
  490. size = int(a["size"])
  491. ss = " "
  492. if size >= (1024 * 1024):
  493. ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
  494. elif size >= 1024:
  495. ss += "(%d KiB)" % (size // 1024)
  496. else:
  497. ss += "(%d Byte)" % (size)
  498. print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
  499. print("</ul></div>")
  500. # -----------------------------------------------------------------------------
  501. # preconvert hooks
  502. # -----------------------------------------------------------------------------
  503. # -----------------------------------------------------------------------------
  504. # multi language support
  505. # -----------------------------------------------------------------------------
  506. def hook_preconvert_anotherlang():
  507. MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
  508. _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
  509. vpages = [] # Set of all virtual pages
  510. for p in pages:
  511. current_lang = DEFAULT_LANG # Default language
  512. langs = [] # List of languages for the current page
  513. page_vpages = {} # Set of virtual pages for the current page
  514. text_lang = re.split(_re_lang, p.source)
  515. text_grouped = dict(zip([current_lang,] + \
  516. [lang.strip() for lang in text_lang[1::2]], \
  517. text_lang[::2]))
  518. for lang, text in (iter(text_grouped.items()) if PY3 else text_grouped.iteritems()):
  519. spath = p.fname.split(os.path.sep)
  520. langs.append(lang)
  521. if lang == "en":
  522. filename = re.sub(MKD_PATT, r"%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
  523. else:
  524. filename = re.sub(MKD_PATT, r".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]
  525. vp = Page(filename, virtual=text)
  526. # Copy real page attributes to the virtual page
  527. for attr in p:
  528. if not ((attr in vp) if PY3 else vp.has_key(attr)):
  529. vp[attr] = p[attr]
  530. # Define a title in the proper language
  531. vp["title"] = p["title_%s" % lang] \
  532. if ((("title_%s" % lang) in p) if PY3 else p.has_key("title_%s" % lang)) \
  533. else p["title"]
  534. # Keep track of the current lang of the virtual page
  535. vp["lang"] = lang
  536. page_vpages[lang] = vp
  537. # Each virtual page has to know about its sister vpages
  538. for lang, vpage in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems()):
  539. vpage["lang_links"] = dict([(l, v["url"]) for l, v in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems())])
  540. vpage["other_lang"] = langs # set other langs and link
  541. vpages += page_vpages.values()
  542. pages[:] = vpages
  543. # -----------------------------------------------------------------------------
  544. # compatibility redirect for old website URLs
  545. # -----------------------------------------------------------------------------
  546. _COMPAT = """ case "%s":
  547. $loc = "%s/%s";
  548. break;
  549. """
  550. _COMPAT_404 = """ default:
  551. $loc = "%s";
  552. break;
  553. """
  554. def hook_preconvert_compat():
  555. fp = open(os.path.join(options.project, "output", "index.php"), 'w')
  556. fp.write("<?\n")
  557. fp.write("// Auto generated xyCMS compatibility index.php\n")
  558. fp.write("$loc = '" + get_conf("base_url") + "/index.de.html';\n")
  559. fp.write("if (isset($_GET['p'])) {\n")
  560. fp.write(" if (isset($_GET['lang'])) {\n")
  561. fp.write(" $_GET['p'] .= 'EN';\n")
  562. fp.write(" }\n")
  563. fp.write(" switch($_GET['p']) {\n")
  564. for p in pages:
  565. if p.get("compat", "") != "":
  566. tmp = p["compat"]
  567. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  568. tmp = tmp + "EN"
  569. fp.write(_COMPAT % (tmp, get_conf("base_url"), p.url))
  570. fp.write("\n")
  571. fp.write(_COMPAT_404 % "/404.html")
  572. fp.write(" }\n")
  573. fp.write("}\n")
  574. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  575. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  576. fp.write(" header('Status: 301 Moved Permanently');\n")
  577. fp.write(" } else {\n")
  578. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  579. fp.write(" }\n")
  580. fp.write("}\n");
  581. fp.write("header('Location: '.$loc);\n")
  582. fp.write("?>")
  583. fp.close()
  584. # -----------------------------------------------------------------------------
  585. # sitemap generation
  586. # -----------------------------------------------------------------------------
  587. _SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
  588. <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  589. %s
  590. </urlset>
  591. """
  592. _SITEMAP_URL = """
  593. <url>
  594. <loc>%s/%s</loc>
  595. <lastmod>%s</lastmod>
  596. <changefreq>%s</changefreq>
  597. <priority>%s</priority>
  598. </url>
  599. """
  600. def hook_preconvert_sitemap():
  601. date = datetime.strftime(datetime.now(), "%Y-%m-%d")
  602. urls = []
  603. for p in pages:
  604. urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
  605. fname = os.path.join(options.project, "output", "sitemap.xml")
  606. fp = open(fname, 'w')
  607. fp.write(_SITEMAP % "".join(urls))
  608. fp.close()
  609. # -----------------------------------------------------------------------------
  610. # postconvert hooks
  611. # -----------------------------------------------------------------------------
  612. # -----------------------------------------------------------------------------
  613. # rss feed generation
  614. # -----------------------------------------------------------------------------
  615. _RSS = """<?xml version="1.0" encoding="UTF-8"?>
  616. <?xml-stylesheet href="%s" type="text/xsl"?>
  617. <rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
  618. <channel>
  619. <title>%s</title>
  620. <link>%s</link>
  621. <atom:link href="%s" rel="self" type="application/rss+xml" />
  622. <description>%s</description>
  623. <language>en-us</language>
  624. <pubDate>%s</pubDate>
  625. <lastBuildDate>%s</lastBuildDate>
  626. <docs>http://blogs.law.harvard.edu/tech/rss</docs>
  627. <generator>Poole</generator>
  628. <ttl>720</ttl>
  629. %s
  630. </channel>
  631. </rss>
  632. """
  633. _RSS_ITEM = """
  634. <item>
  635. <title>%s</title>
  636. <link>%s</link>
  637. <description>%s</description>
  638. <pubDate>%s</pubDate>
  639. <atom:updated>%s</atom:updated>
  640. <guid>%s</guid>
  641. </item>
  642. """
  643. def hook_postconvert_rss():
  644. items = []
  645. # all pages with "date" get put into feed
  646. posts = [p for p in pages if "date" in p]
  647. # sort by update if available, date else
  648. posts.sort(key=lambda p: p.get("update", p.date), reverse=True)
  649. # only put 20 most recent items in feed
  650. posts = posts[:20]
  651. for p in posts:
  652. title = p.title
  653. if "post" in p:
  654. title = p.post
  655. link = "%s/%s" % (BASE_URL, p.url)
  656. desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  657. desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  658. desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  659. desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  660. desc = htmlspecialchars(desc)
  661. date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
  662. date = email.utils.formatdate(date)
  663. update = time.mktime(time.strptime("%s 12" % p.get("update", p.date), "%Y-%m-%d %H"))
  664. update = email.utils.formatdate(update)
  665. items.append(_RSS_ITEM % (title, link, desc, date, update, link))
  666. items = "".join(items)
  667. style = "/css/rss.xsl"
  668. title = "xythobuz.de Blog"
  669. link = "%s" % BASE_URL
  670. feed = "%s/rss.xml" % BASE_URL
  671. desc = htmlspecialchars("xythobuz Electronics & Software Projects")
  672. date = email.utils.formatdate()
  673. rss = _RSS % (style, title, link, feed, desc, date, date, items)
  674. fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
  675. fp.write(rss)
  676. fp.close()
  677. # -----------------------------------------------------------------------------
  678. # compatibility redirect for old mobile pages
  679. # -----------------------------------------------------------------------------
  680. _COMPAT_MOB = """ case "%s":
  681. $loc = "%s/%s";
  682. break;
  683. """
  684. _COMPAT_404_MOB = """ default:
  685. $loc = "%s";
  686. break;
  687. """
  688. def hook_postconvert_mobilecompat():
  689. directory = os.path.join(output, "mobile")
  690. if not os.path.exists(directory):
  691. os.makedirs(directory)
  692. fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
  693. fp.write("<?\n")
  694. fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
  695. fp.write("$loc = '" + get_conf("base_url") + "/index.de.html';\n")
  696. fp.write("if (isset($_GET['p'])) {\n")
  697. fp.write(" if (isset($_GET['lang'])) {\n")
  698. fp.write(" $_GET['p'] .= 'EN';\n")
  699. fp.write(" }\n")
  700. fp.write(" switch($_GET['p']) {\n")
  701. for p in pages:
  702. if p.get("compat", "") != "":
  703. tmp = p["compat"]
  704. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  705. tmp = tmp + "EN"
  706. fp.write(_COMPAT_MOB % (tmp, get_conf("base_url"), re.sub(".html", ".html", p.url)))
  707. fp.write("\n")
  708. fp.write(_COMPAT_404_MOB % "/404.mob.html")
  709. fp.write(" }\n")
  710. fp.write("}\n")
  711. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  712. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  713. fp.write(" header('Status: 301 Moved Permanently');\n")
  714. fp.write(" } else {\n")
  715. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  716. fp.write(" }\n")
  717. fp.write("}\n");
  718. fp.write("header('Location: '.$loc);\n")
  719. fp.write("?>")
  720. fp.close()
  721. # -----------------------------------------------------------------------------
  722. # displaying filesize for download links
  723. # -----------------------------------------------------------------------------
  724. def hook_postconvert_size():
  725. file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'plist'])
  726. def matched_link(matchobj):
  727. try:
  728. path = matchobj.group(1)
  729. if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
  730. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  731. elif path.startswith("/"):
  732. path = path.strip("/")
  733. path = os.path.join("static/", path)
  734. size = os.path.getsize(path)
  735. if size >= (1024 * 1024):
  736. return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
  737. elif size >= 1024:
  738. return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
  739. else:
  740. return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
  741. except:
  742. print("Unable to estimate file size for %s" % matchobj.group(1))
  743. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  744. _re_url = r'<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
  745. for p in pages:
  746. p.html = re.sub(_re_url, matched_link, p.html)