My static website generator using poole https://www.xythobuz.de
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

macros.py 34KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954
  1. # -*- coding: utf-8 -*-
  2. from __future__ import print_function
  3. import sys
  4. import re
  5. import itertools
  6. import email.utils
  7. import os.path
  8. import time
  9. import codecs
  10. from datetime import datetime
  11. import json
  12. def print_cnsl_error(s, url = None):
  13. sys.stderr.write("\n")
  14. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  15. sys.stderr.write("warning: !!!!!!! WARNING !!!!!\n")
  16. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  17. sys.stderr.write("warning: " + s + "\n")
  18. if url != None:
  19. sys.stderr.write("warning: URL: \"" + url + "\"\n")
  20. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  21. sys.stderr.write("warning: !!!!!!! WARNING !!!!!\n")
  22. sys.stderr.write("warning: !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
  23. sys.stderr.write("\n")
# -----------------------------------------------------------------------------
# Python 2/3 hacks
# -----------------------------------------------------------------------------
# True when running under Python 3; used throughout this file to pick APIs.
PY3 = sys.version_info[0] == 3
if PY3:
    import html
    import urllib
    import urllib.request
    from urllib.error import HTTPError, URLError
    # NOTE(review): urlparse_foo uses urllib.parse, which is only reachable
    # here because importing urllib.request pulls it in as a side effect —
    # TODO confirm / consider importing urllib.parse explicitly.
    def urlparse_foo(link):
        # Extract the 'v' query parameter (e.g. a YouTube video id) from a URL.
        return urllib.parse.parse_qs(urllib.parse.urlparse(link).query)['v'][0]
else:
    import cgi
    import urllib
    import urlparse
    def urlparse_foo(link):
        # Python 2 equivalent: same 'v' query parameter extraction.
        return urlparse.parse_qs(urlparse.urlparse(link).query)['v'][0]
  41. # -----------------------------------------------------------------------------
  42. # config "system"
  43. # -----------------------------------------------------------------------------
  44. conf = {
  45. "default_lang": "en",
  46. "base_url": "https://www.xythobuz.de",
  47. "birthday": datetime(1994, 1, 22, 0, 0),
  48. "blog_years_back": 6,
  49. }
  50. def get_conf(name):
  51. return conf[name]
# -----------------------------------------------------------------------------
# local vars for compatibility
# -----------------------------------------------------------------------------
# Module-level aliases so older macro code can keep using the bare names.
DEFAULT_LANG = get_conf("default_lang")
BASE_URL = get_conf("base_url")
  57. # -----------------------------------------------------------------------------
  58. # birthday calculation
  59. # -----------------------------------------------------------------------------
  60. from datetime import timedelta
  61. from calendar import isleap
  62. size_of_day = 1. / 366.
  63. size_of_second = size_of_day / (24. * 60. * 60.)
  64. def date_as_float(dt):
  65. days_from_jan1 = dt - datetime(dt.year, 1, 1)
  66. if not isleap(dt.year) and days_from_jan1.days >= 31+28:
  67. days_from_jan1 += timedelta(1)
  68. return dt.year + days_from_jan1.days * size_of_day + days_from_jan1.seconds * size_of_second
  69. def difference_in_years(start_date, end_date):
  70. return int(date_as_float(end_date) - date_as_float(start_date))
  71. def own_age():
  72. age_dec = difference_in_years(get_conf("birthday"), datetime.now())
  73. age_hex = '0x%X' % age_dec
  74. return '<abbr title="' + str(age_dec) + '">' + str(age_hex) + '</abbr>'
  75. # -----------------------------------------------------------------------------
  76. # sub page helper macro
  77. # -----------------------------------------------------------------------------
  78. def backToParent():
  79. # check for special parent cases
  80. posts = []
  81. if page.get("show_in_quadcopters", "false") == "true":
  82. posts = [p for p in pages if p.url == "quadcopters.html"]
  83. # if not, check for actual parent
  84. if len(posts) == 0:
  85. url = page.get("parent", "") + ".html"
  86. posts = [p for p in pages if p.url == url]
  87. # print if any parent link found
  88. if len(posts) > 0:
  89. p = posts[0]
  90. print('<span class="listdesc">[...back to ' + p.title + ' overview](' + p.url + ')</span>')
  91. # -----------------------------------------------------------------------------
  92. # table helper macro
  93. # -----------------------------------------------------------------------------
  94. def tableHelper(style, header, content):
  95. print("<table>")
  96. if (header != None) and (len(header) == len(style)):
  97. print("<tr>")
  98. for h in header:
  99. print("<th>" + h + "</th>")
  100. print("</tr>")
  101. for ci in range(0, len(content)):
  102. if len(content[ci]) < len(style):
  103. # invalid call of table helper!
  104. print_cnsl_error("invalid table: {}[{}] != {}", len(content[ci]), ci, len(style))
  105. continue
  106. if len(content[ci]) > len(style):
  107. print("<tr " + content[ci][len(style)] + ">")
  108. else:
  109. print("<tr>")
  110. for i in range(0, len(style)):
  111. s = style[i]
  112. td_style = ""
  113. if "monospaced" in s:
  114. td_style += " font-family: monospace;"
  115. if "align-last-right" in s:
  116. if ci == (len(content) - 1):
  117. td_style += " text-align: right;"
  118. else:
  119. if "align-center" in s:
  120. td_style += " text-align: center;"
  121. elif "align-right" in s:
  122. td_style += " text-align: right;"
  123. elif "align-center" in s:
  124. td_style += " text-align: center;"
  125. td_args = ""
  126. if td_style != "":
  127. td_args = " style=\"" + td_style + "\""
  128. print("<td" + td_args + ">")
  129. if isinstance(content[ci][i], tuple):
  130. text, link = content[ci][i]
  131. print("<a href=\"" + link + "\">" + text + "</a>")
  132. else:
  133. text = content[ci][i]
  134. print(text)
  135. print("</td>")
  136. print("</tr>")
  137. print("</table>")
  138. # -----------------------------------------------------------------------------
  139. # menu helper macro
  140. # -----------------------------------------------------------------------------
  141. def githubCommitBadge(p, showInline = False):
  142. ret = ""
  143. if p.get("github", "") != "":
  144. link = p.get("git", p.github)
  145. linkParts = p.github.split("/")
  146. if len(linkParts) >= 5:
  147. ret += "<a href=\"" + link + "\"><img "
  148. if showInline:
  149. ret += "style =\"vertical-align: middle; padding-bottom: 0.25em;\" "
  150. ret += "src=\"https://img.shields.io/github/last-commit/"
  151. ret += linkParts[3] + "/" + linkParts[4]
  152. ret += ".svg?logo=git&style=flat\" /></a>"
  153. return ret
  154. def printMenuItem(p, yearsAsHeading = False, showDateSpan = False, showOnlyStartDate = False, nicelyFormatFullDate = False, lastyear = "0", lang = "", showLastCommit = True, hide_description = False, updates_as_heading = False, desc_has_collapse = False):
  155. title = p.title
  156. if lang != "":
  157. if p.get("title_" + lang, "") != "":
  158. title = p.get("title_" + lang, "")
  159. if title == "Blog":
  160. title = p.post
  161. if updates_as_heading:
  162. year = p.get("update", p.get("date", ""))[0:4]
  163. else:
  164. year = p.get("date", "")[0:4]
  165. if year != lastyear:
  166. lastyear = year
  167. if yearsAsHeading:
  168. print("<h4>" + str(year) + "</h4>")
  169. dateto = ""
  170. if p.get("date", "" != ""):
  171. year = p.get("date", "")[0:4]
  172. if showOnlyStartDate:
  173. dateto = " (%s)" % (year)
  174. if p.get("update", "") != "" and p.get("update", "")[0:4] != year:
  175. if showDateSpan:
  176. dateto = " (%s - %s)" % (year, p.get("update", "")[0:4])
  177. if nicelyFormatFullDate:
  178. dateto = " - " + datetime.strptime(p.get("update", p.date), "%Y-%m-%d").strftime("%B %d, %Y")
  179. print("<li>")
  180. print("<a href=\"" + p.url + "\"><b>" + title + "</b></a>" + dateto)
  181. if hide_description == False:
  182. if p.get("description", "") != "":
  183. description = p.get("description", "")
  184. if lang != "":
  185. description = p.get("description_" + lang, description)
  186. if desc_has_collapse:
  187. print("<br><span class=\"listdesc collapse_menu\">" + description + "</span>")
  188. else:
  189. print("<br><span class=\"listdesc\">" + description + "</span>")
  190. if showLastCommit:
  191. link = githubCommitBadge(p)
  192. if len(link) > 0:
  193. print("<br>" + link)
  194. print("</li>")
  195. return lastyear
  196. def printRecentMenu(count = 5):
  197. posts = [p for p in pages if "date" in p and p.lang == "en"]
  198. posts.sort(key=lambda p: p.get("update", p.get("date")), reverse=True)
  199. if count > 0:
  200. posts = posts[0:count]
  201. print("<ul id='menulist'>")
  202. lastyear = "0"
  203. for p in posts:
  204. lastyear = printMenuItem(p, count == 0, False, False, True, lastyear, "", False, False, True)
  205. print("</ul>")
  206. def printBlogMenu(year_min=None, year_max=None):
  207. posts = [p for p in pages if "post" in p and p.lang == "en"]
  208. posts.sort(key=lambda p: p.get("date", "9999-01-01"), reverse=True)
  209. if year_min != None:
  210. posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) >= int(year_min)]
  211. if year_max != None:
  212. posts = [p for p in posts if int(p.get("date", "9999-01-01")[0:4]) <= int(year_max)]
  213. print("<ul id='menulist'>")
  214. lastyear = "0"
  215. for p in posts:
  216. lastyear = printMenuItem(p, True, False, False, True, lastyear)
  217. print("</ul>")
  218. def printProjectsMenu():
  219. # prints all pages with parent 'projects' or 'stuff'.
  220. # first the ones without date, sorted by position.
  221. # this first section includes sub-headings for children
  222. # in a hidden div, expanding when clicking the description.
  223. # then afterwards those with date, split by year.
  224. # also supports blog posts with parent.
  225. enpages = [p for p in pages if p.lang == "en"]
  226. # select pages without date
  227. dpages = [p for p in enpages if p.get("date", "") == ""]
  228. # only those that have a parent in ['projects', 'stuff']
  229. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  230. # sort by position
  231. mpages.sort(key=lambda p: [int(p.get("position", "999"))])
  232. print("<ul id='menulist'>")
  233. # print all pages
  234. for p in mpages:
  235. # fetch subpages for these top-level items
  236. subpages = [sub for sub in enpages if sub.get("parent", "none") == p.get("child-id", "unknown")]
  237. order = p.get("sort-order", "date")
  238. if order == "position":
  239. subpages.sort(key=lambda p: p["position"])
  240. else:
  241. subpages.sort(key=lambda p: p["date"], reverse = True)
  242. printMenuItem(p, False, False, False, False, "0", "", True, False, False, len(subpages) > 0)
  243. # print subpages
  244. if len(subpages) > 0:
  245. print("<div class='collapsecontent_menu'>")
  246. print("<ul>")
  247. for sp in subpages:
  248. printMenuItem(sp, False, True, True, False, "0", "", False, True)
  249. print("</ul>")
  250. print("</div>")
  251. # slect pages with a date
  252. dpages = [p for p in enpages if p.get("date", "") != ""]
  253. # only those that have a parent in ['projects', 'stuff']
  254. mpages = [p for p in dpages if any(x in p.get("parent", "") for x in [ 'projects', 'stuff' ])]
  255. # sort by date
  256. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  257. # print all pages
  258. lastyear = "0"
  259. for p in mpages:
  260. # fetch subpages for these top-level items
  261. subpages = [sub for sub in enpages if sub.get("parent", "none") == p.get("child-id", "unknown")]
  262. order = p.get("sort-order", "date")
  263. if order == "position":
  264. subpages.sort(key=lambda p: p["position"])
  265. else:
  266. subpages.sort(key=lambda p: p["date"], reverse = True)
  267. lastyear = printMenuItem(p, True, True, False, False, lastyear, "", True, False, False, len(subpages) > 0)
  268. # print subpages
  269. if len(subpages) > 0:
  270. print("<div class='collapsecontent_menu'>")
  271. print("<ul>")
  272. for sp in subpages:
  273. printMenuItem(sp, False, True, True, False, "0", "", False, True)
  274. print("</ul>")
  275. print("</div>")
  276. print("</ul>")
  277. def printMenuGeneric(mpages = None, sortKey = None, sortReverse = True):
  278. if mpages == None:
  279. mpages = [p for p in pages if p.get("parent", "__none__") == page["child-id"] and p.lang == "en"]
  280. if sortKey != None:
  281. mpages.sort(key = sortKey, reverse = sortReverse)
  282. if len(mpages) > 0:
  283. print("<ul id='menulist'>")
  284. for p in mpages:
  285. printMenuItem(p, False, True, True)
  286. print("</ul>")
  287. def printMenuDate(mpages = None, sortReverse = True):
  288. sortKey = lambda p: p["date"]
  289. printMenuGeneric(mpages, sortKey, sortReverse)
  290. def printMenuPositional(mpages = None):
  291. printMenuGeneric(mpages, lambda p: int(p["position"]), False)
  292. def printMenu(mpages = None):
  293. order = page.get("sort-order", "date")
  294. if order == "position":
  295. printMenuPositional(mpages)
  296. else:
  297. printMenuDate(mpages)
  298. def printRobotMenuEnglish():
  299. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "en"]
  300. mpages.sort(key=lambda p: int(p["position"]))
  301. print("<ul id='menulist'>")
  302. for p in mpages:
  303. printMenuItem(p)
  304. print("</ul>")
  305. def printRobotMenuDeutsch():
  306. mpages = [p for p in pages if p.get("parent", "") == "xyrobot" and p.lang == "de"]
  307. mpages.sort(key=lambda p: int(p["position"]))
  308. print("<ul id='menulist'>")
  309. for p in mpages:
  310. printMenuItem(p, False, False, False, False, "0", "de")
  311. print("</ul>")
  312. def printSteamMenuEnglish():
  313. mpages = [p for p in pages if p.get("parent", "") == "steam" and p.lang == "en"]
  314. mpages.sort(key=lambda p: [p.get("date", "9999-01-01")], reverse = True)
  315. print("<ul id='menulist'>")
  316. for p in mpages:
  317. printMenuItem(p, False, False, False, True)
  318. print("</ul>")
  319. def printSteamMenuDeutsch():
  320. # TODO show german pages, or english pages when german not available
  321. printSteamMenuEnglish()
  322. # -----------------------------------------------------------------------------
  323. # lightgallery helper macro
  324. # -----------------------------------------------------------------------------
  325. # call this macro like this:
  326. # lightgallery([
  327. # [ "image-link", "description" ],
  328. # [ "image-link", "thumbnail-link", "description" ],
  329. # [ "youtube-link", "thumbnail-link", "description" ],
  330. # [ "video-link", "mime", "thumbnail-link", "image-link", "description" ],
  331. # [ "video-link", "mime", "", "", "description" ],
  332. # ])
  333. # it will also auto-generate thumbnails and resize and strip EXIF from images
  334. # using the included web-image-resize script.
  335. # and it can generate video thumbnails and posters with the video-thumb script.
  336. def lightgallery_check_thumbnail(link, thumb):
  337. # only check local image links
  338. if not link.startswith('img/'):
  339. return
  340. # generate thumbnail filename web-image-resize will create
  341. x = link.rfind('.')
  342. img = link[:x] + '_small' + link[x:]
  343. # only run when desired thumb path matches calculated ones
  344. if thumb != img:
  345. return
  346. # generate fs path to images
  347. path = os.path.join(os.getcwd(), 'static', link)
  348. img = os.path.join(os.getcwd(), 'static', thumb)
  349. # no need to generate thumb again
  350. if os.path.exists(img):
  351. return
  352. # run web-image-resize to generate thumbnail
  353. script = os.path.join(os.getcwd(), 'web-image-resize')
  354. os.system(script + ' ' + path)
  355. def lightgallery_check_thumbnail_video(link, thumb, poster):
  356. # only check local image links
  357. if not link.startswith('img/'):
  358. return
  359. # generate thumbnail filenames video-thumb will create
  360. x = link.rfind('.')
  361. thumb_l = link[:x] + '_thumb.png'
  362. poster_l = link[:x] + '_poster.png'
  363. # only run when desired thumb path matches calculated ones
  364. if (thumb_l != thumb) or (poster_l != poster):
  365. return
  366. # generate fs path to images
  367. path = os.path.join(os.getcwd(), 'static', link)
  368. thumb_p = os.path.join(os.getcwd(), 'static', thumb)
  369. poster_p = os.path.join(os.getcwd(), 'static', poster)
  370. # no need to generate thumb again
  371. if os.path.exists(thumb_p) or os.path.exists(poster_p):
  372. return
  373. # run video-thumb to generate thumbnail
  374. script = os.path.join(os.getcwd(), 'video-thumb')
  375. os.system(script + ' ' + path)
def lightgallery(links):
    """Print a lightgallery <div> for a list of image and/or video entries.

    See the comment block above for the accepted entry shapes: 2/3-element
    image entries and 5-element video entries.  Thumbnails are generated on
    demand via lightgallery_check_thumbnail(_video).
    """
    # Module-global gallery counter so element ids stay unique when several
    # galleries appear on one page; the first call initializes it to 0.
    global v_ii
    try:
        v_ii += 1
    except NameError:
        v_ii = 0
    # Hidden <video> elements are printed first; the gallery entries below
    # reference them by id through their data-html attribute.
    videos = [l for l in links if len(l) == 5]
    v_i = -1
    for v in videos:
        link, mime, thumb, poster, alt = v
        v_i += 1
        print('<div style="display:none;" id="video' + str(v_i) + '_' + str(v_ii) + '">')
        print('<video class="lg-video-object lg-html5" controls preload="none">')
        print('<source src="' + link + '" type="' + mime + '">')
        print('<a href="' + link + '">' + alt + '</a>')
        print('</video>')
        print('</div>')
    print('<div class="lightgallery">')
    v_i = -1
    for l in links:
        if (len(l) == 3) or (len(l) == 2):
            # image entry: [link, alt] or [link, thumb, alt]
            link = img = alt = ""
            style = img2 = ""
            if len(l) == 3:
                link, img, alt = l
            else:
                link, alt = l
            if "youtube.com" in link:
                # YouTube: use the official preview image plus a play overlay
                img = "https://img.youtube.com/vi/"
                img += urlparse_foo(link)
                img += "/0.jpg" # full size preview
                #img += "/default.jpg" # default thumbnail
                style = ' style="width:300px;"'
                img2 = '<img src="lg/video-play.png" class="picthumb">'
            elif link.startswith('img/'):
                # local image: derive the auto-generated _small thumbnail name
                x = link.rfind('.')
                img = link[:x] + '_small' + link[x:]
            else:
                # remote image: used directly, just bounded in display size
                img = link
                style = ' style="max-width:300px;max-height:300px;"'
            lightgallery_check_thumbnail(link, img)
            print('<div class="border" style="position:relative;" data-src="' + link + '"><a href="' + link + '"><img class="pic" src="' + img + '" alt="' + alt + '"' + style + '>' + img2 + '</a></div>')
        elif len(l) == 5:
            # video entry: [link, mime, thumb, poster, alt]; empty thumb or
            # poster names fall back to the video-thumb naming scheme
            v_i += 1
            link, mime, thumb, poster, alt = videos[v_i]
            if len(thumb) <= 0:
                x = link.rfind('.')
                thumb = link[:x] + '_thumb.png'
            if len(poster) <= 0:
                x = link.rfind('.')
                poster = link[:x] + '_poster.png'
            lightgallery_check_thumbnail_video(link, thumb, poster)
            print('<div class="border" data-poster="' + poster + '" data-sub-html="' + alt + '" data-html="#video' + str(v_i) + '_' + str(v_ii) + '"><a href="' + link + '"><img class="pic" src="' + thumb + '"></a></div>')
        else:
            raise NameError('Invalid number of arguments for lightgallery')
    print('</div>')
  432. # -----------------------------------------------------------------------------
  433. # github helper macros
  434. # -----------------------------------------------------------------------------
  435. def http_request(url, timeout = 5):
  436. if PY3:
  437. response = urllib.request.urlopen(url, timeout = timeout)
  438. else:
  439. response = urllib.urlopen(url)
  440. if response.getcode() != 200:
  441. raise RuntimeError("invalid response code: " + str(response.getcode()))
  442. data = response.read().decode("utf-8")
  443. return data
  444. def include_url(url, fallback = None, data_slice = None, timeout = 2):
  445. sys.stderr.write('sub : fetching page "%s"\n' % url)
  446. if fallback == None:
  447. print_cnsl_error("include_url() without fallback option", url)
  448. timeout = timeout * 3
  449. try:
  450. data = http_request(url, timeout)
  451. except Exception as e:
  452. if fallback != None:
  453. sys.stderr.write('sub : fetching fallback page "%s"\n' % fallback)
  454. try:
  455. data = http_request(fallback, timeout * 3)
  456. except Exception as e:
  457. print_cnsl_error(str(e), fallback)
  458. return
  459. else:
  460. print_cnsl_error(str(e), url)
  461. return
  462. if isinstance(data_slice, tuple):
  463. start, end = data_slice
  464. if end < start:
  465. print_cnsl_error("invalid slice: end={} < start={}", end, start)
  466. else:
  467. lines = data.split("\n")
  468. slc = lines[max(0, start - 1) : end]
  469. data = "\n".join(slc)
  470. #sys.stderr.write("\n")
  471. #sys.stderr.write("Selected Slice:\n")
  472. #sys.stderr.write(str(len(slc)))
  473. #sys.stderr.write("\n")
  474. #for l in slc:
  475. # sys.stderr.write(l + "\n")
  476. #sys.stderr.write("\n\n")
  477. elif isinstance(data_slice, list):
  478. lines = data.split("\n")
  479. data = []
  480. for ds in data_slice:
  481. start, end = ds
  482. if end < start:
  483. print_cnsl_error("invalid slice: end={} < start={}", end, start)
  484. else:
  485. slc = lines[max(0, start - 1) : end]
  486. data.append("\n".join(slc))
  487. data = "\n\n// ...\n\n".join(data)
  488. if PY3:
  489. encoded = html.escape(data)
  490. else:
  491. encoded = cgi.escape(data)
  492. print(encoded, end="")
  493. def include_sourcecode_slice(sh_type, data_slice, filename, url_pre, fallback_pre = None, timeout = 2):
  494. url = url_pre + filename
  495. fallback = (fallback_pre + filename) if fallback_pre != None else None
  496. off = data_slice[0] if data_slice != None else 1
  497. print('<pre class="sh_' + sh_type + '" offset="' + str(off))
  498. if isinstance(data_slice, list):
  499. print(' skip_line_no')
  500. print('">')
  501. include_url(url, fallback, data_slice, timeout)
  502. print('</pre>')
  503. print('<p class="sh_link_upstream">Link to the complete file "<a href="' + url + '">' + url.split("/")[-1] + '</a>"')
  504. if fallback != None:
  505. print(' (<a href="' + fallback + '">alternative</a>)')
  506. print('</p>')
  507. def restRequest(url):
  508. sys.stderr.write('sub : fetching REST "%s"\n' % url)
  509. data = json.loads(http_request(url))
  510. return data
  511. def restReleases(user, repo):
  512. s = "https://api.github.com/repos/"
  513. s += user
  514. s += "/"
  515. s += repo
  516. s += "/releases"
  517. return restRequest(s)
  518. def printLatestRelease(user, repo):
  519. repo_url = "https://github.com/" + user + "/" + repo
  520. print("<div class=\"releasecard\">")
  521. print("Release builds for " + repo + " are <a href=\"" + repo_url + "/releases\">available on GitHub</a>.<br>\n")
  522. releases = restReleases(user, repo)
  523. if len(releases) <= 0:
  524. print("No release has been published on GitHub yet.")
  525. print("</div>")
  526. return
  527. releases.sort(key=lambda x: x["published_at"], reverse=True)
  528. r = releases[0]
  529. release_url = r["html_url"]
  530. print("Latest release of <a href=\"" + repo_url + "\">" + repo + "</a>, at the time of this writing: <a href=\"" + release_url + "\">" + r["name"] + "</a> (" + datetime.strptime(r["published_at"], "%Y-%m-%dT%H:%M:%SZ").strftime("%Y-%m-%d %H:%M:%S") + ")\n")
  531. if len(r["assets"]) <= 0:
  532. print("<br>No release assets have been published on GitHub for that.")
  533. print("</div>")
  534. return
  535. print("<ul>")
  536. print("Release Assets:")
  537. for a in r["assets"]:
  538. size = int(a["size"])
  539. ss = " "
  540. if size >= (1024 * 1024):
  541. ss += "(%.1f MiB)" % (size / (1024.0 * 1024.0))
  542. elif size >= 1024:
  543. ss += "(%d KiB)" % (size // 1024)
  544. else:
  545. ss += "(%d Byte)" % (size)
  546. print("<li><a href=\"" + a["browser_download_url"] + "\">" + a["name"] + "</a>" + ss)
  547. print("</ul></div>")
# -----------------------------------------------------------------------------
# preconvert hooks
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# multi language support
# -----------------------------------------------------------------------------
def hook_preconvert_anotherlang():
    # Poole preconvert hook: split every source page into one virtual page per
    # language.  Sections are delimited by "lang: xx" markers in the page
    # source; text before the first marker belongs to DEFAULT_LANG.
    MKD_PATT = r'\.(?:md|mkd|mdown|markdown)$'
    _re_lang = re.compile(r'^[\s+]?lang[\s+]?[:=]((?:.|\n )*)', re.MULTILINE)
    vpages = [] # Set of all virtual pages
    for p in pages:
        current_lang = DEFAULT_LANG # Default language
        langs = [] # List of languages for the current page
        page_vpages = {} # Set of virtual pages for the current page
        # re.split with a capturing group alternates text chunks and captured
        # language codes: even indices are text, odd indices are languages.
        text_lang = re.split(_re_lang, p.source)
        text_grouped = dict(zip([current_lang,] + \
                [lang.strip() for lang in text_lang[1::2]], \
                text_lang[::2]))
        for lang, text in (iter(text_grouped.items()) if PY3 else text_grouped.iteritems()):
            spath = p.fname.split(os.path.sep)
            langs.append(lang)
            # English keeps the plain filename; any other language gets
            # ".<lang>" inserted before the markdown extension.
            if lang == "en":
                filename = re.sub(MKD_PATT, r"%s\g<0>" % "", p.fname).split(os.path.sep)[-1]
            else:
                filename = re.sub(MKD_PATT, r".%s\g<0>" % lang, p.fname).split(os.path.sep)[-1]
            vp = Page(filename, virtual=text)
            # Copy real page attributes to the virtual page
            for attr in p:
                if not ((attr in vp) if PY3 else vp.has_key(attr)):
                    vp[attr] = p[attr]
            # Define a title in the proper language
            vp["title"] = p["title_%s" % lang] \
                    if ((("title_%s" % lang) in p) if PY3 else p.has_key("title_%s" % lang)) \
                    else p["title"]
            # Keep track of the current lang of the virtual page
            vp["lang"] = lang
            page_vpages[lang] = vp
        # Each virtual page has to know about its sister vpages
        for lang, vpage in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems()):
            vpage["lang_links"] = dict([(l, v["url"]) for l, v in (iter(page_vpages.items()) if PY3 else page_vpages.iteritems())])
            vpage["other_lang"] = langs # set other langs and link
        vpages += page_vpages.values()
    # Replace the real page list in place with the virtual pages.
    pages[:] = vpages
  591. # -----------------------------------------------------------------------------
  592. # compatibility redirect for old website URLs
  593. # -----------------------------------------------------------------------------
  594. _COMPAT = """ case "%s":
  595. $loc = "%s/%s";
  596. break;
  597. """
  598. _COMPAT_404 = """ default:
  599. $loc = "%s";
  600. break;
  601. """
  602. def hook_preconvert_compat():
  603. fp = open(os.path.join(options.project, "output", "index.php"), 'w')
  604. fp.write("<?\n")
  605. fp.write("// Auto generated xyCMS compatibility index.php\n")
  606. fp.write("$loc = '" + get_conf("base_url") + "/index.de.html';\n")
  607. fp.write("if (isset($_GET['p'])) {\n")
  608. fp.write(" if (isset($_GET['lang'])) {\n")
  609. fp.write(" $_GET['p'] .= 'EN';\n")
  610. fp.write(" }\n")
  611. fp.write(" switch($_GET['p']) {\n")
  612. for p in pages:
  613. if p.get("compat", "") != "":
  614. tmp = p["compat"]
  615. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  616. tmp = tmp + "EN"
  617. fp.write(_COMPAT % (tmp, get_conf("base_url"), p.url))
  618. fp.write("\n")
  619. fp.write(_COMPAT_404 % "/404.html")
  620. fp.write(" }\n")
  621. fp.write("}\n")
  622. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  623. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  624. fp.write(" header('Status: 301 Moved Permanently');\n")
  625. fp.write(" } else {\n")
  626. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  627. fp.write(" }\n")
  628. fp.write("}\n");
  629. fp.write("header('Location: '.$loc);\n")
  630. fp.write("?>")
  631. fp.close()
  632. # -----------------------------------------------------------------------------
  633. # sitemap generation
  634. # -----------------------------------------------------------------------------
  635. _SITEMAP = """<?xml version="1.0" encoding="UTF-8"?>
  636. <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  637. %s
  638. </urlset>
  639. """
  640. _SITEMAP_URL = """
  641. <url>
  642. <loc>%s/%s</loc>
  643. <lastmod>%s</lastmod>
  644. <changefreq>%s</changefreq>
  645. <priority>%s</priority>
  646. </url>
  647. """
  648. def hook_preconvert_sitemap():
  649. date = datetime.strftime(datetime.now(), "%Y-%m-%d")
  650. urls = []
  651. for p in pages:
  652. urls.append(_SITEMAP_URL % (BASE_URL, p.url, date, p.get("changefreq", "monthly"), p.get("priority", "0.5")))
  653. fname = os.path.join(options.project, "output", "sitemap.xml")
  654. fp = open(fname, 'w')
  655. fp.write(_SITEMAP % "".join(urls))
  656. fp.close()
# -----------------------------------------------------------------------------
# postconvert hooks
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# rss feed generation
# -----------------------------------------------------------------------------
# RSS 2.0 channel envelope.  %s slots, in order: xsl stylesheet href, feed
# title, site link, self link, description, pubDate, lastBuildDate, and the
# concatenated item entries.
_RSS = """<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet href="%s" type="text/xsl"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>%s</title>
<link>%s</link>
<atom:link href="%s" rel="self" type="application/rss+xml" />
<description>%s</description>
<language>en-us</language>
<pubDate>%s</pubDate>
<lastBuildDate>%s</lastBuildDate>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
<generator>Poole</generator>
<ttl>720</ttl>
%s
</channel>
</rss>
"""
# One RSS <item>.  %s slots: title, link, description, pubDate, updated, guid.
_RSS_ITEM = """
<item>
<title>%s</title>
<link>%s</link>
<description>%s</description>
<pubDate>%s</pubDate>
<atom:updated>%s</atom:updated>
<guid>%s</guid>
</item>
"""
  691. def hook_postconvert_rss():
  692. items = []
  693. # all pages with "date" get put into feed
  694. posts = [p for p in pages if "date" in p]
  695. # sort by update if available, date else
  696. posts.sort(key=lambda p: p.get("update", p.date), reverse=True)
  697. # only put 20 most recent items in feed
  698. posts = posts[:20]
  699. for p in posts:
  700. title = p.title
  701. if "post" in p:
  702. title = p.post
  703. link = "%s/%s" % (BASE_URL, p.url)
  704. desc = p.html.replace("href=\"img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  705. desc = desc.replace("src=\"img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  706. desc = desc.replace("href=\"/img", "%s%s%s" % ("href=\"", BASE_URL, "/img"))
  707. desc = desc.replace("src=\"/img", "%s%s%s" % ("src=\"", BASE_URL, "/img"))
  708. desc = htmlspecialchars(desc)
  709. date = time.mktime(time.strptime("%s 12" % p.date, "%Y-%m-%d %H"))
  710. date = email.utils.formatdate(date)
  711. update = time.mktime(time.strptime("%s 12" % p.get("update", p.date), "%Y-%m-%d %H"))
  712. update = email.utils.formatdate(update)
  713. items.append(_RSS_ITEM % (title, link, desc, date, update, link))
  714. items = "".join(items)
  715. style = "/css/rss.xsl"
  716. title = "xythobuz.de Blog"
  717. link = "%s" % BASE_URL
  718. feed = "%s/rss.xml" % BASE_URL
  719. desc = htmlspecialchars("xythobuz Electronics & Software Projects")
  720. date = email.utils.formatdate()
  721. rss = _RSS % (style, title, link, feed, desc, date, date, items)
  722. fp = codecs.open(os.path.join(output, "rss.xml"), "w", "utf-8")
  723. fp.write(rss)
  724. fp.close()
  725. # -----------------------------------------------------------------------------
  726. # compatibility redirect for old mobile pages
  727. # -----------------------------------------------------------------------------
  728. _COMPAT_MOB = """ case "%s":
  729. $loc = "%s/%s";
  730. break;
  731. """
  732. _COMPAT_404_MOB = """ default:
  733. $loc = "%s";
  734. break;
  735. """
  736. def hook_postconvert_mobilecompat():
  737. directory = os.path.join(output, "mobile")
  738. if not os.path.exists(directory):
  739. os.makedirs(directory)
  740. fp = codecs.open(os.path.join(directory, "index.php"), "w", "utf-8")
  741. fp.write("<?\n")
  742. fp.write("// Auto generated xyCMS compatibility mobile/index.php\n")
  743. fp.write("$loc = '" + get_conf("base_url") + "/index.de.html';\n")
  744. fp.write("if (isset($_GET['p'])) {\n")
  745. fp.write(" if (isset($_GET['lang'])) {\n")
  746. fp.write(" $_GET['p'] .= 'EN';\n")
  747. fp.write(" }\n")
  748. fp.write(" switch($_GET['p']) {\n")
  749. for p in pages:
  750. if p.get("compat", "") != "":
  751. tmp = p["compat"]
  752. if p.get("lang", DEFAULT_LANG) == DEFAULT_LANG:
  753. tmp = tmp + "EN"
  754. fp.write(_COMPAT_MOB % (tmp, get_conf("base_url"), re.sub(".html", ".html", p.url)))
  755. fp.write("\n")
  756. fp.write(_COMPAT_404_MOB % "/404.mob.html")
  757. fp.write(" }\n")
  758. fp.write("}\n")
  759. fp.write("if ($_SERVER['SERVER_PROTOCOL'] == 'HTTP/1.1') {\n")
  760. fp.write(" if (php_sapi_name() == 'cgi') {\n")
  761. fp.write(" header('Status: 301 Moved Permanently');\n")
  762. fp.write(" } else {\n")
  763. fp.write(" header('HTTP/1.1 301 Moved Permanently');\n")
  764. fp.write(" }\n")
  765. fp.write("}\n");
  766. fp.write("header('Location: '.$loc);\n")
  767. fp.write("?>")
  768. fp.close()
  769. # -----------------------------------------------------------------------------
  770. # displaying filesize for download links
  771. # -----------------------------------------------------------------------------
  772. def hook_postconvert_size():
  773. file_ext = '|'.join(['pdf', 'zip', 'rar', 'ods', 'odt', 'odp', 'doc', 'xls', 'ppt', 'docx', 'xlsx', 'pptx', 'exe', 'brd', 'plist'])
  774. def matched_link(matchobj):
  775. try:
  776. path = matchobj.group(1)
  777. if path.startswith("http") or path.startswith("//") or path.startswith("ftp"):
  778. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  779. elif path.startswith("/"):
  780. path = path.strip("/")
  781. path = os.path.join("static/", path)
  782. size = os.path.getsize(path)
  783. if size >= (1024 * 1024):
  784. return "<a href=\"%s\">%s</a>&nbsp;(%.1f MiB)" % (matchobj.group(1), matchobj.group(3), size / (1024.0 * 1024.0))
  785. elif size >= 1024:
  786. return "<a href=\"%s\">%s</a>&nbsp;(%d KiB)" % (matchobj.group(1), matchobj.group(3), size // 1024)
  787. else:
  788. return "<a href=\"%s\">%s</a>&nbsp;(%d Byte)" % (matchobj.group(1), matchobj.group(3), size)
  789. except:
  790. print("Unable to estimate file size for %s" % matchobj.group(1))
  791. return '<a href=\"%s\">%s</a>' % (matchobj.group(1), matchobj.group(3))
  792. _re_url = r'<a href=\"([^\"]*?\.(%s))\">(.*?)<\/a>' % file_ext
  793. for p in pages:
  794. p.html = re.sub(_re_url, matched_link, p.html)