tinycm view plugins/export/export.cgi @ rev 54

Improve the export plugin
author Christophe Lincoln <pankso@slitaz.org>
date Sat Jan 25 13:00:48 2014 +0100 (2014-01-25)
parents c4a472d0a45e
children 91c28ed67409
line source
#!/bin/sh
#
# TinyCM Plugin - Export to static content
#
. /usr/lib/slitaz/httphelper
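
# httphelper provides the CGI helpers used below, such as GET() and header();
# html_header, user_box and wiki_parser are expected to come from the TinyCM
# environment that loads this plugin.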

#
# NOTE: Exporting the wiki to HTML and making all URLs work is a bit tricky;
# it doesn't yet work as expected. The goal is to have a SliTaz codex online
# that can also be included on the ISO, so the export could ship a small CGI
# script to simply display wiki pages via HTTPd, keeping in mind that with
# static HTML we must also deal with ../../ relative paths.
#

if [ "$(GET export)" ]; then
	d="Export"
	date=$(date "+%Y%m%d")
	tmpdir="$tmp/export/$$/wiki-$date"
	header
	html_header
	user_box
	cat << EOT
<h2>Export</h2>
<p>
$(gettext "Create a tarball of your wiki and plugin files. EXPERIMENTAL:
Export wiki documents to HTML.")
</p>
<form method="get" action="$WEB_URL">
<select name="export">
EOT
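	# One <option> per top-level folder in content/, plus the experimental
	# wiki-to-HTML export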
	for c in $(ls -1 content/)
	do
		echo "<option value=\"${c}\">$c</option>"
	done
	echo "<option value='wikitohtml'>wiki to HTML</option>"
	cat << EOT
</select>
<input type="submit" value="$(gettext "Export")" />
</form>
EOT

	# EXPERIMENTAL HTML fix-up functions
	css_path() {
		# Rewrite the CSS stylesheet path in all exported documents,
		# one sed per directory depth so the ../ count matches
		sed -i s'/style.css/..\/style.css/' */*.html
		sed -i s'/style.css/..\/..\/style.css/' */*/*.html
		sed -i s'/style.css/..\/..\/..\/style.css/' */*/*/*.html
	}
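	# Tar up $export from $tmpdir into the export cache and print the
	# archive size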
	gen_tarball() {
		gettext "Creating tarball"; echo -n ": "
		cd $tmpdir && mkdir -p $tiny/$cache/export
		# Clean the cache of old exports
		find $tiny/$cache/export -mtime +1 | xargs rm -rf
		tar czf $tiny/$cache/export/$export-$date.tar.gz $export
		cd $tiny/$cache/export && du -sh $export-$date.tar.gz
	}
	dl_link() {
		gettext "Download"; echo \
			": <a href='cache/export/$export-$date.tar.gz'>$export-$date.tar.gz</a>"
	}

	# Export requested content
	case " $(GET export) " in
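		# Experimental: convert every wiki document to a static HTML page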
		*\ wikitohtml\ *)
			export="wiki"
			echo '<pre>'
			gettext "Exporting:"; echo " $export"
			mkdir -p $tmpdir/$export
			gettext "Copying CSS style and images..."; echo
			cp -a style.css images $tmpdir/$export
			cd $content/$export
			for d in $(find . -type f | sed s'!./!!')
			do
				d=${d%.txt}
				[ "$d" = "en/help" ] && continue
				gettext "Exporting: "; echo "$d.txt"
				mkdir -p $tmpdir/$export/$(dirname $d)
				f=$tmpdir/$export/$d.html
				html_header > ${f}
				sed -i '/functions.js/'d ${f}
				sed -i '/favicon.ico/'d ${f}
				sed -i s'/index.cgi/index.html/' ${f}
				doc="[0-9a-zA-Z\.\#/~\_%=\?\&,\+\:@;!\(\)\*\$'\-]*"
				#
				# The sed that turns wiki URLs into HTML links breaks when
				# two links are on the same line: [test|Test] tralala
				# [en/index|English]
				#
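				# e.g. href='?d=en/index'> becomes href='en/index.html'>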
				cat $d.txt | wiki_parser | sed \
					s"#href='\([^]]*\)?d=\($doc\)'>#href='\2.html'>#"g >> ${f}
				html_footer >> ${f}
			done
			cd $tmpdir/$export
			css_path
			gen_tarball
			rm -rf $tmp/export/$$
			echo '</pre>'
			dl_link ;;
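		# Bare 'export' value: nothing selected, just close the page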
		*\ export\ )
			html_footer && exit 0 ;;
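		# Default: pack the selected content/ sub-folder as is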
		*)
			export="$(GET export)"
			tmpdir="content"
			echo '<pre>'
			gettext "Exporting:"; echo " $export"
			gen_tarball
			echo '</pre>'
			dl_link ;;
	esac

	html_footer && exit 0
fi