tinycm view plugins/export/export.cgi @ rev 121

Remove ashism ==
author Pascal Bellard <pascal.bellard@slitaz.org>
date Tue Feb 26 12:26:25 2019 +0100 (2019-02-26)
parents 854c4a8f0972
children
line source
#!/bin/sh
#
# TinyCM Plugin - Export to static content
#
# Provides CGI helpers (GET, header, html_header, gettext, ...) used below.
. /usr/lib/slitaz/httphelper

#
# NOTE: Exporting wiki to HTML and making all urls work is a bit tricky.
# Actually it doesn't work as expected. The goal is to have a SliTaz codex
# online that can be included on the ISO, so we could have an export
# including a small CGI script to simply display wiki pages via HTTPd
# knowing that with HTML we must also deal with ../../
#
15 if [ "$(GET export)" ]; then
16 d="Export"
17 date=$(date "+%Y%m%d")
18 tmpdir="$tmp/export/$$/wiki-$date"
19 header
20 html_header
21 user_box
22 if ! check_auth && ! admin_user; then
23 gettext "You must be admin to export content."
24 html_footer && exit 0
25 fi
26 cat << EOT
27 <h2>Export</h2>
28 <p>
29 $(gettext "Create a tarball of your wiki and plugins files. EXPERIMENTAL:
30 Export wiki documents to HTML.")
31 </p>
32 <form method="get" action="$WEB_URL">
33 <select name="export">
34 EOT
35 for c in $(ls -1 content/)
36 do
37 echo "<option value=\"${c}\">$c</option>"
38 done
39 echo "<option value='wikitohtml'>wiki to HTML</option>"
40 cat << EOT
41 </select>
42 <input type="submit" value="$(gettext "Export")" />
43 </form>
44 EOT
# HTML fixes EXPERIMENTAL Functions
css_path() {
	# Rewrite the CSS link in exported HTML documents so that pages at
	# each directory depth point back to the top-level style.css.
	# Dots are escaped so 'style.css' cannot match e.g. 'styleXcss';
	# stderr is silenced because a depth with no .html files leaves the
	# glob unexpanded and sed would complain about the literal pattern.
	sed -i 's|style\.css|../style.css|' */*.html 2>/dev/null
	sed -i 's|style\.css|../../style.css|' */*/*.html 2>/dev/null
	sed -i 's|style\.css|../../../style.css|' */*/*/*.html 2>/dev/null
}
gen_tarball() {
	# Create cache/export/<name>-<date>.tar.gz from $tmpdir and print
	# its size.  Uses globals set by the caller: tmpdir, tiny, cache,
	# export, date.
	gettext "Creating tarball"; echo -n ": "
	# -p: the cache dir survives between runs; a plain mkdir failed
	# (noisily) on every export after the first.
	mkdir -p "$tiny/$cache/export"
	# Clean cache: drop entries older than one day.  -mindepth 1 keeps
	# find from listing — and rm from deleting — the export dir itself.
	find "$tiny/$cache/export" -mindepth 1 -mtime +1 -exec rm -rf {} +
	cd "$tmpdir" || return 1
	tar czf "$tiny/$cache/export/$export-$date.tar.gz" "$export"
	cd "$tiny/$cache/export" && du -sh "$export-$date.tar.gz"
}
dl_link() {
	# Print an HTML download link for the tarball produced by
	# gen_tarball.  Uses globals: export, date.
	gettext "Download"
	echo ": <a href='cache/export/$export-$date.tar.gz'>$export-$date.tar.gz</a>"
}
64 # Export requested content
65 case " $(GET export) " in
66 *\ wikitohtml\ *)
67 export="wiki"
68 echo '<pre>'
69 gettext "Exporting:"; echo " $export"
70 mkdir -p $tmpdir/$export
71 gettext "Copying CSS style and images..."; echo
72 cp -a style.css images $tmpdir/$export
73 cd $content/$export
74 for d in $(find . -type f | sed s'!./!!')
75 do
76 d=${d%.txt}
77 [ "$d" = "en/help" ] && continue
78 gettext "Exporting: "; echo "$d.txt"
79 mkdir -p $tmpdir/$export/$(dirname $d)
80 f=$tmpdir/$export/$d.html
81 html_header > ${f}
82 sed -i '/functions.js/'d ${f}
83 sed -i '/favicon.ico/'d ${f}
84 sed -i s'/index.cgi/index.html/'/ ${f}
85 doc="[0-9a-zA-Z\.\#/~\_%=\?\&,\+\:@;!\(\)\*\$'\-]*"
86 #
87 # The sed from wiki urls to html bug if there is 2 links
88 # on same line: [test|Test] tralala [en/index|English]
89 #
90 cat $d.txt | wiki_parser | sed \
91 s"#href='\([^]]*\)?d=\($doc\)'>#href='\2.html'>#"g >> ${f}
92 html_footer >> ${f}
93 done
94 cd $tmpdir/$export
95 css_path
96 gen_tarball
97 rm -rf $tmp/export/$$
98 echo '</pre>'
99 dl_link ;;
100 *\ export\ )
101 html_footer && exit 0 ;;
102 *)
103 export="$(GET export)"
104 tmpdir="content"
105 echo '<pre>'
106 gettext "Exporting:"; echo " $export"
107 gen_tarball
108 echo '</pre>'
109 dl_link ;;
110 esac
112 html_footer && exit 0
113 fi