# Expand [[WikiLink]] markup in page content into HTML links.
# NOTE(review): only a fragment of this sub is visible here; the argument
# unpacking and the closing of the s{}{} are outside this excerpt.
9 sub linkify ($$) { #{{{
# $1 = optional escaping backslash, $2 = optional link text ("text|target"
# form), $3 = link target. An escaped link is reproduced literally as
# [[...]]; otherwise htmllink() builds the anchor, with titlepage()/
# pagetitle() mapping between display text and page names.
13 $content =~ s{(\\?)$config{wiki_link_regexp}}{
14 $2 ? ( $1 ? "[[$2|$3]]" : htmllink($page, titlepage($3), 0, 0, pagetitle($2)))
15 : ( $1 ? "[[$3]]" : htmllink($page, titlepage($3)))
# Fragment of scrubber(): lazily constructs and caches (in $_scrubber) an
# HTML::Scrubber instance used to sanitize htmlized page content.
# The sub's opening line is not part of this excerpt.
23 return $_scrubber if defined $_scrubber;
# HTML::Scrubber is loaded at runtime so it is only required when
# sanitization is actually enabled.
25 eval q{use HTML::Scrubber};
26 # Lists based on http://feedparser.org/docs/html-sanitization.html
27 $_scrubber = HTML::Scrubber->new(
# Whitelist of allowed HTML tags; everything else is stripped.
29 a abbr acronym address area b big blockquote br
30 button caption center cite code col colgroup dd del
31 dfn dir div dl dt em fieldset font form h1 h2 h3 h4
32 h5 h6 hr i img input ins kbd label legend li map
33 menu ol optgroup option p pre q s samp select small
34 span strike strong sub sup table tbody td textarea
35 tfoot th thead tr tt u ul var
# Default rule: for any allowed tag, permit only the attributes in the
# following whitelist (mapped to 1 = allowed).
37 default => [undef, { map { $_ => 1 } qw{
38 abbr accept accept-charset accesskey action
39 align alt axis border cellpadding cellspacing
40 char charoff charset checked cite class
41 clear cols colspan color compact coords
42 datetime dir disabled enctype for frame
43 headers height href hreflang hspace id ismap
44 label lang longdesc maxlength media method
45 multiple name nohref noshade nowrap prompt
46 readonly rel rev rows rowspan rules scope
47 selected shape size span src start summary
48 tabindex target title type usemap valign
# Convert page source of a given type to HTML, then optionally sanitize it.
# NOTE(review): fragment — argument unpacking and some lines are missing
# from this excerpt.
55 sub htmlize ($$) { #{{{
# Load the markdown script once, keyed on its path in %INC.
59 if (! $INC{"/usr/bin/markdown"}) {
# /usr/bin/markdown is a blosxom plugin, not a proper module; it checks
# $blosxom::version, so fake one up before do-ing the script.
61 $blosxom::version="is a proper perl module too much to ask?";
63 do "/usr/bin/markdown";
# Only markdown (.mdwn) sources are supported for htmlization.
66 if ($type eq '.mdwn') {
67 $content=Markdown::Markdown($content);
70 error("htmlization of $type not supported");
# Run the generated HTML through the cached HTML::Scrubber when the
# sanitize option is on.
73 if ($config{sanitize}) {
74 $content=scrubber()->scrub($content);
# Return a sorted list of { url, page } hashes for every page that links
# to $page, for display in the page template.
# NOTE(review): fragment — argument unpacking and loop closings are
# missing from this excerpt.
80 sub backlinks ($) { #{{{
84 foreach my $p (keys %links) {
# Skip the page itself (a page resolving a link to itself).
85 next if bestlink($page, $p) eq $page;
# Include $p only if one of its links actually resolves to $page.
86 if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
# Href is relative to the directory of the linking-to page.
87 my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
89 # Trim common dir prefixes from both pages.
91 my $page_trimmed=$page;
# Repeatedly strip the shared leading "dir/" component from both
# names so the displayed backlink name is as short as possible.
93 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
95 $p_trimmed=~s/^\Q$dir\E// &&
96 $page_trimmed=~s/^\Q$dir\E//;
98 push @links, { url => $href, page => $p_trimmed };
# Sort alphabetically by displayed page name.
102 return sort { $a->{page} cmp $b->{page} } @links;
# Build the breadcrumb trail for $page: a list of { url, page } hashes
# from the wiki root down to the page's parent directories.
# NOTE(review): fragment — the $path bookkeeping between iterations is
# not visible in this excerpt.
105 sub parentlinks ($) { #{{{
# Walk the path components from deepest to shallowest, prepending each
# so the final list reads root-first.
112 foreach my $dir (reverse split("/", $page)) {
115 unshift @ret, { url => "$path$dir.html", page => $dir };
# The root crumb is the wiki name itself; "." when there is no path.
121 unshift @ret, { url => length $path ? $path : ".", page => $config{wikiname} };
# Map a page name to the filename of its rss feed.
# NOTE(review): only the sub header is visible in this excerpt.
125 sub rsspage ($) { #{{{
# Expand [[directive param="value" ...]] preprocessor directives in page
# content by dispatching to registered handlers.
# NOTE(review): fragment — the definition of $handle and several closing
# braces are missing from this excerpt.
131 sub preprocess ($$) { #{{{
# Dispatch table of known directives.
135 my %commands=(inline => \&preprocess_inline);
# A leading backslash escapes the directive: reproduce it literally.
141 if (length $escape) {
142 return "[[$command $params]]";
144 elsif (exists $commands{$command}) {
# Parse key="value" pairs out of the parameter string.
# NOTE(review): [^"]+ means an empty value (key="") never matches —
# confirm whether empty values should be accepted.
146 while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
149 return $commands{$command}->($page, %params);
# Unknown directives are replaced with an error marker in the output.
152 return "[[bad directive $command]]";
# Run the handler over every (possibly escaped) directive occurrence.
156 $content =~ s{(\\?)$config{wiki_processor_regexp}}{$handle->($1, $2, $3)}eg;
# Return the pages matching a globlist, newest first (by creation time),
# truncated to at most $maxitems entries when a limit is given.
# NOTE(review): fragment — argument unpacking is missing from this excerpt.
160 sub blog_list ($$) { #{{{
165 foreach my $page (keys %pagesources) {
166 if (globlist_match($page, $globlist)) {
# Newest first.
171 @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
# No limit, or fewer items than the limit: return everything.
172 return @list if ! $maxitems || @list <= $maxitems;
173 return @list[0..$maxitems - 1];
# Read, linkify and htmlize a page for inlining into $parentpage.
# Links are resolved relative to the parent page, not the inlined page.
# NOTE(review): fragment — the second argument's unpacking and the
# fallback for unknown types are missing from this excerpt.
176 sub get_inline_content ($$) { #{{{
177 my $parentpage=shift;
180 my $file=$pagesources{$page};
181 my $type=pagetype($file);
# Only pages of a known type can be htmlized.
182 if ($type ne 'unknown') {
183 return htmlize($type, linkify(readfile(srcfile($file)), $parentpage));
# Handler for the [[inline ...]] directive: renders a blog-style list of
# matching pages into the parent page, optionally with a post form and
# rss link, and generates the rss feed.
# NOTE(review): fragment — %params unpacking and several closing braces
# are missing from this excerpt.
190 sub preprocess_inline ($@) { #{{{
191 my $parentpage=shift;
# Fill in defaults for missing parameters.
194 if (! exists $params{pages}) {
197 if (! exists $params{archive}) {
198 $params{archive}="no";
200 if (! exists $params{show} && $params{archive} eq "no") {
# Record the globlist as a dependency of the parent page, so the parent
# is re-rendered when matching pages change.
203 if (! exists $depends{$parentpage}) {
204 $depends{$parentpage}=$params{pages};
207 $depends{$parentpage}.=" ".$params{pages};
212 if (exists $params{rootpage}) {
213 # Add a blog post form, with a rss link button.
214 my $formtemplate=HTML::Template->new(blind_cache => 1,
215 filename => "$config{templatedir}/blogpost.tmpl");
216 $formtemplate->param(cgiurl => $config{cgiurl});
217 $formtemplate->param(rootpage => $params{rootpage});
219 $formtemplate->param(rssurl => rsspage(basename($parentpage)));
221 $ret.=$formtemplate->output;
223 elsif ($config{rss}) {
224 # Add a rss link button.
225 my $linktemplate=HTML::Template->new(blind_cache => 1,
226 filename => "$config{templatedir}/rsslink.tmpl");
227 $linktemplate->param(rssurl => rsspage(basename($parentpage)));
228 $ret.=$linktemplate->output;
# archive mode shows titles only; otherwise full inlined page content.
231 my $template=HTML::Template->new(blind_cache => 1,
232 filename => (($params{archive} eq "no")
233 ? "$config{templatedir}/inlinepage.tmpl"
234 : "$config{templatedir}/inlinepagetitle.tmpl"));
237 foreach my $page (blog_list($params{pages}, $params{show})) {
# Never inline the parent page into itself.
238 next if $page eq $parentpage;
240 $template->param(pagelink => htmllink($parentpage, $page));
241 $template->param(content => get_inline_content($parentpage, $page))
242 if $params{archive} eq "no";
243 $template->param(ctime => scalar(gmtime($pagectime{$page})));
244 $ret.=$template->output;
247 # TODO: should really add this to renderedfiles and call
248 # check_overwrite, but currently renderedfiles
249 # only supports listing one file per page.
251 writefile(rsspage($parentpage), $config{destdir},
252 genrss($parentpage, @pages));
# Fill in the page template with a page's rendered content, navigation
# links and metadata, returning the finished HTML document.
# NOTE(review): fragment — argument unpacking and part of the final
# param() call are missing from this excerpt.
258 sub genpage ($$$) { #{{{
263 my $title=pagetitle(basename($page));
265 my $template=HTML::Template->new(blind_cache => 1,
266 filename => "$config{templatedir}/page.tmpl");
# Edit/prefs/recentchanges links are only useful when a cgi is set up.
268 if (length $config{cgiurl}) {
269 $template->param(editurl => cgiurl(do => "edit", page => $page));
270 $template->param(prefsurl => cgiurl(do => "prefs"));
272 $template->param(recentchangesurl => cgiurl(do => "recentchanges"));
# historyurl may contain a [[file]] placeholder for the page's source.
276 if (length $config{historyurl}) {
277 my $u=$config{historyurl};
278 $u=~s/\[\[file\]\]/$pagesources{$page}/g;
279 $template->param(historyurl => $u);
281 if ($config{hyperestraier}) {
282 $template->param(hyperestraierurl => cgiurl());
287 wikiname => $config{wikiname},
288 parentlinks => [parentlinks($page)],
290 backlinks => [backlinks($page)],
# The Discussion link is created with force (the 1,1 flags) even if the
# discussion page does not exist yet.
291 discussionlink => htmllink($page, "Discussion", 1, 1),
292 mtime => scalar(gmtime($mtime)),
293 styleurl => styleurl($page),
296 return $template->output;
# Format a unix timestamp as an RFC 822 date, as required by rss.
# Uses localtime so %z reflects the local utc offset.
# NOTE(review): fragment — the argument unpacking is not visible here.
299 sub date_822 ($) { #{{{
303 return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
# Rewrite relative <a href> and <img src> urls in $content to be
# absolute, by prefixing $url — needed because rss readers resolve
# relative urls unpredictably.
306 sub absolute_urls ($$) { #{{{
307 # sucky sub because rss sucks
# The (?!http:\/\/) lookahead only exempts http:// urls.
# NOTE(review): https://, mailto: etc. would also get prefixed —
# confirm whether that matters for this wiki's content.
313 $content=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$url$1"/ig;
314 $content=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$url$1"/ig;
# Generate the rss feed document for $page, with one item per rendered
# page in @pages.
# NOTE(review): fragment — argument unpacking, the @items accumulator
# and part of the final param() call are missing from this excerpt.
318 sub genrss ($@) { #{{{
# Absolute url of the feed's html page; also used as the base for
# making item content urls absolute.
322 my $url="$config{url}/".htmlpage($page);
324 my $template=HTML::Template->new(blind_cache => 1,
325 filename => "$config{templatedir}/rsspage.tmpl");
328 foreach my $p (@pages) {
330 itemtitle => pagetitle(basename($p)),
331 itemurl => "$config{url}/$renderedfiles{$p}",
332 itempubdate => date_822($pagectime{$p}),
333 itemcontent => absolute_urls(get_inline_content($page, $p), $url),
# Only pages that have actually been rendered get a feed item.
334 } if exists $renderedfiles{$p};
338 title => $config{wikiname},
343 return $template->output;
346 sub check_overwrite ($$) { #{{{
347 # Important security check. Make sure to call this before saving
348 # any files to the source directory.
# Refuse to overwrite $dest unless we know we rendered it from $src
# ourselves (tracked in %renderedfiles) or a full rebuild is in
# progress. Prevents one page's output clobbering another's.
352 if (! exists $renderedfiles{$src} && -e $dest && ! $config{rebuild}) {
353 error("$dest already exists and was rendered from ".
# Name the page(s) that previously rendered to $dest in the error.
354 join(" ",(grep { $renderedfiles{$_} eq $dest } keys
356 ", before, so not rendering from $src");
# Return the file's modification time (stat field 9).
# NOTE(review): the enclosing sub's header is not visible in this excerpt.
363 return (stat($file))[9];
# Scan page content for unescaped wiki links and return the list of
# linked-to page names, plus the page's implicit discussion link.
# NOTE(review): fragment — argument unpacking is missing from this excerpt.
366 sub findlinks ($$) { #{{{
# (?<!\\) skips links escaped with a backslash; $2 is the link target.
371 while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
372 push @links, titlepage($2);
374 # Discussion links are a special case since they're not in the text
375 # of the page, but on its template.
376 return @links, "$page/discussion";
# Render one source file: pages of a known type are linkified,
# preprocessed and htmlized into their destination html file; anything
# else is copied through verbatim. Updates the global link/mtime/
# renderedfiles state as a side effect.
# NOTE(review): fragment — argument unpacking and the else-branch
# opening are missing from this excerpt.
379 sub render ($) { #{{{
382 my $type=pagetype($file);
383 my $srcfile=srcfile($file);
384 if ($type ne 'unknown') {
385 my $content=readfile($srcfile);
386 my $page=pagename($file);
# Record this page's outgoing links before rendering, and clear any
# stale dependency info.
388 $links{$page}=[findlinks($content, $page)];
389 delete $depends{$page};
# Pipeline: wiki links -> preprocessor directives -> html.
391 $content=linkify($content, $page);
392 $content=preprocess($page, $content);
393 $content=htmlize($type, $content);
# Security check before writing into destdir.
395 check_overwrite("$config{destdir}/".htmlpage($page), $page);
396 writefile(htmlpage($page), $config{destdir},
397 genpage($content, $page, mtime($srcfile)));
398 $oldpagemtime{$page}=time;
399 $renderedfiles{$page}=htmlpage($page);
# Non-page file: raw copy into destdir (1 = binary-safe read/write).
402 my $content=readfile($srcfile, 1);
404 check_overwrite("$config{destdir}/$file", $file);
405 writefile($file, $config{destdir}, $content, 1);
406 $oldpagemtime{$file}=time;
407 $renderedfiles{$file}=$file;
# Fragment of prune(): after removing a rendered file, remove any
# parent directories left empty (rmdir fails, ending the loop, as soon
# as a directory is non-empty).
# NOTE(review): the sub header and the loop body are not visible here.
415 my $dir=dirname($file);
416 while (rmdir($dir)) {
# Fragment: generate the hyperestraier search cgi's template, config
# file, and a symlink to the estseek.cgi binary under wikistatedir.
# NOTE(review): the enclosing sub's header, the close() calls and
# several template params are not visible in this excerpt.
422 my $estdir="$config{wikistatedir}/hyperestraier";
423 my $cgi=basename($config{cgiurl});
# Write the search page template wrapped in the wiki's misc template.
# NOTE(review): bareword handle + 2-arg open; flagged, left as-is since
# surrounding code is not visible.
425 open(TEMPLATE, ">$estdir/$cgi.tmpl") ||
426 error("write $estdir/$cgi.tmpl: $!");
427 print TEMPLATE misctemplate("search",
428 "<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n");
# Fill in and write the estseek configuration from its template.
430 open(TEMPLATE, ">$estdir/$cgi.conf") ||
431 error("write $estdir/$cgi.conf: $!");
432 my $template=HTML::Template->new(
433 filename => "$config{templatedir}/estseek.conf"
# Cwd is loaded at runtime, only when this config is generated.
435 eval q{use Cwd 'abs_path'};
438 tmplfile => "$estdir/$cgi.tmpl",
439 destdir => abs_path($config{destdir}),
442 print TEMPLATE $template->output;
# Symlink the cgi name to the system estseek.cgi binary.
444 $cgi="$estdir/".basename($config{cgiurl});
446 symlink("/usr/lib/estraier/estseek.cgi", $cgi) ||
447 error("symlink $cgi: $!");
# Run a hyperestraier estcmd subcommand against the wiki's search
# index, feeding any extra arguments to it via a forked child's stdin.
# NOTE(review): fragment — the fork branches and the loop writing
# @_ to CHILD are missing from this excerpt.
450 sub estcmd ($;@) { #{{{
451 my @params=split(' ', shift);
# All invocations operate on the index under wikistatedir.
452 push @params, "-cl", "$config{wikistatedir}/hyperestraier";
# Fork with a pipe to the child's stdin.
457 my $pid=open(CHILD, "|-");
463 close(CHILD) || error("estcmd @params exited nonzero: $?");
# Child: silence estcmd's stdout, then exec it.
467 open(STDOUT, "/dev/null"); # shut it up (closing won't work)
468 exec("estcmd", @params) || error("can't run estcmd");
# Main refresh pass: scan srcdir (and underlaydir) for pages, detect
# added/removed/changed files, render what needs rendering, propagate
# changes through dependencies and backlinks, and update the search
# index when enabled.
# NOTE(review): fragment — many lines (loop closings, @files/@add/@del
# accumulation, render() calls) are missing from this excerpt.
472 sub refresh () { #{{{
473 # find existing pages
476 eval q{use File::Find};
# First find pass: srcdir. Prune ignored paths, untaint filenames,
# and record which pages exist.
480 if (/$config{wiki_file_prune_regexp}/) {
481 $File::Find::prune=1;
483 elsif (! -d $_ && ! -l $_) {
484 my ($f)=/$config{wiki_file_regexp}/; # untaint
486 warn("skipping bad filename $_\n");
# Strip the srcdir prefix to get the wiki-relative filename.
489 $f=~s/^\Q$config{srcdir}\E\/?//;
491 $exists{pagename($f)}=1;
# Second find pass: underlaydir, providing fallback pages.
499 if (/$config{wiki_file_prune_regexp}/) {
500 $File::Find::prune=1;
502 elsif (! -d $_ && ! -l $_) {
503 my ($f)=/$config{wiki_file_regexp}/; # untaint
505 warn("skipping bad filename $_\n");
508 # Don't add files that are in the
510 $f=~s/^\Q$config{underlaydir}\E\/?//;
# srcdir versions of a page shadow underlay versions.
511 if (! -e "$config{srcdir}/$f" &&
512 ! -l "$config{srcdir}/$f") {
514 $exists{pagename($f)}=1;
519 }, $config{underlaydir});
523 # check for added or removed pages
525 foreach my $file (@files) {
526 my $page=pagename($file);
# No recorded mtime means the page is new (or restored).
527 if (! $oldpagemtime{$page}) {
528 debug("new page $page") unless exists $pagectime{$page};
531 $pagesources{$page}=$file;
# Preserve the original creation time across refreshes.
532 $pagectime{$page}=mtime(srcfile($file))
533 unless exists $pagectime{$page};
# Pages we knew about that no longer exist: remove their output.
537 foreach my $page (keys %oldpagemtime) {
538 if (! $exists{$page}) {
539 debug("removing old page $page");
540 push @del, $pagesources{$page};
541 prune($config{destdir}."/".$renderedfiles{$page});
542 delete $renderedfiles{$page};
543 $oldpagemtime{$page}=0;
544 delete $pagesources{$page};
548 # render any updated files
549 foreach my $file (@files) {
550 my $page=pagename($file);
# Render when the page is new or its source is newer than last render.
552 if (! exists $oldpagemtime{$page} ||
553 mtime(srcfile($file)) > $oldpagemtime{$page}) {
554 debug("rendering changed file $file");
560 # if any files were added or removed, check to see if each page
561 # needs an update due to linking to them or inlining them.
562 # TODO: inefficient; pages may get rendered above and again here;
563 # problem is the bestlink may have changed and we won't know until
# Re-render any page whose links resolve to an added/removed page.
566 FILE: foreach my $file (@files) {
567 my $page=pagename($file);
568 foreach my $f (@add, @del) {
570 foreach my $link (@{$links{$page}}) {
571 if (bestlink($page, $link) eq $p) {
572 debug("rendering $file, which links to $p");
582 # Handle backlinks; if a page has added/removed links, update the
583 # pages it links to. Also handles rebuilding dependent pages.
584 # TODO: inefficient; pages may get rendered above and again here;
585 # problem is the backlinks could be wrong in the first pass render
# Re-render pages whose recorded dependency globlists match any
# rendered or deleted page.
587 if (%rendered || @del) {
588 foreach my $f (@files) {
590 if (exists $depends{$p}) {
591 foreach my $file (keys %rendered, @del) {
592 my $page=pagename($file);
593 if (globlist_match($page, $depends{$p})) {
594 debug("rendering $f, which depends on $page");
# Collect link targets whose backlink sets changed: links newly
# added (in %links but not %oldlinks) ...
604 foreach my $file (keys %rendered, @del) {
605 my $page=pagename($file);
607 if (exists $links{$page}) {
608 foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
610 ! exists $oldlinks{$page} ||
611 ! grep { $_ eq $link } @{$oldlinks{$page}}) {
612 $linkchanged{$link}=1;
# ... and links removed (in %oldlinks but not %links).
616 if (exists $oldlinks{$page}) {
617 foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
619 ! exists $links{$page} ||
620 ! grep { $_ eq $link } @{$links{$page}}) {
621 $linkchanged{$link}=1;
# Re-render each page whose backlinks changed.
626 foreach my $link (keys %linkchanged) {
627 my $linkfile=$pagesources{$link};
628 if (defined $linkfile) {
629 debug("rendering $linkfile, to update its backlinks");
631 $rendered{$linkfile}=1;
# Finally, push any changes into the hyperestraier search index.
636 if ($config{hyperestraier} && (%rendered || @del)) {
637 debug("updating hyperestraier search index");
639 estcmd("gather -cm -bc -cl -sd",
640 map { $config{destdir}."/".$renderedfiles{pagename($_)} }
647 debug("generating hyperestraier cgi config");