$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
use warnings;
use strict;
use File::Find;
use File::Spec;
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
    %pagesources);
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
my $wiki_file_prune_regexp=qr!((^|/)\.svn/|\.\.|^\.|\/\.|\.html?$)!;
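# Illustration (not in the original source): with the regexps above,
# "[[SandBox]]" matches $wiki_link_regexp, capturing "SandBox";
# "docs/setup.mdwn" passes $wiki_file_regexp (and the capture untaints it);
# ".svn/entries", "../escape", and "old.html" all hit
# $wiki_file_prune_regexp and are skipped when scanning the source tree.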
my $default_pagetype=".mdwn";
die "usage: ikiwiki [options] source dest\n";
print "Content-type: text/html\n\n";
print "@_\n" if $verbose;
return (stat($page))[9];
sub possibly_foolish_untaint ($) {
my $tainted=shift;
my ($untainted)=$tainted=~/(.*)/;
return $untainted;
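# Background note (not in the original): under "perl -T", values that come
# from outside the program are tainted and cannot be used in shell commands
# or file operations; the only sanctioned laundering is a regexp capture.
# Matching /(.*)/ accepts anything, hence "possibly foolish"; e.g. a
# hypothetical call: my $dir=possibly_foolish_untaint($ENV{HOME});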
if ($page =~ /\.mdwn$/) {
my $type=pagetype($file);
$page=~s/\Q$type\E$// unless $type eq 'unknown';
open (IN, "$file") || error("failed to read $file: $!");
my $dir=dirname($file);
foreach my $s (split(m!/+!, $dir)) {
mkdir($d) || error("failed to create directory $d: $!");
open (OUT, ">$file") || error("failed to write $file: $!");
while ($content =~ /$wiki_link_regexp/g) {
# Given a page and the text of a link on the page, determine which existing
# page that link best points to. Prefers pages under a subdirectory with
# the same name as the source page; failing that, it goes down the directory
# tree to the base looking for matching pages.
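# Worked example (inferred from the rule above): a link "baz" appearing on
# page "foo/bar" is tried as "foo/bar/baz", then "foo/baz", then "baz";
# the first candidate that exists in %links is returned.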
$l.="/" if length $l;
if (exists $links{$l}) {
#debug("for $page, \"$link\", use $l");
} while $cwd=~s!/?[^/]+$!!;
#print STDERR "warning: page $page, broken link: $link\n";
sub isinlinableimage ($) {
$file=~/\.(png|gif|jpg|jpeg)$/;
my $noimagelink=shift;
my $bestlink=bestlink($page, $link);
return $link if $page eq $bestlink;
# TODO BUG: %renderedfiles may not have it, if the linked to page
# was also added and isn't yet rendered! Note that this bug is
# masked by the bug mentioned below that makes all new files
# be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
if (! grep { $_ eq $bestlink } values %renderedfiles) {
return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
$bestlink=File::Spec->abs2rel($bestlink, dirname($page));
if (! $noimagelink && isinlinableimage($bestlink)) {
return "<img src=\"$bestlink\">";
return "<a href=\"$bestlink\">$link</a>";
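# Putting the branches above together (illustrative, not from the original):
# a missing page yields a "?" link into the CGI's create form, an existing
# inlinable image is embedded as <img src="...">, and any other existing
# page becomes a relative <a href="...">$link</a>.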
$content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
if ($type eq '.mdwn') {
return Markdown::Markdown($content);
error("htmlization of $type not supported");
foreach my $p (keys %links) {
next if bestlink($page, $p) eq $page;
if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
# Trim common dir prefixes from both pages.
my $p_trimmed=$p;
my $page_trimmed=$page;
my $dir;
1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
$p_trimmed=~s/^\Q$dir\E// &&
$page_trimmed=~s/^\Q$dir\E//;
push @links, "<a href=\"$href\">$p_trimmed</a>";
$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
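# Example of the trimming (hypothetical pages): if "software/ikiwiki" links
# here from the current page "software/index", the shared "software/" prefix
# is stripped and the backlink is rendered simply as "ikiwiki".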
my $title=basename($page);
foreach my $dir (reverse split("/", $page)) {
if (length($pagelink)) {
$pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
$path=~s/\.\.\/$/index.html/;
$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
if (length $cgiurl) {
push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
$content="<html>\n<head><title>$title</title></head>\n<body>\n".
"<h1>$pagelink</h1>\n".
"</body>\n</html>\n";
my $type=pagetype($file);
my $content=readfile("$srcdir/$file");
if ($type ne 'unknown') {
my $page=pagename($file);
$links{$page}=[findlinks($content)];
$content=linkify($content, $file);
$content=htmlize($type, $content);
$content=linkbacks($content, $page);
$content=finalize($content, $page);
writefile("$destdir/".htmlpage($page), $content);
$oldpagemtime{$page}=time;
$renderedfiles{$page}=htmlpage($page);
writefile("$destdir/$file", $content);
$oldpagemtime{$file}=time;
$renderedfiles{$file}=$file;
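# In short (summary comment, not in the original): a source page is scanned
# for [[links]], linkified, run through Markdown, given its backlinks and
# page furniture, and written out as HTML; any other file is copied through
# verbatim.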
open (IN, "$srcdir/.index") || return;
$_=possibly_foolish_untaint($_);
my ($mtime, $file, $rendered, @links)=split(' ', $_);
my $page=pagename($file);
$pagesources{$page}=$file;
$oldpagemtime{$page}=$mtime;
$oldlinks{$page}=[@links];
$links{$page}=[@links];
$renderedfiles{$page}=$rendered;
open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
foreach my $page (keys %oldpagemtime) {
print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
join(" ", @{$links{$page}})."\n"
if $oldpagemtime{$page};
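# The .index file is one whitespace-separated record per page, matching the
# split() in loadindex() above; a hypothetical line:
#   1143220636 todo.mdwn todo.html index ideas/syntax
# i.e. mtime, source file, rendered file, then every link on the page.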
if (-d "$srcdir/.svn") {
if (system("svn", "update", "--quiet", $srcdir) != 0) {
warn("svn update failed\n");
if (-d "$srcdir/.svn") {
if (system("svn", "commit", "--quiet", "-m",
possibly_foolish_untaint($message), $srcdir) != 0) {
warn("svn commit failed\n");
if (-d "$srcdir/.svn") {
my $parent=dirname($file);
while (! -d "$srcdir/$parent/.svn") {
$file=$parent;
$parent=dirname($file);
if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
warn("svn add failed\n");
sub rcs_recentchanges ($) {
eval q{use Date::Parse};
eval q{use Time::Duration};
if (-d "$srcdir/.svn") {
my $info=`LANG=C svn info $srcdir`;
my ($svn_url)=$info=~/^URL: (.*)$/m;
# FIXME: currently assumes that the wiki is somewhere
# under trunk in svn, doesn't support other layouts.
my ($svn_base)=$svn_url=~m!(/trunk(?:/.*)?)$!;
my $div=qr/^--------------------+$/;
my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
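# $infoline is shaped to match the header lines "svn log" prints between
# dividers; a typical (made-up) example:
#   r42 | joey | 2006-03-23 18:14:35 -0500 (Thu, 23 Mar 2006) | 2 lines
# capturing revision, committer, and timestamp as $1, $2, $3.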
my ($rev, $user, $when, @pages, $message);
foreach (`LANG=C svn log -v '$svn_url'`) {
if ($state eq 'start' && /$div/) {
elsif ($state eq 'header' && /$infoline/) {
$when=concise(ago(time - str2time($3)));
elsif ($state eq 'header' && /^\s+[A-Z]\s+\Q$svn_base\E\/(.+)$/) {
push @pages, pagename($1) if length $1;
elsif ($state eq 'header' && /^$/) {
elsif ($state eq 'body' && /$div/) {
push @ret, { rev => $rev, user => $user,
when => $when, message => $message,
pages => [@pages] } if @pages;
return @ret if @ret >= $num;
$message=$rev=$user=$when=undef;
elsif ($state eq 'body') {
$message.="$_<br>\n";
my $dir=dirname($file);
while (rmdir($dir)) {
# Find existing pages.
if (/$wiki_file_prune_regexp/) {
$File::Find::prune=1;
my ($f)=/$wiki_file_regexp/; # untaint
warn("skipping bad filename $_\n");
$f=~s/^\Q$srcdir\E\/?//;
$exists{pagename($f)}=1;
# check for added or removed pages
foreach my $file (@files) {
my $page=pagename($file);
if (! $oldpagemtime{$page}) {
debug("new page $page");
$pagesources{$page}=$file;
foreach my $page (keys %oldpagemtime) {
if (! $exists{$page}) {
debug("removing old page $page");
push @del, $renderedfiles{$page};
prune($destdir."/".$renderedfiles{$page});
delete $renderedfiles{$page};
$oldpagemtime{$page}=0;
delete $pagesources{$page};
# render any updated files
foreach my $file (@files) {
my $page=pagename($file);
if (! exists $oldpagemtime{$page} ||
mtime("$srcdir/$file") > $oldpagemtime{$page}) {
debug("rendering changed file $file");
# if any files were added or removed, check to see if each page
# needs an update due to linking to them
# TODO: inefficient; pages may get rendered above and again here;
# problem is the bestlink may have changed and we won't know until now.
FILE: foreach my $file (@files) {
my $page=pagename($file);
foreach my $f (@add, @del) {
my $p=pagename($f);
foreach my $link (@{$links{$page}}) {
if (bestlink($page, $link) eq $p) {
debug("rendering $file, which links to $p");
# handle linkbacks; if a page has added/removed links, update the
# pages it links to.
# TODO: inefficient; pages may get rendered above and again here;
# problem is the linkbacks could be wrong in the first pass render above.
foreach my $file (keys %rendered, @del) {
my $page=pagename($file);
if (exists $links{$page}) {
foreach my $link (@{$links{$page}}) {
$link=bestlink($page, $link);
! exists $oldlinks{$page} ||
! grep { $_ eq $link } @{$oldlinks{$page}}) {
$linkchanged{$link}=1;
if (exists $oldlinks{$page}) {
foreach my $link (@{$oldlinks{$page}}) {
$link=bestlink($page, $link);
! exists $links{$page} ||
! grep { $_ eq $link } @{$links{$page}}) {
$linkchanged{$link}=1;
foreach my $link (keys %linkchanged) {
my $linkfile=$pagesources{$link};
if (defined $linkfile) {
debug("rendering $linkfile, to update its linkbacks");
# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
sub gen_wrapper ($$) {
my ($svn, $rebuild)=@_;
eval q{use Cwd 'abs_path'};
$srcdir=abs_path($srcdir);
$destdir=abs_path($destdir);
my $this=abs_path($0);
error("$this doesn't seem to be executable");
my @params=($srcdir, $destdir, "--wikiname=$wikiname");
push @params, "--verbose" if $verbose;
push @params, "--rebuild" if $rebuild;
push @params, "--nosvn" if !$svn;
push @params, "--cgi" if $cgi;
push @params, "--url=$url" if $url;
push @params, "--cgiurl=$cgiurl" if $cgiurl;
my $params=join(" ", @params);
foreach my $p ($this, $this, @params) {
push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
foreach my $var (@envsave) {
if ((s=getenv("$var")))
asprintf(&newenviron[i++], "%s=%s", "$var", s);
open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
/* A wrapper for ikiwiki, can be safely made suid. */
extern char **environ;
int main (int argc, char **argv) {
/* Sanitize environment. */
char *newenviron[$#envsave+3];
newenviron[i++]="HOME=$ENV{HOME}";
if (argc == 2 && strcmp(argv[1], "--params") == 0) {
printf("$params\\n");
perror("failed to run $this");
if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
error("failed to compile ikiwiki-wrap.c");
unlink("ikiwiki-wrap.c");
print "successfully generated ikiwiki-wrap\n";
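# How the generated wrapper behaves (summary, inferred from the template
# above): it copies only the whitelisted environment variables into
# newenviron, then execs this script with the baked-in arguments; running
# "ikiwiki-wrap --params" just prints those arguments for inspection.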
my $do=$q->param('do');
if (! defined $do || ! length $do) {
error("\"do\" parameter missing");
if ($do eq 'recentchanges') {
foreach my $change (rcs_recentchanges(100)) {
$list.=join(", ", map { htmllink("", $_, 1) } @{$change->{pages}});
$list.="changed ".$change->{when}." by ".
htmllink("", $change->{user}, 1).
": <i>".$change->{message}."</i>\n";
$q->start_html("RecentChanges"),
$q->h1("<a href=\"$url\">$wikiname</a>/ RecentChanges"),
my ($page)=$q->param('page')=~/$wiki_file_regexp/;
if (! defined $page || ! length $page || $page ne $q->param('page') ||
$page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
error("bad page name");
my $action=$q->request_uri;
if ($do eq 'create') {
if (exists $pagesources{lc($page)}) {
# hmm, someone else made the page in the meantime?
print $q->redirect("$url/".htmlpage($page));
my ($from)=$q->param('from')=~/$wiki_file_regexp/;
if (! defined $from || ! length $from ||
$from ne $q->param('from') ||
$from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
push @page_locs, $dir.$page;
push @page_locs, "$from/$page";
while (length $dir) {
$dir=~s![^/]+/$!!;
push @page_locs, $dir.$page;
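# Example (hypothetical names): creating "reply" from page "posts/one"
# collects the candidate locations "posts/reply", "posts/one/reply", and
# "reply"; the popup_menu below lets the user pick one.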
$q->param("do", "save");
$q->start_html("Creating $page"),
$q->h1("<a href=\"$url\">$wikiname</a>/ Creating $page"),
$q->start_form(-action => $action),
"Select page location:",
$q->popup_menu('page', \@page_locs),
$q->textarea(-name => 'content',
"Optional comment about this change:",
$q->textfield(-name => "comments", -size => 80),
$q->submit("Save Page"),
elsif ($do eq 'edit') {
if (exists $pagesources{lc($page)}) {
$content=readfile("$srcdir/$pagesources{lc($page)}");
$content=~s/\n/\r\n/g;
$q->param("do", "save");
$q->start_html("Editing $page"),
$q->h1("<a href=\"$url\">$wikiname</a>/ Editing $page"),
$q->start_form(-action => $action),
$q->textarea(-name => 'content',
-default => $content,
"Optional comment about this change:",
$q->textfield(-name => "comments", -size => 80),
$q->submit("Save Page"),
elsif ($do eq 'save') {
my $file=$page.$default_pagetype;
if (exists $pagesources{lc($page)}) {
$file=$pagesources{lc($page)};
my $content=$q->param('content');
$content=~s/\r\n/\n/g;
writefile("$srcdir/$file", $content);
my $message="web commit from $ENV{REMOTE_ADDR}";
if (defined $q->param('comments')) {
$message.=": ".$q->param('comments');
# presumably the commit will trigger an update
rcs_commit($message);
print $q->redirect("$url/".htmlpage($page));
error("unknown do parameter");
if (grep /^-/, @ARGV) {
eval q{use Getopt::Long};
789 "wikiname=s" => \$wikiname,
790 "verbose|v" => \$verbose,
791 "rebuild" => \$rebuild,
792 "wrapper" => \$wrapper,
796 "cgiurl=s" => \$cgiurl,
799 usage() unless @ARGV == 2;
800 ($srcdir) = possibly_foolish_untaint(shift);
801 ($destdir) = possibly_foolish_untaint(shift);
803 if ($cgi && ! length $url) {
804 error("Must specify url to wiki with --url when using --cgi");
807 gen_wrapper($svn, $rebuild) if $wrapper;
810 loadindex() unless $rebuild;
815 rcs_update() if $svn;
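# Typical invocations, matching usage() above (paths are hypothetical):
#   ikiwiki --verbose ~/wiki ~/public_html/wiki
#   ikiwiki --wrapper --url=http://example.org/wiki \
#     --cgiurl=http://example.org/ikiwiki.cgi ~/wiki ~/public_html/wiki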