#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 2.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use URI;
use open qw{:utf8 :std};

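# In-memory aggregation state: feed definitions keyed by feed name,
# and per-entry state keyed by guid.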
my %feeds;
my %guids;

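# Register this plugin's hooks. The cgi hook is only registered when
# the aggregate_webtrigger option is enabled.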
sub import { #{{{
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
        hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
        hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
} # }}}

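# Add the --aggregate and --[no]aggregateinternal command line options.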
sub getopt () { #{{{
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions(
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
} #}}}

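# If --aggregate was specified, launch aggregation, except when this is a
# post-commit run with the commit hook enabled.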
sub checkconfig () { #{{{
        if ($config{aggregate} && ! ($config{post_commit} &&
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
} #}}}

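# CGI hook: a request with do=aggregate_webtrigger runs aggregation and,
# if anything changed, refreshes the wiki, reporting progress as plain text.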
sub cgi ($) { #{{{
        my $cgi=shift;

        if (defined $cgi->param('do') &&
            $cgi->param("do") eq "aggregate_webtrigger") {
                $|=1;
                print "Content-Type: text/plain\n\n";
                $config{cgi}=0;
                $config{verbose}=1;
                $config{syslog}=0;
                print gettext("Aggregation triggered via web.")."\n\n";
                if (launchaggregation()) {
                        IkiWiki::lockwiki();
                        IkiWiki::loadindex();
                        require IkiWiki::Render;
                        IkiWiki::refresh();
                        IkiWiki::saveindex();
                }
                else {
                        print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
                }
                exit 0;
        }
} #}}}

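# Aggregate any feeds that are due, in a forked child process, and return
# true so the caller knows a rebuild is needed. Returns false if no feeds
# need updating or another aggregation process holds the lock.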
sub launchaggregation () { #{{{
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
                debug("an aggregation process is already running");
                return;
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
                foreach @feeds;

        # Fork a child process to handle the aggregation.
        # The parent process will then handle building the
        # result. This avoids messy code to clear state
        # accumulated while aggregating.
        defined(my $pid = fork) or error("Can't fork: $!");
        if (! $pid) {
                IkiWiki::loadindex();
                # Aggregation happens without the main wiki lock
                # being held. This allows editing pages etc while
                # aggregation is running.
                aggregate(@feeds);

                IkiWiki::lockwiki;
                # Merge changes, since aggregation state may have
                # changed on disk while the aggregation was happening.
                mergestate();
                expire();
                savestate();
                IkiWiki::unlockwiki;
                exit 0;
        }
        waitpid($pid,0);
        if ($?) {
                error "aggregation failed with code $?";
        }

        clearstate();
        unlockaggregate();

        return 1;
} #}}}

# Pages with the _aggregated extension contain plain HTML, so pass their
# content through unchanged.
sub htmlize (@) { #{{{
        my %params=@_;
        return $params{content};
} #}}}

# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal { #{{{
        if (! lockaggregate()) {
                error("an aggregation process is currently running");
        }

        IkiWiki::lockwiki();
        loadstate();
        $config{verbose}=1;

        foreach my $data (values %guids) {
                next unless $data->{page};

                $config{aggregateinternal} = 0;
                my $oldname = pagefile($data->{page});

                $config{aggregateinternal} = 1;
                my $newname = pagefile($data->{page});

                debug "moving $oldname -> $newname";
                if (-e $newname) {
                        if (-e $oldname) {
                                error("$newname already exists");
                        }
                        else {
                                debug("already renamed to $newname?");
                        }
                }
                elsif (-e $oldname) {
                        rename($oldname, $newname) || error("$!");
                }
                else {
                        debug("$oldname not found");
                }
        }

        savestate();
        IkiWiki::unlockwiki;

        unlockaggregate();
} #}}}

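# needsbuild hook: reset the "seen" flag on feeds whose source page is
# about to be rebuilt, so feeds removed from that page can be cleaned up.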
sub needsbuild (@) { #{{{
        my $needsbuild=shift;

        loadstate();

        foreach my $feed (values %feeds) {
                if (exists $pagesources{$feed->{sourcepage}} &&
                    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as
                        # not yet seen; preprocess will unmark those that
                        # still exist.
                        markunseen($feed->{sourcepage});
                }
        }
} # }}}

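# Preprocess the aggregate directive: record or update the feed's
# configuration (url, directory, update interval, expiry, template, tags)
# and return a short status summary for display on the source page.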
sub preprocess (@) { #{{{
        my %params=@_;

        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
                        error sprintf(gettext("missing %s parameter"), $required);
                }
        }

        my $feed={};
        my $name=$params{name};
        if (exists $feeds{$name}) {
                $feed=$feeds{$name};
        }
        else {
                $feeds{$name}=$feed;
        }
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
        my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
        $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
        if (exists $params{template}) {
                $params{template}=~s/[^-_a-zA-Z0-9]+//g;
        }
        else {
                $params{template} = "aggregatepost";
        }
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
                my $key=shift;
                my $value=shift;
                if ($key eq 'tag') {
                        push @{$feed->{tags}}, $value;
                }
        }

        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
               " (".$feed->{numposts}." ".gettext("posts").
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
} # }}}

sub delete (@) { #{{{
        my @files=@_;

        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
                markunseen($page);
        }
} #}}}

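# Mark all feeds whose directive appears on the given page as unseen.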
sub markunseen ($) { #{{{
        my $page=shift;

        foreach my $id (keys %feeds) {
                if ($feeds{$id}->{sourcepage} eq $page) {
                        $feeds{$id}->{unseen}=1;
                }
        }
} #}}}

my $state_loaded=0;

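# Load feed and guid state from $config{wikistatedir}/aggregate, one
# space-separated list of field=value pairs per line. Only loads once per run.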
sub loadstate () { #{{{
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
                open(IN, "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
                        my $data={};
                        foreach my $i (split(/ /, $_)) {
                                my ($field, $val)=split(/=/, $i, 2);
                                if ($field eq "name" || $field eq "feed" ||
                                    $field eq "guid" || $field eq "message") {
                                        $data->{$field}=decode_entities($val, " \t\n");
                                }
                                elsif ($field eq "tag") {
                                        push @{$data->{tags}}, $val;
                                }
                                else {
                                        $data->{$field}=$val;
                                }
                        }

                        if (exists $data->{name}) {
                                $feeds{$data->{name}}=$data;
                        }
                        elsif (exists $data->{guid}) {
                                $guids{$data->{guid}}=$data;
                        }
                }

                close IN;
        }
} #}}}

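# Write feed and guid state back to disk, escaping whitespace in the
# free-form fields. Writes to a temporary file and renames it into place.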
sub savestate () { #{{{
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
        open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
                            $field eq "guid" || $field eq "message") {
                                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
                        }
                        elsif ($field eq "tags") {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "$field=".$data->{$field};
                        }
                }
                print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);
} #}}}

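# Drop state for feeds that are no longer present on any page, delete the
# pages of guids whose feed is gone, and clean up guids marked as expired.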
sub garbage_collect () { #{{{
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
                if ($feeds{$name}->{unseen}) {
                        delete $feeds{$name};
                }
        }

        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
                        unlink pagefile($guid->{page})
                                if exists $guid->{page};
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink pagefile($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
} #}}}

sub mergestate () { #{{{
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
        my %myfeeds=%feeds;
        my %myguids=%guids;
        clearstate();
        loadstate();

        # All that can change in feed state during aggregation is a few
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
                        foreach my $field (qw{message lastupdate numposts
                                              newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }

        # New guids can be created during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
        # will be garbage collected later.
        foreach my $guid (keys %myguids) {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
        }
} #}}}

sub clearstate () { #{{{
        %feeds=();
        %guids=();
        $state_loaded=0;
} #}}}

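# Mark expired guids in each feed, either by age in days (expireage) or by
# keeping only the newest expirecount posts.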
sub expire () { #{{{
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
                my %seen;
                foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
                                  values %guids) {
                        if ($feed->{expireage}) {
                                my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
                                                $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
                        elsif ($feed->{expirecount} &&
                               $count >= $feed->{expirecount}) {
                                debug(sprintf(gettext("expiring %s"), $item->{page}));
                                $item->{expired}=1;
                        }
                        else {
                                if (! $seen{$item->{page}}) {
                                        $seen{$item->{page}}=1;
                                        $count++;
                                }
                        }
                }
        }
} #}}}

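# Return the feeds whose update interval has elapsed since their last update
# (or all feeds when doing a full rebuild).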
sub needsaggregate () { #{{{
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
} #}}}

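# Download and parse each feed, recording status and errors on the feed,
# and add a page for each new or changed entry.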
sub aggregate (@) { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;

        foreach my $feed (@_) {
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("processed ok at %s"),
                        displaytime($feed->{lastupdate}));
                $feed->{error}=0;

                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
                                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
                my $res=URI::Fetch->fetch($feed->{feedurl});
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                my $content=$res->content;
                my $f=eval{XML::Feed->parse(\$content)};
                if ($@) {
                        # One common cause of XML::Feed crashing is a feed
                        # that contains invalid UTF-8 sequences. Re-decode
                        # the content, replacing invalid sequences, and retry.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if (! $f) {
                        $feed->{message}=XML::Feed->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                foreach my $entry ($f->entries) {
                        add_page(
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
                                content => defined $entry->content->body ? $entry->content->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
        }
} #}}}

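# Create or update the wiki page for one feed entry. New entries get a page
# name derived from their title; the page is only rewritten when the entry's
# content digest changes.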
sub add_page (@) { #{{{
        my %params=@_;

        my $feed=$params{feed};
        my $guid={};
        my $mtime;
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
        }
        else {
                # new post
                $guid->{guid}=$params{guid};
                $guids{$params{guid}}=$guid;
                $mtime=$params{ctime};
                $feed->{numposts}++;
                $feed->{newposts}++;

                # assign it an unused page
                my $page=IkiWiki::titlepage($params{title});
                # escape slashes and periods in title so it doesn't specify
                # directory name or trigger ".." disallowing code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
                $page=$feed->{dir}."/".$page;
                ($page)=$page=~/$config{wiki_file_regexp}/;
                if (! defined $page || ! length $page) {
                        $page=$feed->{dir}."/item";
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
                       -e pagefile($page.$c)) {
                        $c++;
                }

                # Make sure that the file name isn't too long.
                # NB: This doesn't check for path length limits.
                my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
                if (defined $max && length(htmlfn($page)) >= $max) {
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                               -e pagefile($page.$c)) {
                                $c++;
                        }
                }

                $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
        $guid->{feed}=$feed->{name};

        # To write or not to write? Avoid writing unchanged pages, to prevent
        # unnecessary rebuilding. The mtime from rss cannot be trusted; use a
        # digest of the content instead.
        eval q{use Digest::MD5 'md5_hex'};
        error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;

        # Create the page.
        my $template=template($feed->{template}, blind_cache => 1);
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
        writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);

        # Set the mtime, this lets the build process get the right creation
        # time on record for the new page.
        utime $mtime, $mtime, pagefile($guid->{page})
                if defined $mtime && $mtime <= time;
} #}}}

sub htmlescape ($) { #{{{
        # escape accidental wikilinks and preprocessor stuff
        my $html=shift;
        $html=~s/(?<!\\)\[\[/\\\[\[/g;
        return $html;
} #}}}

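# Resolve a possibly relative URL against the feed's base URL.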
sub urlabs ($$) { #{{{
        my $url=shift;
        my $urlbase=shift;

        URI->new_abs($url, $urlbase)->as_string;
} #}}}

sub htmlabs ($$) { #{{{
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
        # bug #381359.
        my $html=shift;
        my $urlbase=shift;

        my $ret="";
        my $p = HTML::Parser->new(api_version => 3);
        $p->handler(default => sub { $ret.=join("", @_) }, "text");
        $p->handler(start => sub {
                my ($tagname, $pos, $text) = @_;
                if (ref $HTML::Tagset::linkElements{$tagname}) {
                        while (4 <= @$pos) {
                                # use attribute sets from right to left
                                # to avoid invalidating the offsets
                                # when replacing the values
                                my($k_offset, $k_len, $v_offset, $v_len) =
                                        splice(@$pos, -4);
                                my $attrname = lc(substr($text, $k_offset, $k_len));
                                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
                                my $new_v=urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                }
                $ret.=$text;
        }, "tagname, tokenpos, text");
        $p->parse($html);
        $p->eof;

        return $ret;
} #}}}

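# Full path of the source file for an aggregated page.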
sub pagefile ($) { #{{{
        my $page=shift;

        return "$config{srcdir}/".htmlfn($page);
} #}}}

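# File name for an aggregated page: the _aggregated extension when
# aggregateinternal is enabled, otherwise the configured htmlext.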
sub htmlfn ($) { #{{{
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
} #}}}

my $aggregatelock;

sub lockaggregate () { #{{{
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was acquired.
        if (! -d $config{wikistatedir}) {
                mkdir($config{wikistatedir});
        }
        open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
                error("cannot open $config{wikistatedir}/aggregatelock: $!");
        if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
                close($aggregatelock) || error("failed closing aggregatelock: $!");
                return 0;
        }
        return 1;
} #}}}

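# Release the aggregation lock taken by lockaggregate, if it is held.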
sub unlockaggregate () { #{{{
        return close($aggregatelock) if $aggregatelock;
        return;
} #}}}

1