# Blog aggregation plugin.
package IkiWiki::Plugin::aggregate;

use open qw{:utf8 :std};
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
	eval q{use Getopt::Long};
	Getopt::Long::Configure('pass_through');
	GetOptions("aggregate" => \$config{aggregate});
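	# Typical invocation, per the plugin's documentation: pass
	# --aggregate together with --refresh, e.g. from a cron job
	# ("my.setup" is a placeholder for your setup file):
	#   ikiwiki --setup my.setup --aggregate --refresh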
sub checkconfig () { #{{{
	if ($config{aggregate} && ! ($config{post_commit} &&
	                             IkiWiki::commit_hook_enabled())) {
		# See if any feeds need aggregation.
		my @feeds=needsaggregate();
		if (! lockaggregate()) {
			debug("an aggregation process is already running");
			return;
		}
		# force a later rebuild of source pages
		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
			foreach @feeds;

		# Fork a child process to handle the aggregation.
		# The parent process will then handle building the
		# result. This avoids messy code to clear state
		# accumulated while aggregating.
		defined(my $pid = fork) or error("Can't fork: $!");

		# Aggregation happens without the main wiki lock
		# being held. This allows editing pages etc while
		# aggregation is running.

		# Merge changes, since aggregation state may have
		# changed on disk while the aggregation was happening.

		error "aggregation failed with code $?";
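		# (Flow recap: the forked child aggregates without the wiki
		# lock, merges and saves state, then exits; the parent waits,
		# raises the error above on failure, and goes on to rebuild,
		# with the feeds' source pages forced to rebuild.)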
sub needsbuild (@) { #{{{
	my $needsbuild=shift;

	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# are still there.
			markunseen($feed->{sourcepage});
sub preprocess (@) { #{{{
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60; # minutes -> seconds
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	push @{$feed->{tags}}, $value;

	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
		($feed->{error} ? "<em>" : "").$feed->{message}.
		($feed->{error} ? "</em>" : "").
		" (".$feed->{numposts}." ".gettext("posts").
		($feed->{newposts} ? "; ".$feed->{newposts}.
			" ".gettext("new") : "").
		")";
sub delete (@) { #{{{
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		markunseen($page);
	}
sub markunseen ($) { #{{{
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
sub loadstate () { #{{{
	return if $state_loaded;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
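		# A state line is space-separated field=value pairs, with
		# whitespace in name/feed/guid/message values entity-encoded.
		# A hypothetical feed line:
		#   name=example&#32;blog sourcepage=blogs url=http://example.com/ lastupdate=1199068800 numposts=42 tag=tech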
sub savestate () { #{{{
	return unless $state_loaded;
	eval q{use HTML::Entities};
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field};
			}
		}
		# Low-precedence "or" is needed here; with "||" the check would
		# bind to the always-true string argument and never fire.
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT or error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
sub garbage_collect () { #{{{
	foreach my $name (keys %feeds) {
		# remove any feeds that were not seen while building the pages
		# that used to contain them
		if ($feeds{$name}->{unseen}) {
			delete $feeds{$name};
		}
	}

	foreach my $guid (values %guids) {
		# any guid whose feed is gone should be removed
		if (! exists $feeds{$guid->{feed}}) {
			unlink pagefile($guid->{page})
				if exists $guid->{page};
			delete $guids{$guid->{guid}};
		}
		# handle expired guids
		elsif ($guid->{expired} && exists $guid->{page}) {
			unlink pagefile($guid->{page});
			delete $guid->{page};
		}
sub mergestate () { #{{{
	# Load the current state in from disk, and merge into it
	# values from the state in memory that might have changed
	# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields; merge those.
	foreach my $name (keys %myfeeds) {
		if (exists $feeds{$name}) {
			foreach my $field (qw{message lastupdate numposts
			                      newposts error}) {
				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
			}
		}
	}

	# New guids can be created during aggregation.
	# It's also possible that guids were removed from the on-disk state
	# while the aggregation was in progress. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
		if (! exists $guids{$guid}) {
			$guids{$guid}=$myguids{$guid};
		}
	}
sub clearstate () { #{{{
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
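		# Worked example (hypothetical numbers): with expireage=30, an
		# item whose page ctime is 45 days in the past gives $days_old
		# of about 45 > 30, so it is marked expired; with
		# expirecount=100, anything beyond the 100 newest items (the
		# sort above is newest-first) is expired.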
sub needsaggregate () { #{{{
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
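	# E.g. a feed saved with updateinterval=15 minutes (900 seconds)
	# whose lastupdate was half an hour ago is due and gets returned;
	# a full rebuild returns every feed regardless of age.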
sub aggregate (@) { #{{{
	eval q{use XML::Feed};
	eval q{use URI::Fetch};
	eval q{use HTML::Entities};

	foreach my $feed (@_) {
		$feed->{lastupdate}=time;
		$feed->{message}=sprintf(gettext("processed ok at %s"),
			displaytime($feed->{lastupdate}));
		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
		if ($@) {
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Convert
			# feed to ascii to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
		}
		if ($@) {
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
		}
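		# (The entity rewrite above turns, e.g., a bare "&nbsp;" into
		# "&amp;nbsp;" so the parser sees well-formed XML, while the
		# (?!amp) lookahead leaves "&amp;" itself untouched.)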
391 $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
393 debug($feed->{message});
397 $feed->{message}=XML::Feed->errstr;
399 debug($feed->{message});
		foreach my $entry ($f->entries) {
			add_page(
				feed => $feed,
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => defined $entry->content->body ? $entry->content->body : "",
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
			);
		}
sub add_page (@) { #{{{
	my %params=@_;

	my $feed=$params{feed};
	my $guid={};
	my $mtime;
	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
	}
	else {
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};

		# assign it an unused page
		my $page=IkiWiki::titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
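		# (E.g. a title that becomes "foo/bar.baz" is rewritten to
		# "foo__47__bar__46__baz", since ord("/") is 47 and ord(".")
		# is 46.)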
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		}
		my $c="";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e pagefile($page.$c)) {
			$c++;
		}

		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlfn($page)) >= $max) {
			$c="";
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e pagefile($page.$c)) {
				$c++;
			}
		}

		debug(sprintf(gettext("creating new page %s"), $page));
	$guid->{feed}=$feed->{name};

	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from RSS cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;
	my $template=template("aggregatepost.tmpl", blind_cache => 1);
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(copyright => $params{copyright})
		if defined $params{copyright} && length $params{copyright};
	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		# map EXPR form: the comma makes {...} an anon hash, yielding
		# a list of { tag => ... } hashrefs for the template loop.
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	}
	writefile(htmlfn($guid->{page}), $config{srcdir},
		$template->output);

	# Set the mtime; this lets the build process get the right creation
	# time on record for the new page.
	utime $mtime, $mtime, pagefile($guid->{page})
		if defined $mtime && $mtime <= time;
sub htmlescape ($) { #{{{
	# escape accidental wikilinks and preprocessor stuff
	my $html=shift;
	$html=~s/(?<!\\)\[\[/\\\[\[/g;
	return $html;
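	# (E.g. a feed body containing "[[WikiLink]]" is emitted as
	# "\[[WikiLink]]", so ikiwiki renders it literally instead of
	# treating it as a wikilink or directive.)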
sub urlabs ($$) { #{{{
	my $url=shift;
	my $urlbase=shift;

	URI->new_abs($url, $urlbase)->as_string;
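	# (With hypothetical URLs: urlabs("../img.png",
	# "http://example.com/blog/feed") returns
	# "http://example.com/img.png"; an already-absolute URL passes
	# through unchanged.)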
sub htmlabs ($$) { #{{{
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	my $html=shift;
	my $urlbase=shift;

	my $ret="";

	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
sub pagefile ($) { #{{{
	my $page=shift;

	return "$config{srcdir}/".htmlfn($page);
} #}}}

sub htmlfn ($) { #{{{
	return shift().".".$config{htmlext};
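	# (E.g. htmlfn("example/item") is "example/item.html" with the
	# default htmlext of "html", so pagefile() yields
	# "$config{srcdir}/example/item.html".)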
my $aggregatelock;

sub lockaggregate () { #{{{
	# Take an exclusive lock to prevent multiple concurrent aggregators.
	# Returns true if the lock was acquired.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
		error("cannot open $config{wikistatedir}/aggregatelock: $!");
	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}
	return 1;
sub unlockaggregate () { #{{{
	return close($aggregatelock) if $aggregatelock;
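# A minimal usage sketch, mirroring the flow in checkconfig above: take the
# lock, aggregate and save state, then release it:
#   if (lockaggregate()) {
#       aggregate(needsaggregate());
#       mergestate();
#       savestate();
#       unlockaggregate();
#   }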