Commit: "add section information"
File: IkiWiki/Plugin/amazon_s3.pm (ikiwiki repository)
1 #!/usr/bin/perl
2 package IkiWiki::Plugin::amazon_s3;
3
4 use warnings;
5 no warnings 'redefine';
6 use strict;
7 use IkiWiki 3.00;
8 use IkiWiki::Render;
9 use Net::Amazon::S3;
10
# Store references to real subs before overriding them.
# This plugin works by replacing IkiWiki::writefile and IkiWiki::prune
# (see the wrappers in package IkiWiki at the bottom of this file); the
# originals are captured here, at BEGIN time, so the wrappers can still
# call through to them.
our %subs;
BEGIN {
	foreach my $sub (qw{IkiWiki::writefile IkiWiki::prune}) {
		# Keyed by fully-qualified name, value is a code ref to the
		# original implementation.
		$subs{$sub}=\&$sub;
	}
};
18
# Register this plugin's entry points with ikiwiki.
sub import {
	my @hooks=(
		[getopt      => \&getopt],
		[getsetup    => \&getsetup],
		[checkconfig => \&checkconfig],
	);
	foreach my $h (@hooks) {
		my ($type, $fn)=@$h;
		hook(type => $type, id => "amazon_s3", call => $fn);
	}
}
24
# Command line option handling: adds a --delete-bucket switch that
# empties out and removes the configured S3 bucket, then exits.
sub getopt () {
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');

	my $delete_bucket = sub {
		my $bucket=getbucket();
		# Any S3 failure below is fatal; report the service error.
		my $fail = sub { die $bucket->err . ": " . $bucket->errstr };
		debug(gettext("deleting bucket.."));
		# A bucket can only be deleted once it is empty, so remove
		# every key first.
		my $listing = $bucket->list_all or $fail->();
		foreach my $item (@{$listing->{keys}}) {
			debug("\t".$item->{key});
			$bucket->delete_key($item->{key}) or $fail->();
		}
		$bucket->delete_bucket or $fail->();
		debug(gettext("done"));
		exit(0);
	};

	GetOptions("delete-bucket" => $delete_bucket);
}
42
# Describe this plugin's setup options to ikiwiki's websetup machinery.
# Returns a flat list of name => spec pairs.
sub getsetup () {
	return
		plugin => {
			safe => 0,
			rebuild => 0,
			section => "special-purpose",
		},
		amazon_s3_key_id => {
			type => "string",
			example => "XXXXXXXXXXXXXXXXXXXX",
			description => "public access key id",
			safe => 1,
			rebuild => 0,
		},
		# BUGFIX: this entry was previously (wrongly) also named
		# amazon_s3_key_id, shadowing the entry above and leaving the
		# key-file setting -- which checkconfig requires -- undocumented.
		amazon_s3_key_file => {
			type => "string",
			example => "$ENV{HOME}/.s3_key",
			description => "file holding secret key (must not be readable by others!)",
			safe => 0, # ikiwiki reads this file
			rebuild => 0,
		},
		amazon_s3_bucket => {
			type => "string",
			example => "mywiki",
			description => "globally unique name of bucket to store wiki in",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_prefix => {
			type => "string",
			example => "wiki/",
			description => "a prefix to prepend to each page name",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_location => {
			type => "string",
			example => "EU",
			description => "which S3 datacenter to use (leave blank for default)",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_dupindex => {
			type => "boolean",
			example => 0,
			description => "store each index file twice? (allows urls ending in \"/index.html\" and \"/\")",
			safe => 1,
			rebuild => 1,
		},
}
93
# Validate the configuration: the access key id, secret key file and
# bucket name are mandatory; the key prefix defaults to "wiki/".
sub checkconfig {
	foreach my $required (qw{amazon_s3_key_id amazon_s3_key_file
	                         amazon_s3_bucket}) {
		next if exists $config{$required} && defined $config{$required};
		error(sprintf(gettext("Must specify %s"), $required));
	}
	unless (exists $config{amazon_s3_prefix} &&
	        defined $config{amazon_s3_prefix}) {
		$config{amazon_s3_prefix}="wiki/";
	}
}
106
{
my $bucket;

# Return a Net::Amazon::S3 bucket handle for the configured bucket,
# creating the bucket on first use.  The handle is cached in the
# enclosing closure for the life of the process.
sub getbucket {
	return $bucket if defined $bucket;

	# The secret key lives in its own file (first line only) so the
	# setup file can stay readable.  Use a lexical filehandle instead
	# of the old bareword IN, which was a process-global handle.
	open(my $keyfh, "<", $config{amazon_s3_key_file}) || error($config{amazon_s3_key_file}.": ".$!);
	my $key=<$keyfh>;
	chomp $key;
	close $keyfh;

	my $s3=Net::Amazon::S3->new({
		aws_access_key_id => $config{amazon_s3_key_id},
		aws_secret_access_key => $key,
		retry => 1,
	});

	# Make sure the bucket exists, passing the datacenter location
	# constraint only when one is configured.
	if (exists $config{amazon_s3_location}) {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
			location_constraint => $config{amazon_s3_location},
		});
	}
	else {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
		});
	}

	if (! $bucket) {
		# add_bucket can fail if the bucket already exists;
		# try to use the existing bucket.
		$bucket=$s3->bucket($config{amazon_s3_bucket});
	}
	if (! $bucket) {
		error(gettext("Failed to create S3 bucket: ").
			$s3->err.": ".$s3->errstr."\n");
	}

	return $bucket;
}
}
148
# Given a file (an absolute path under destdir), return any S3 keys
# associated with it: the prefixed page path, and/or the directory form
# when the file is an index page.  Returns the empty list for files
# outside destdir.
sub file2keys ($) {
	my $file=shift;

	my @keys;
	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
		push @keys, $config{amazon_s3_prefix}.$1;

		# Munge foo/index.html to foo/
		# BUGFIX: the "." before the extension was an unescaped regexp
		# dot, and the extension was interpolated unquoted; match both
		# literally.
		if ($keys[0]=~/(^|.*\/)index\.\Q$config{htmlext}\E$/) {
			# A duplicate might need to be stored under the
			# unmunged name too.
			if (!$config{usedirs} || $config{amazon_s3_dupindex}) {
				push @keys, $1;
			}
			else {
				@keys=($1);
			}
		}
	}
	return @keys;
}
171
172 package IkiWiki;
173 use File::MimeInfo;
174 use Encode;
175
# This is a wrapper around the real writefile.
# It first writes the file to disk via the original IkiWiki::writefile
# (captured in %IkiWiki::Plugin::amazon_s3::subs at BEGIN time), then
# mirrors the rendered file into S3 under every key that file2keys
# maps it to.  Note: this sub is defined in package IkiWiki, replacing
# the core sub (redefine warnings are disabled at the top of the file).
sub writefile ($$$;$$) {
	my $file=shift;		# filename relative to $destdir
	my $destdir=shift;	# destination directory
	my $content=shift;	# content to write (may be undef when $writer is used)
	my $binary=shift;	# passed through to the real writefile
	my $writer=shift;	# optional coderef that emits the file itself

	# First, write the file to disk.
	my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);

	# Empty @keys means the file is outside destdir and is not mirrored.
	my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");

	# Store the data in S3.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		# The http layer tries to downgrade utf-8
		# content, but that can fail (see
		# http://rt.cpan.org/Ticket/Display.html?id=35710),
		# so force convert it to bytes.
		$content=encode_utf8($content) if defined $content;

		# Pages are served directly from S3, so they must be
		# world-readable and carry the right MIME type.
		my %opts=(
			acl_short => 'public-read',
			content_type => mimetype("$destdir/$file"),
		);

		# If there are multiple keys to write, data is sent
		# multiple times.
		# TODO: investigate using the new copy operation.
		#       (It may not be robust enough.)
		foreach my $key (@keys) {
			my $res;
			if (! $writer) {
				$res=$bucket->add_key($key, $content, \%opts);
			}
			else {
				# This test for empty files is a workaround
				# for this bug:
				# http://rt.cpan.org//Ticket/Display.html?id=35731
				if (-z "$destdir/$file") {
					$res=$bucket->add_key("", \%opts);
				}
				else {
					# read back in the file that the writer emitted
					$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
				}
			}
			if (! $res) {
				error(gettext("Failed to save file to S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	# Return whatever the real writefile returned.
	return $ret;
}
234
# This is a wrapper around the real prune (captured at BEGIN time in
# %IkiWiki::Plugin::amazon_s3::subs): deletes the file's S3 copies
# before pruning it locally.
sub prune ($) {
	my $file=shift;

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);

	# Prune files out of S3 too.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		foreach my $s3key (@keys) {
			$bucket->delete_key($s3key)
				or error(gettext("Failed to delete file from S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
		}
	}

	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
}
256
257 1