root/trunk/plagger/lib/Plagger/UserAgent.pm

Revision 1686 (checked in by miyagawa, 14 years ago)
  • Property svn:keywords set to Id Revision
package Plagger::UserAgent;
use strict;
use base qw( LWP::UserAgent );

use Carp;
use Plagger::Cookies;
use Plagger::FeedParser;
use Plagger::Util;   # Plagger::Util::mime_type_of() is used in fetch() below
use URI::Fetch 0.06;
use Scalar::Util qw(blessed);

# Build an LWP::UserAgent subclass configured from the Plagger context:
# conf->{user_agent} may set cookies, agent and timeout.
sub new {
    my $class = shift;
    my $self  = $class->SUPER::new(@_);
    $self->parse_head(0);

    my $conf = Plagger->context ? Plagger->context->conf->{user_agent} : {};
    if ($conf->{cookies}) {
        $self->cookie_jar( Plagger::Cookies->create($conf->{cookies}) );
    }

    $self->agent( $conf->{agent} || "Plagger/$Plagger::VERSION (http://plagger.org/)" );
    $self->timeout( $conf->{timeout} || 15 );
    $self->env_proxy();

    if (Plagger->context) {
        Plagger->context->run_hook('useragent.init', { ua => $self });
    }

    $self;
}
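
# Usage sketch: new() reads these keys (and only these) from
# Plagger->context->conf->{user_agent}; the values below are illustrative.
#
#   # user_agent => {
#   #     agent   => 'MyBot/1.0 (+http://bot.example.com/)',
#   #     timeout => 30,
#   #     cookies => ...,   # handed as-is to Plagger::Cookies->create()
#   # },
#
#   my $ua = Plagger::UserAgent->new;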

# Fetch a URL via URI::Fetch; when a plugin is passed, its cache backend is
# used for conditional requests. Always returns a URI::Fetch::Response
# because of ForceResponse.
sub fetch {
    my($self, $url, $plugin, $opt) = @_;

    my $res = URI::Fetch->fetch($url,
        UserAgent => $self,
        $plugin ? (Cache => $plugin->cache) : (),
        ForceResponse => 1,
        ($opt ? %$opt : ()),
    );

    # file:// URLs carry no Content-Type header, so guess it from the path
    if ($res && $url =~ m!^file://!) {
        $res->content_type( Plagger::Util::mime_type_of(URI->new($url)) );
    }

    $res;
}
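
# Usage sketch (URL and plugin are illustrative): the return value is a
# URI::Fetch::Response, so use that API rather than HTTP::Response methods.
#
#   my $res = $ua->fetch('http://feeds.example.com/index.rss', $plugin);
#   if ($res->is_error) {
#       warn "fetch failed: " . $res->http_status;
#   } else {
#       my $body = $res->content;
#   }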

# Thin wrapper around LWP::UserAgent::request that fires the
# 'useragent.request' hook (with ua, url and req) before each request.
sub request {
    my $self = shift;
    my($req) = @_;
    if (Plagger->context) {
        Plagger->context->run_hook('useragent.request', { ua => $self, url => $req->uri, req => $req });
    }
    $self->SUPER::request(@_);
}
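
# Hook sketch (hypothetical plugin code, and assuming Plagger's usual hook
# calling convention of ($plugin, $context, $args)): a plugin can register
# for 'useragent.request' to inspect or adjust every outgoing request.
#
#   # in the plugin's register():
#   #   $context->register_hook($self, 'useragent.request' => \&on_request);
#   #
#   # sub on_request {
#   #     my($self, $context, $args) = @_;     # $args has ua, url and req
#   #     $args->{req}->header('Accept-Language' => 'en');
#   # }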

# Like LWP::UserAgent::mirror, but also accepts an HTTP::Request object
# instead of a plain URL so callers can pass custom headers.
sub mirror {
    my($self, $request, $file) = @_;

    # plain URL (or anything that isn't an HTTP::Request): defer to LWP
    unless (blessed($request) && $request->isa('HTTP::Request') ) {
        return $self->SUPER::mirror($request, $file);
    }

    # below is copied from LWP::UserAgent
    if (-e $file) {
        my($mtime) = (stat($file))[9];
        if ($mtime) {
            $request->header('If-Modified-Since' =>
                             HTTP::Date::time2str($mtime));
        }
    }
    my $tmpfile = "$file-$$";

    my $response = $self->request($request, $tmpfile);
    if ($response->is_success) {

        my $file_length = (stat($tmpfile))[7];
        my($content_length) = $response->header('Content-length');

        if (defined $content_length and $file_length < $content_length) {
            unlink($tmpfile);
            die "Transfer truncated: " .
                "only $file_length out of $content_length bytes received\n";
        }
        elsif (defined $content_length and $file_length > $content_length) {
            unlink($tmpfile);
            die "Content-length mismatch: " .
                "expected $content_length bytes, got $file_length\n";
        }
        else {
            # OK
            if (-e $file) {
                # Some dosish systems fail to rename if the target exists
                chmod 0777, $file;
                unlink $file;
            }
            rename($tmpfile, $file) or
                die "Cannot rename '$tmpfile' to '$file': $!\n";

            if (my $lm = $response->last_modified) {
                # make sure the file has the same last modification time
                utime $lm, $lm, $file;
            }
        }
    }
    else {
        unlink($tmpfile);
    }
    return $response;
}
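
# Usage sketch (URL, header and path are illustrative): pass an HTTP::Request
# when the download needs extra headers; a plain URL still goes through
# LWP::UserAgent::mirror() unchanged.
#
#   my $req = HTTP::Request->new(GET => 'http://media.example.com/show.mp3');
#   $req->header(Referer => 'http://media.example.com/');
#   my $res = $ua->mirror($req, '/tmp/show.mp3');
#   # equivalent for the simple case:
#   # $ua->mirror('http://media.example.com/show.mp3', '/tmp/show.mp3');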

# Fetch a page, auto-discover its feed URL and return the parsed feed.
# Croaks when the page cannot be fetched or no feed link is found.
sub find_parse {
    my($self, $url) = @_;
    $url = URI->new($url) unless ref $url;

    my $response = $self->fetch($url);
    if ($response->is_error) {
        Carp::croak("Error fetching $url: ", $response->http_status);
    }

    my $feed_url = Plagger::FeedParser->discover($response);
    if ($url eq $feed_url) {
        # the page itself is the feed
        return Plagger::FeedParser->parse(\$response->content);
    } elsif ($feed_url) {
        return $self->fetch_parse($feed_url);
    } else {
        Carp::croak("Can't find feed from $url");
    }
}
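
# Usage sketch (URL is illustrative): find_parse() starts from an HTML page
# and auto-discovers its feed, while fetch_parse() below expects the feed URL
# itself; both croak on failure and return whatever Plagger::FeedParser->parse()
# produces.
#
#   my $feed = $ua->find_parse('http://blog.example.com/');
#   # or, when the feed URL is already known:
#   # my $feed = $ua->fetch_parse('http://blog.example.com/index.rdf');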

# Fetch a URL that is already known to be a feed and return the parsed feed.
sub fetch_parse {
    my($self, $url) = @_;
    $url = URI->new($url) unless ref $url;

    my $response = $self->fetch($url);
    if ($response->is_error) {
        Carp::croak("Error fetching $url: ", $response->http_status);
    }

    Plagger::FeedParser->parse(\$response->content);
}

1;