Leaked source code of Windows Server 2003


# $Id: RobotUA.pm,v 1.17 2000/04/09 11:21:11 gisle Exp $

package LWP::RobotUA;

require LWP::UserAgent;
@ISA = qw(LWP::UserAgent);
$VERSION = sprintf("%d.%02d", q$Revision: 1.17 $ =~ /(\d+)\.(\d+)/);

require WWW::RobotRules;
require HTTP::Request;
require HTTP::Response;

use Carp ();
use LWP::Debug ();
use HTTP::Status ();
use HTTP::Date qw(time2str);
use strict;

=head1 NAME

LWP::RobotUA - A class for Web Robots

=head1 SYNOPSIS

  require LWP::RobotUA;
  $ua = new LWP::RobotUA 'my-robot/0.1', 'me@foo.com';
  $ua->delay(10);  # be very nice, go slowly
  ...
  # then use it just like a normal LWP::UserAgent
  $res = $ua->request($req);

=head1 DESCRIPTION

This class implements a user agent that is suitable for robot
applications.  Robots should be nice to the servers they visit.  They
should consult the F</robots.txt> file to ensure that they are welcome,
and they should not make requests too frequently.

But, before you consider writing a robot, take a look at
<URL:http://info.webcrawler.com/mak/projects/robots/robots.html>.

When you use an I<LWP::RobotUA> as your user agent, you do not really
have to think about these things yourself.  Just send requests as you
would with a normal I<LWP::UserAgent> and this special agent will make
sure you are nice.

=head1 METHODS

LWP::RobotUA is a sub-class of LWP::UserAgent and implements the
same methods.  In addition, the following methods are provided:

=over 4

=cut


#
# Attributes in addition to those found in LWP::UserAgent:
#
# $self->{'delay'}     Required delay between requests to the same
#                      server, in minutes.
#
# $self->{'rules'}     A WWW::RobotRules object
#

=item $ua = LWP::RobotUA->new($agent_name, $from, [$rules])

Your robot's name and the mail address of the human responsible for
the robot (i.e. you) are required by the constructor.

Optionally, it allows you to specify the I<WWW::RobotRules> object to
use.
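
For example (a minimal sketch; the robot name, mail address and the
explicitly constructed I<WWW::RobotRules> object are illustrative
only):

  require LWP::RobotUA;
  require WWW::RobotRules;

  my $rules = new WWW::RobotRules 'my-robot/0.1';
  my $ua    = new LWP::RobotUA 'my-robot/0.1', 'me@foo.com', $rules;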

=cut

sub new
{
    my($class, $name, $from, $rules) = @_;

    Carp::croak('LWP::RobotUA name required') unless $name;
    Carp::croak('LWP::RobotUA from address required') unless $from;

    my $self = new LWP::UserAgent;
    $self = bless $self, $class;

    $self->{'delay'} = 1;   # minutes
    $self->{'agent'} = $name;
    $self->{'from'}  = $from;
    $self->{'use_sleep'} = 1;

    if ($rules) {
        $rules->agent($name);
        $self->{'rules'} = $rules;
    } else {
        $self->{'rules'} = new WWW::RobotRules $name;
    }

    $self;
}

=item $ua->delay([$minutes])

Set the minimum delay between requests to the same server.  The
default is 1 minute.

=item $ua->use_sleep([$boolean])

Get/set a value indicating whether the UA should sleep() if requests
arrive too fast (before $ua->delay minutes have passed).  The default
is TRUE.  If this value is FALSE then an internal SERVICE_UNAVAILABLE
response will be generated.  It will have a Retry-After header that
indicates when it is OK to send another request to this server.
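
For example, a robot that would rather keep working than sleep can
check for the generated response itself (a sketch; the requeueing is
left as a comment):

  $ua->use_sleep(0);
  my $res = $ua->request($req);
  if ($res->code == &HTTP::Status::RC_SERVICE_UNAVAILABLE) {
      my $retry_at = $res->header('Retry-After');
      # requeue $req and retry once $retry_at has passed
  }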

=cut

sub delay     { shift->_elem('delay',     @_); }
sub use_sleep { shift->_elem('use_sleep', @_); }


sub agent
{
    my $self = shift;
    my $old = $self->SUPER::agent(@_);
    if (@_) {
        # Changing our name means starting fresh
        $self->{'rules'}->agent($self->{'agent'});
    }
    $old;
}

=item $ua->rules([$rules])

Set/get which I<WWW::RobotRules> object to use.

=cut

sub rules {
    my $self = shift;
    my $old = $self->_elem('rules', @_);
    $self->{'rules'}->agent($self->{'agent'}) if @_;
    $old;
}

=item $ua->no_visits($netloc)

Returns the number of documents fetched from this server host.  Yes, I
know, this method should probably have been named num_visits() or
something like that. :-(

=cut

sub no_visits
{
    my($self, $netloc) = @_;
    $self->{'rules'}->no_visits($netloc);
}

*host_count = \&no_visits;  # backwards compatibility with LWP-5.02

=item $ua->host_wait($netloc)

Returns the number of seconds (from now) you must wait before you can
make a new request to this host.
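
For example (a sketch; assumes $url is an absolute URI object, as in a
request):

  my $netloc = $url->host_port;
  my $wait   = $ua->host_wait($netloc);
  sleep($wait) if $wait;   # it is now polite to request from $netloc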

=cut

sub host_wait
{
    my($self, $netloc) = @_;
    return undef unless defined $netloc;
    my $last = $self->{'rules'}->last_visit($netloc);
    if ($last) {
        my $wait = int($self->{'delay'} * 60 - (time - $last));
        $wait = 0 if $wait < 0;
        return $wait;
    }
    return 0;
}
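
# simple_request() is overridden to enforce robot etiquette: it fetches
# and caches /robots.txt for hosts that have not been visited before (or
# whose cached rules have expired), refuses requests the rules disallow,
# and throttles repeated requests to the same host according to delay().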

sub simple_request
{
    my($self, $request, $arg, $size) = @_;

    LWP::Debug::trace('()');

    # Are we trying to access a new server?
    my $allowed = $self->{'rules'}->allowed($request->url);

    if ($allowed < 0) {
        LWP::Debug::debug("Host has not been visited before, or robots.txt expired.");
        # fetch "robots.txt"
        my $robot_url = $request->url->clone;
        $robot_url->path("robots.txt");
        $robot_url->query(undef);
        LWP::Debug::debug("Requesting $robot_url");

        # make access to robots.txt legal since this will be a recursive call
        $self->{'rules'}->parse($robot_url, "");

        my $robot_req = new HTTP::Request 'GET', $robot_url;
        my $robot_res = $self->request($robot_req);
        my $fresh_until = $robot_res->fresh_until;
        if ($robot_res->is_success) {
            my $c = $robot_res->content;
            if ($robot_res->content_type =~ m,^text/, && $c =~ /Disallow/) {
                LWP::Debug::debug("Parsing robot rules");
                $self->{'rules'}->parse($robot_url, $c, $fresh_until);
            }
            else {
                LWP::Debug::debug("Ignoring robots.txt");
                $self->{'rules'}->parse($robot_url, "", $fresh_until);
            }
        } else {
            LWP::Debug::debug("No robots.txt file found");
            $self->{'rules'}->parse($robot_url, "", $fresh_until);
        }

        # recalculate allowed...
        $allowed = $self->{'rules'}->allowed($request->url);
    }

    # Check rules
    unless ($allowed) {
        return new HTTP::Response
          &HTTP::Status::RC_FORBIDDEN, 'Forbidden by robots.txt';
    }

    my $netloc = $request->url->host_port;
    my $wait = $self->host_wait($netloc);

    if ($wait) {
        LWP::Debug::debug("Must wait $wait seconds");
        if ($self->{'use_sleep'}) {
            sleep($wait);
        } else {
            my $res = new HTTP::Response
              &HTTP::Status::RC_SERVICE_UNAVAILABLE, 'Please, slow down';
            $res->header('Retry-After', time2str(time + $wait));
            return $res;
        }
    }

    # Perform the request
    my $res = $self->SUPER::simple_request($request, $arg, $size);

    $self->{'rules'}->visit($netloc);

    $res;
}

=item $ua->as_string

Returns a string that describes the state of the UA.
Mainly useful for debugging.
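
For example, printing $ua->as_string might give something like this
(the values shown are illustrative):

  Robot: my-robot/0.1 operated by me@foo.com  [LWP::RobotUA=HASH(0x...)]
      Minimum delay: 60s
      Will sleep if too early
      Rules = WWW::RobotRules=HASH(0x...)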

=cut

sub as_string
{
    my $self = shift;
    my @s;
    push(@s, "Robot: $self->{'agent'} operated by $self->{'from'}  [$self]");
    push(@s, "    Minimum delay: " . int($self->{'delay'}*60) . "s");
    push(@s, "    Will sleep if too early") if $self->{'use_sleep'};
    push(@s, "    Rules = $self->{'rules'}");
    join("\n", @s, '');
}

1;

=back

=head1 SEE ALSO

L<LWP::UserAgent>, L<WWW::RobotRules>

=head1 COPYRIGHT

Copyright 1996-2000 Gisle Aas.

This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.

=cut