#!/usr/local/bin/perl
# File: find_bad_links.pl
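#
# Starting from a single URL, recursively walk the HTML documents on the
# same site and report links that cannot be fetched.  Local documents are
# fetched with GET and parsed for further links; foreign links are only
# verified with HEAD.  The site's robots.txt rules are honored.
#
# Usage: find_bad_links.pl <start_url>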
use LWP::UserAgent;
use HTML::LinkExtor;
use URI::URL;
use WWW::RobotRules;
# URL schemes this checker is willing to follow
%CAN_HANDLE = (
    'http'   => 1,
    'gopher' => 1,
#   'ftp'    => 1,   # timeout problems?
);

%OUTCOME = ();    # cache of results, keyed by URL (minus fragment)
$CHECKED = 0;     # number of documents checked
@BAD     = ();    # list of bad links found
my $URL = shift;
$UA = LWP::UserAgent->new;
$PARSER = HTML::LinkExtor->new();
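# Fetch the headers of the starting document to learn its base URL.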
$TOP = $UA->request(HTTP::Request->new(HEAD => $URL));
$BASE = $TOP->base;
# handle robot rules
my $robots = URI::URL->new('robots.txt', $BASE->scheme . '://' . $BASE->netloc);
my $robots_text = $UA->request(HTTP::Request->new(GET => $robots->abs))->content;
$ROBOTRULES = WWW::RobotRules->new('find_bad_links/1.0');   # robot name is arbitrary
$ROBOTRULES->parse($robots->abs, $robots_text);
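
# Walk the link tree, starting from the (possibly redirected) top URL,
# then print a summary of any bad links found.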
check_links(URI::URL->new($TOP->request->url));
if (@BAD) {
    print "\nBAD LINKS:\n";
    print join("\n", @BAD), "\n\n";
}

print "$CHECKED documents checked\n", scalar(@BAD), " bad links\n";
sub check_links {
    my $url = shift;
    my $fixed_url = $url;
    $fixed_url =~ s/\#.+$//;    # strip any fragment identifier

    return 1 unless $CAN_HANDLE{$url->scheme};

    # return the cached outcome if we have seen this URL before
    return $OUTCOME{$fixed_url} if exists $OUTCOME{$fixed_url};

    print STDERR "checking $fixed_url...\n";
    $CHECKED++;

    my $rel   = $url->rel($BASE) || 'index.html';
    my $child = is_child($BASE, $url);
    $UA->timeout(5);

    # GET local documents so they can be parsed; HEAD is enough for foreign links
    my $doc = $UA->request(HTTP::Request->new(($child ? 'GET' : 'HEAD') => $url));
    $OUTCOME{$fixed_url} = $doc->is_success;

    # only descend into HTML documents on our own site that robots.txt allows
    return $OUTCOME{$fixed_url}
        unless $ROBOTRULES->allowed($fixed_url)
            && $child
            && $doc->header('Content-Type') =~ m!^text/html!;
    # Follow HTML documents
    my $base = $doc->base;

    # pull out the links and call ourselves recursively
    my @links = $PARSER->parse($doc->content)->links;
    my @hrefs = map { url($_->[2], $base)->abs } @links;

    foreach (@hrefs) {
        next if check_links($_);
        push(@BAD, "$rel : $_");
    }
    1;
}
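
# A link is a "child" of the base document if it is relative to the base
# and does not reach outside the base's directory (no leading "/" or "..").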
sub is_child {
    my ($base, $url) = @_;
    my $rel = $url->rel($base);
    return ($rel ne $url) && ($rel !~ m!^[/.]!);
}