--- lib/Grep/Source.pm 2007/02/23 18:51:40 84 +++ lib/Grep/Source.pm 2007/02/23 20:47:08 85 @@ -10,6 +10,11 @@ use base qw(Class::Accessor); Grep::Source->mk_accessors( qw(feed uri q new_items collection) ); +use HTML::TreeBuilder; +use WWW::Mechanize; +use XML::Feed; +use URI; + use Data::Dump qw/dump/; =head1 NAME @@ -171,4 +176,116 @@ } } +=head2 scrape + +Create semi-complex L<HTML::TreeBuilder> rules to scrape page + + +=cut + +sub scrape { + my $self = shift; + + my $args = {@_}; + + warn "scrape got args ",dump($args); + + my ($feed,$uri,$q) = ($self->feed, $self->uri,$self->q); + die "no uri" unless ($uri); + die "feed is not a Grep::Model::Feed but ", ref $feed unless $feed->isa('Grep::Model::Feed'); + + sub save_html { + my ( $file, $content ) = @_; + if ( -w '/tmp/grep' ) { + open(my $f, '>', "/tmp/grep/${file}.html") or die "can't open $file: $!"; + print $f $content or die "can't write to $file: $!"; + close $f or die "can't close $file: $!"; + } + } + + my $mech = WWW::Mechanize->new(); + + $mech->get( $uri ); + + save_html( 'get', $mech->content ); + + if ( $args->{submit_form} ) { + warn "submit form on $uri\n"; + $mech->submit_form( %{ $args->{submit_form} } ) or die "can't submit form"; + save_html( 'submit', $mech->content ); + } + + warn "parse result page\n"; + + my $tree = HTML::TreeBuilder->new or die "can't create html tree"; + $tree->parse( $mech->content ) or die "can't parse fetched content"; + + die "wrapper doesn't have 3 elements but ", $#{ $args->{wrapper} } unless ( $#{ $args->{wrapper} } == 2 ); + my ( $el,$attr,$value ) = @{ $args->{wrapper} }; + + warn "looking for <$el $attr=\"$value\">"; + + my $div = $tree->look_down( '_tag', $el, sub { + warn dump( $_[0]->attr( $attr ) ),$/; + ( $_[0]->attr( $attr ) || '' ) eq $value; + }); + + die "can't find results wrapper <$el $attr=\"$value\">" unless ( $div ); + + my $max = 5; + my $nr = 1; + + my $base_uri = $uri; + $base_uri =~ s!\?.*$!!; + + foreach my $dt ( $div->look_down( '_tag', $args->{results} ) ) 
{ + my $a = $dt->look_down( '_tag', 'a', sub { $_[0]->attr('href') } ); + if ( $a ) { + + my $href = $a->attr('href') or die "can't find href inside <", $args->{results}, ">"; + my $page_uri = URI->new_abs( $a->attr('href'), $base_uri ); + $page_uri->query( undef ); + $page_uri = $page_uri->canonical; + + warn "fetching page: ",$a->as_text," from $page_uri\n"; + if ( $mech->follow_link( url => $a->attr('href') ) ) { + + save_html( "page-${nr}", $mech->content ); + + my $page_tree = HTML::TreeBuilder->new or die "can't create page tree"; + $page_tree->parse( $mech->content ) or die "can't parse page at $page_uri"; + + my ( $el,$attr,$value ) = @{ $args->{scrape} }; + my $div = $page_tree->look_down( '_tag', $el, sub { ( $_[0]->attr( $attr ) || '' ) eq $value } ); + + die "can't find <$el $attr=\"$value\">" unless ($div); + + $self->add_record( + in_feed => $feed, + title => $mech->title, + link => $page_uri, + content => $div->as_HTML, +# summary => +# category => +# author => +# issued => +# modified => + ); + + $mech->back; + $page_tree->delete; + + } else { + warn "can't follow uri $page_uri: $!\n"; + } + } + + last if ($nr == $max); + $nr++; + } + + $tree->delete; # clear memory! + +} + 1;