--- lib/Grep/Source.pm	2007/02/23 17:16:33	74
+++ lib/Grep/Source.pm	2007/02/24 11:16:05	92
@@ -10,6 +10,11 @@
 use base qw(Class::Accessor);
 Grep::Source->mk_accessors( qw(feed uri q new_items collection) );
 
+use HTML::TreeBuilder;
+use WWW::Mechanize;
+use XML::Feed;
+use URI;
+
 use Data::Dump qw/dump/;
 
 =head1 NAME
@@ -158,10 +163,150 @@
 	foreach my $s ( $self->sources ) {
 		Jifty->log->debug("testing source class $s");
-		if ($s->can('content_have') && $s->content_have( $content ) ) {
-			Jifty->log->debug("${s}->content_have succesful");
-			return "$s";
+		if ( $s->can('content_have') ) {
+			my $regex = $s->content_have( $content ) or
+				die "${s}->content_have didn't return anything";
+			die "${s}->content_have didn't return regex but ", dump( $regex ), " ref ", ref( $regex )
+				unless ( ref($regex) eq 'Regexp' );
+			if ( $content =~ $regex ) {
+				Jifty->log->debug("${s}->content_have successful");
+				return $s;
+			}
+		}
+	}
+}
+
+=head2 scrape
+
+Create semi-complex L<WWW::Mechanize> rules to scrape a page
+
+=cut
+
+sub scrape {
+	my $self = shift;
+
+	my $args = {@_};
+
+	warn "scrape got args ",dump($args);
+
+	my ($feed,$uri,$q) = ($self->feed, $self->uri,$self->q);
+	die "no uri" unless ($uri);
+	die "feed is not a Grep::Model::Feed but ", ref $feed unless $feed->isa('Grep::Model::Feed');
+
+	sub mech_warn {
+		my $m = shift || return;
+		warn $m;
+	}
+
+	my $mech = WWW::Mechanize->new(
+		cookie_jar => {},
+		onwarn => \&mech_warn,
+		onerror => \&mech_warn,
+	);
+
+	$mech->get( $uri );
+
+	$self->save( 'get.html', $mech->content );
+
+	if ( my $form = $args->{submit_form} ) {
+		warn "submit form on $uri with ", dump( $form ),"\n";
+		$mech->submit_form( %$form ) or die "can't submit form ", dump( $form );
+		$self->save( 'submit.html', $mech->content );
+	}
+
+	warn "parse result page\n";
+
+	my $tree = HTML::TreeBuilder->new or die "can't create html tree";
+	$tree->parse( $mech->content ) or die "can't parse fetched content";
+
+	die "wrapper doesn't have 3 elements but ", $#{ $args->{wrapper} } unless ( $#{ $args->{wrapper} } == 2 );
+	my ( $el,$attr,$value ) = @{ $args->{wrapper} };
+
+	warn "looking for <$el $attr=\"$value\">";
+
+	my $div = $tree->look_down( '_tag', $el, sub {
+		warn dump( $_[0]->attr( $attr ) ),$/;
+		( $_[0]->attr( $attr ) || '' ) eq $value;
+	});
+
+	if ( ! $div ) {
+		warn "can't find results wrapper <$el $attr=\"$value\">";
+		return;
+	}
+
+	my $max = 5;
+	my $nr = 1;
+
+	my $base_uri = $uri;
+	$base_uri =~ s!\?.*$!!;
+
+	foreach my $dt ( $div->look_down( '_tag', $args->{results} ) ) {
+		my $a = $dt->look_down( '_tag', 'a', sub { $_[0]->attr('href') } );
+		if ( $a ) {
+
+			my $href = $a->attr('href') or die "can't find href inside <", $args->{results}, ">";
+			my $page_uri = URI->new_abs( $a->attr('href'), $base_uri );
+			$page_uri->query( undef );
+			$page_uri = $page_uri->canonical;
+
+			warn "fetching page: ",$a->as_text," from $page_uri\n";
+			if ( $mech->follow_link( url => $a->attr('href') ) ) {
+
+				$self->save( "page-${nr}.html", $mech->content );
+
+				my $page_tree = HTML::TreeBuilder->new or die "can't create page tree";
+				$page_tree->parse( $mech->content ) or die "can't parse page at $page_uri";
+
+				( $el,$attr,$value ) = @{ $args->{scrape} };
+				$div = $page_tree->look_down( '_tag', $el, sub { ( $_[0]->attr( $attr ) || '' ) eq $value } );
+
+				die "can't find <$el $attr=\"$value\">" unless ($div);
+
+				$self->add_record(
+					in_feed => $feed,
+					title => $mech->title,
+					link => $page_uri,
+					content => $div->as_HTML,
+#					summary =>
+#					category =>
+#					author =>
+#					issued =>
+#					modified =>
+				);
+
+				$mech->back;
+				$page_tree->delete;
+
+			} else {
+				warn "can't follow uri $page_uri: $!\n";
+			}
 		}
+
+		last if ($nr == $max);
+		$nr++;
+	}
+
+	$tree->delete; # clear memory!
+
+}
+
+=head2 save
+
+  save( 'name', $content );
+
+Save dumps into C</tmp/grep> if writable
+
+=cut
+
+sub save {
+	my $self = shift;
+	my ( $file, $content ) = @_;
+	if ( -w '/tmp/grep' ) {
+		open(my $f, '>', "/tmp/grep/$file") or die "can't open $file: $!";
+		print $f $content or die "can't write to $file: $!";
+		close $f or die "can't close $file: $!";
+		Jifty->log->debug("saved $file ",length($content)," bytes");
 	}
 }
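
The first hunk changes the contract for source classes: content_have() must now return a compiled regular expression (a qr// object) instead of doing its own matching; the calling loop dies unless ref($regex) eq 'Regexp' and then matches the pattern against the fetched content itself. A minimal sketch of a conforming source class; the package name and pattern are invented for illustration:

  package Grep::Source::Example;   # hypothetical source class, for illustration only

  # Called with the fetched page content; the argument may be ignored.
  # The return value must be a qr// pattern, which the caller matches
  # against the content to decide whether this class handles the feed.
  sub content_have {
      my ( $class, $content ) = @_;
      return qr/powered\s+by\s+example\s+search/i;
  }

  1;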
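
The scrape() method added in the second hunk is driven by a handful of named arguments: an optional submit_form hash handed straight to WWW::Mechanize->submit_form(), a three-element wrapper array ( element, attribute, value ) that locates the results container, a results tag name whose elements are scanned for their first link, and a three-element scrape array that locates the content block on each followed page; at most five result links are followed per run. A rough usage sketch, assuming the plain Class::Accessor constructor; the URI, form fields and element/attribute values below are made up:

  my $source = Grep::Source->new({
      feed => $feed,     # an existing Grep::Model::Feed record
      uri  => 'http://search.example.com/',
  });

  $source->scrape(
      # passed through to WWW::Mechanize->submit_form()
      submit_form => {
          form_number => 1,
          fields      => { q => 'jifty' },
      },
      # results are wrapped in <div class="results"> ... </div>
      wrapper => [ 'div', 'class', 'results' ],
      # every <dt> inside the wrapper is scanned for its first link
      results => 'dt',
      # on each followed page, <div id="content"> becomes the item body
      scrape  => [ 'div', 'id', 'content' ],
  );

Intermediate HTML is also dumped through save(), so if /tmp/grep exists and is writable it will collect get.html, submit.html and page-N.html for debugging.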