--- trunk/run.pl	2005/11/24 11:47:29	127
+++ trunk/run.pl	2006/09/26 10:23:04	712
@@ -4,105 +4,563 @@
 use Cwd qw/abs_path/;
 use File::Temp qw/tempdir/;
-use Data::Dumper;
 use lib './lib';
-use WebPAC::Lookup;
-use WebPAC::Input::ISIS;
-use WebPAC::DB 0.02;
-use WebPAC::Normalize::XML;
+use WebPAC::Common 0.02;
+use WebPAC::Parser 0.04;
+use WebPAC::Input 0.13;
+use WebPAC::Store 0.10;
+use WebPAC::Normalize 0.11;
 use WebPAC::Output::TT;
-use WebPAC::Output::Estraier;
+use WebPAC::Validate 0.06;
+use WebPAC::Output::MARC;
+use WebPAC::Config;
+use Getopt::Long;
+use File::Path;
+use Time::HiRes qw/time/;
+use File::Slurp;
+use Data::Dump qw/dump/;
+use Storable qw/dclone/;

-my $limit = shift @ARGV;
+use Proc::Queue size => 1;
+use POSIX ":sys_wait_h"; # imports WNOHANG

-my $abs_path = abs_path($0);
-$abs_path =~ s#/[^/]*$#/#;
+=head1 NAME

-my $isis_file = '/data/isis_data/ps/LIBRI/LIBRI';
+run.pl - start WebPAC indexing

-my $lookup = new WebPAC::Lookup(
-	lookup_file => "$abs_path/conf/lookup/isis.pm",
-);
+B<run.pl [options]>

-my $isis = new WebPAC::Input::ISIS(
-	code_page => 'ISO-8859-2',	# application encoding
-	limit_mfn => $limit,
-);
+Options:

-my $maxmfn = $isis->open(
-	filename => $isis_file,
-	code_page => '852',	# database encoding
-);
+=over 4

-my $path = './db/';
+=item --offset 42

-my $db = new WebPAC::DB(
-	path => $path,
-);
+start loading (all) databases at offset 42

-my $n = new WebPAC::Normalize::XML(
-#	filter => { 'foo' => sub { shift } },
-	db => $db,
-	lookup_regex => $lookup->regex,
-	lookup => $lookup,
-);
+=item --limit 100

-$n->open(
-	tag => 'isis',
-	xml_file => "$abs_path/conf/normalize/isis_ffzg.xml",
-);
+limit loading to 100 records

-my $out = new WebPAC::Output::TT(
-	include_path => "$abs_path/conf/output/tt",
-	filters => { foo => sub { shift } },
-);
+=item --clean
+
+remove database and Hyper Estraier index before indexing
+
+=item --only=database_name/input_filter
+
+reindex just a single database (legacy name is --one)
+
+C<input_filter> is an optional part which can be C<name>
+or C<type> from input
+
+=item --config conf/config.yml
+
+path to YAML configuration file
+
+=item --stats
+
+disable indexing and any modify_* directives in the configuration, and dump
+statistics about field and subfield usage for each input
+
+=item --validate path/to/validation_file
+
+turn on extra validation of input records, see L<WebPAC::Validate>
+
+=item --marc-normalize conf/normalize/mapping.pl
+
+This option specifies the normalisation file for MARC creation
+
+=item --marc-output out/marc/test.marc
+
+Optional path to the output file
+
+=item --marc-lint
+
+By default turned on if C<--marc-normalize> is used. You can disable lint
+messages with C<--no-marc-lint>.
+
+=item --marc-dump
+
+Force dump of input and MARC record for debugging.
+
+=item --parallel 4
+
+Run databases in parallel (approximately the same as the number of processors
+in the machine if you want to use full load)
+
+=item --only-links

-my $est = new WebPAC::Output::Estraier(
-	url => 'http://localhost:1978/node/webpac2',
-	user => 'admin',
-	passwd => 'admin',
-	database => 'ps',
+Create just links
+
+=item --merge
+
+Create merged index of databases which have links
+
+=back
+
+=cut
+
+my $offset;
+my $limit;
+
+my $clean = 0;
+my $config_path;
+my $debug = 0;
+my $only_filter;
+my $stats = 0;
+my $validate_path;
+my ($marc_normalize, $marc_output);
+my $marc_lint = 1;
+my $marc_dump = 0;
+my $parallel = 0;
+my $only_links = 0;
+my $merge = 0;
+
+my $log = _new WebPAC::Common()->_get_logger();
+
+GetOptions(
+	"limit=i" => \$limit,
+	"offset=i" => \$offset,
+	"clean" => \$clean,
+	"one=s" => \$only_filter,
+	"only=s" => \$only_filter,
+	"config=s" => \$config_path,
+	"debug+" => \$debug,
+	"stats" => \$stats,
+	"validate=s" => \$validate_path,
+	"marc-normalize=s" => \$marc_normalize,
+	"marc-output=s" => \$marc_output,
+	"marc-lint!" => \$marc_lint,
+	"marc-dump!" => \$marc_dump,
+	"parallel=i" => \$parallel,
+	"only-links!" => \$only_links,
+	"merge" => \$merge,
 );

-my $total_rows = 0;
+my $config = new WebPAC::Config( path => $config_path );

-for ( 0 ... $isis->size ) {
+#print "config = ",dump($config) if ($debug);

-	my $row = $isis->fetch || next;
+die "no databases in config file!\n" unless ($config->databases);

-	my $mfn = $row->{'000'}->[0] || die "can't find MFN";
+$log->info( "-" x 79 );

-	my $ds = $n->data_structure($row);
-#	print STDERR Dumper($row, $ds);
+my $estcmd_fh;
+my $estcmd_path = './estcmd-merge.sh';
+if ($merge) {
+	open($estcmd_fh, '>', $estcmd_path) || $log->logdie("can't open $estcmd_path: $!");
+	print $estcmd_fh 'cd /data/estraier/_node/ || exit 1',$/;
+	print $estcmd_fh 'sudo /etc/init.d/hyperestraier stop',$/;
+	$log->info("created merge batch file $estcmd_path");
+}

-#	my $html = $out->apply(
-#		template => 'html_ffzg.tt',
-#		data => $ds,
-#	);
-#
-#	# create test output
-#
-#	my $file = sprintf('out/%02d.html', $mfn );
-#	open(my $fh, '>', $file) or die "can't open $file: $!";
-#	print $fh $html;
-#	close($fh);
-#
-#	$html =~ s#\s*[\n\r]+\s*##gs;
-#
-#	print STDERR $html;

-	$est->add(
-		id => $mfn,
-		ds => $ds,
-		type => 'search',
+my $validate;
+$validate = new WebPAC::Validate(
+	path => $validate_path,
+) if ($validate_path);
+
+
+my $use_indexer = $config->use_indexer;
+if ($stats) {
+	$log->debug("option --stats disables update of indexing engine...");
+	$use_indexer = undef;
+} else {
+	$log->info("using $use_indexer indexing engine...");
+}
+
+# disable indexing when creating marc
+$use_indexer = undef if ($marc_normalize);
+
+# parse normalize files and create source files for lookup and normalization
+
+my $parser = new WebPAC::Parser( config => $config );
+
+my $total_rows = 0;
+my $start_t = time();
+
+my @links;
+
+if ($parallel) {
+	$log->info("Using $parallel processes for speedup");
+	Proc::Queue::size($parallel);
+}
+
+sub create_ds_config {
+	my ($db_config, $database, $input, $mfn) = @_;
+	my $c = dclone( $db_config );
+	$c->{_} = $database || $log->logconfess("need database");
+	$c->{_mfn} = $mfn || $log->logconfess("need mfn");
+	$c->{input} = $input || $log->logconfess("need input");
+	return $c;
+}
+
+while (my ($database, $db_config) = each %{ $config->databases }) {
+
+	my ($only_database,$only_input) = split(m#/#, $only_filter) if ($only_filter);
+	next if ($only_database && $database !~ m/$only_database/i);
+
+	if ($parallel) {
+		my $f=fork;
+		if(defined ($f) and $f==0) {
+			$log->info("Created process $$ for speedup");
+		} else {
+			next;
+		}
+	}
+
+	my $indexer;
+	if ($use_indexer) {
+
+		my $cfg_name = $use_indexer;
+		$cfg_name =~ s/\-.*$//;
+
+		my $indexer_config = $config->get( $cfg_name ) || $log->logdie("can't find '$cfg_name' part in configuration");
+		$indexer_config->{database} = $database;
+		$indexer_config->{clean} = $clean;
+		$indexer_config->{label} = $db_config->{name};
+
+		# force clean if database has links
+		$indexer_config->{clean} = 1 if ($db_config->{links});
+
+		if ($use_indexer eq 'hyperestraier') {
+
+			# open Hyper Estraier database
+			use WebPAC::Output::Estraier '0.10';
+			$indexer = new WebPAC::Output::Estraier( %{ $indexer_config } );
+
+		} elsif ($use_indexer eq 'hyperestraier-native') {
+
+			# open Hyper Estraier database
+			use WebPAC::Output::EstraierNative;
+			$indexer = new WebPAC::Output::EstraierNative( %{ $indexer_config } );
+
+		} elsif ($use_indexer eq 'kinosearch') {
+
+			# open KinoSearch
+			use WebPAC::Output::KinoSearch;
+			$indexer_config->{clean} = 1 unless (-e $indexer_config->{index_path});
+			$indexer = new WebPAC::Output::KinoSearch( %{ $indexer_config } );
+
+		} else {
+			$log->logdie("unknown use_indexer: $use_indexer");
+		}
+
+		$log->logdie("can't continue without valid indexer") unless ($indexer);
+	}
+
+
+	#
+	# store Hyper Estraier links to other databases
+	#
+	if (ref($db_config->{links}) eq 'ARRAY' && $use_indexer) {
+		foreach my $link (@{ $db_config->{links} }) {
+			if ($use_indexer eq 'hyperestraier') {
+				if ($merge) {
+					print $estcmd_fh 'sudo -u www-data estcmd merge ' . $database . ' ' . $link->{to},$/;
+				} else {
+					$log->info("saving link $database -> $link->{to} [$link->{credit}]");
+					push @links, sub {
+						$log->info("adding link $database -> $link->{to} [$link->{credit}]");
+						$indexer->add_link(
+							from => $database,
+							to => $link->{to},
+							credit => $link->{credit},
+						);
+					};
+				}
+			} else {
+				$log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
+			}
+		}
+	}
+	next if ($only_links);
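With --parallel N the loop above forks one child per database and relies on Proc::Queue to keep at most N children alive at once (its replacement fork() simply blocks until a slot frees up). A minimal, self-contained sketch of that pattern, independent of WebPAC and with purely illustrative job names and messages:

	use strict;
	use warnings;
	use Proc::Queue size => 4;	# fork() now blocks once 4 children are running

	foreach my $job (1 .. 10) {
		my $f = fork;
		die "can't fork: $!" unless defined $f;
		if ($f == 0) {
			# child: the per-database work would go here
			print "child $$ handling job $job\n";
			sleep 1;
			exit 0;	# child must exit, or it would continue the outer loop
		}
		# parent: keep looping; Proc::Queue enforces the concurrency limit
	}

	# reap all remaining children, same idea as `sleep(1) while wait != -1;` below
	1 while wait != -1;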
+
+
+	#
+	# now WebPAC::Store
+	#
+	my $abs_path = abs_path($0);
+	$abs_path =~ s#/[^/]*$#/#;
+
+	my $db_path = $config->get('webpac')->{db_path} . '/' . $database;
+
+	if ($clean) {
+		$log->info("creating new database '$database' in $db_path");
+		rmtree( $db_path ) || $log->warn("can't remove $db_path: $!");
+	} else {
+		$log->info("working on database '$database' in $db_path");
+	}
+
+	my $store = new WebPAC::Store(
+		path => $db_path,
+		database => $database,
+		debug => $debug,
+	);
+
+
+	#
+	# now, iterate through input formats
+	#
+
+	my @inputs;
+	if (ref($db_config->{input}) eq 'ARRAY') {
+		@inputs = @{ $db_config->{input} };
+	} elsif ($db_config->{input}) {
+		push @inputs, $db_config->{input};
+	} else {
+		$log->info("database $database doesn't have inputs defined");
+	}
+
+	foreach my $input (@inputs) {
+
+		my $input_name = $input->{name} || $log->logdie("input without a name isn't valid: ",dump($input));
+
+		next if ($only_input && ($input_name !~ m#$only_input#i && $input->{type} !~ m#$only_input#i));
+
+		my $type = lc($input->{type});
+
+		die "I know only how to handle input types ", join(",", $config->webpac('inputs') ), " not '$type'!\n" unless (grep(/$type/, $config->webpac('inputs')));
+
+		my $input_module = $config->webpac('inputs')->{$type};
+
+		my @lookups = $parser->have_lookup_create($database, $input);
+
+		$log->info("working on input '$input_name' in $input->{path} [type: $input->{type}] using $input_module",
+			@lookups ? " creating lookups: ".join(", ", @lookups) : ""
+		);
+
+		if ($stats) {
+			# disable modification of records if --stats is in use
+			delete($input->{modify_records});
+			delete($input->{modify_file});
+		}
+
+		my $input_db = new WebPAC::Input(
+			module => $input_module,
+			encoding => $config->webpac('webpac_encoding'),
+			limit => $limit || $input->{limit},
+			offset => $offset,
+			recode => $input->{recode},
+			stats => $stats,
+			modify_records => $input->{modify_records},
+			modify_file => $input->{modify_file},
+		);
+		$log->logdie("can't create input using $input_module") unless ($input_db);
+
+		if (defined( $input->{lookup} )) {
+			$log->warn("$database/$input_name has deprecated lookup definition, removing it...");
+			delete( $input->{lookup} );
+		}
+
+		my $lookup;
+		my $lookup_coderef;
+
+		if (@lookups) {
+
+			my $rules = $parser->lookup_create_rules($database, $input) || $log->logdie("no rules found for $database/$input_name");
+
+			$lookup_coderef = sub {
+				my $rec = shift || die "need rec!";
+				my $mfn = $rec->{'000'}->[0] || die "need mfn in 000";
+
+				WebPAC::Normalize::data_structure(
+					row => $rec,
+					rules => $rules,
+					lookup => $lookup,
+					config => create_ds_config( $db_config, $database, $input, $mfn ),
+				);
+
+				warn "current lookup = ", dump($lookup) if ($lookup);
+			};
+
+			WebPAC::Normalize::_set_lookup( undef );
+
+			$log->debug("created lookup_coderef using:\n$rules");
+
+		};
+
+		my $maxmfn = $input_db->open(
+			path => $input->{path},
+			code_page => $input->{encoding},	# database encoding
+			lookup_coderef => $lookup_coderef,
+			%{ $input },
+		);
+
+		my $lookup_data = WebPAC::Normalize::_get_lookup();
+
+		if (defined( $lookup_data->{$database}->{$input_name} )) {
+			$log->debug("created following lookups: ", dump( $lookup_data ));
+
+			foreach my $key (keys %{ $lookup_data->{$database}->{$input_name} }) {
+				$store->save_lookup( $database, $input_name, $key, $lookup_data->{$database}->{$input_name}->{$key} );
+			}
+		}
+
+		my $report_fh;
+		if ($stats || $validate) {
+			my $path = "out/report/${database}-${input_name}.txt";
+			open($report_fh, '>', $path) || $log->logdie("can't open $path: $!");
+
+			print $report_fh "Report for database '$database' input '$input_name' records ",
+				$offset || 1, "-", $limit || $input->{limit} || $maxmfn, "\n\n";
+			$log->info("Generating report file $path");
+		}
+
+		my @norm_array = ref($input->{normalize}) eq 'ARRAY' ?
+			@{ $input->{normalize} } : ( $input->{normalize} );
+
+		if ($marc_normalize) {
+			@norm_array = ( {
+				path => $marc_normalize,
+				output => $marc_output || "out/marc/${database}-${input_name}.marc",
+			} );
+		}
+
+		foreach my $normalize (@norm_array) {
+
+			my $normalize_path = $normalize->{path} || $log->logdie("can't find normalize path in config");
+
+			$log->logdie("Found '$normalize_path' as normalization file which isn't supported any more!") unless ( $normalize_path =~ m!\.pl$!i );
+
+			my $rules = read_file( $normalize_path ) or die "can't open $normalize_path: $!";
+
+			$log->info("Using $normalize_path for normalization...");
+
+			my $marc = new WebPAC::Output::MARC(
+				path => $normalize->{output},
+				lint => $marc_lint,
+				dump => $marc_dump,
+			) if ($normalize->{output});
+
+			# reset position in database
+			$input_db->seek(1);
+
+			# generate name of config key for indexer (strip everything after -)
+			my $indexer_config = $use_indexer;
+			$indexer_config =~ s/^(\w+)-?.*$/$1/g if ($indexer_config);
+
+			foreach my $pos ( 0 ... $input_db->size ) {
+
+				my $row = $input_db->fetch || next;
+
+				my $mfn = $row->{'000'}->[0];
+
+				if (! $mfn || $mfn !~ m#^\d+$#) {
+					$log->warn("record $pos doesn't have valid MFN but '$mfn', using $pos");
+					$mfn = $pos;
+					push @{ $row->{'000'} }, $pos;
+				}
+
+
+				if ($validate) {
+					if ( my $errors = $validate->validate_errors( $row, $input_db->dump ) ) {
+						$log->error( "MFN $mfn validation error:\n",
+							$validate->report_error( $errors )
+						);
+					}
+				}
+
+				my $ds = WebPAC::Normalize::data_structure(
+					row => $row,
+					rules => $rules,
+					lookup => $lookup ? $lookup->lookup_hash : undef,
+					config => create_ds_config( $db_config, $database, $input, $mfn ),
+					marc_encoding => 'utf-8',
+				);
+
+				$store->save_ds(
+					id => $mfn,
+					ds => $ds,
+					prefix => $input_name,
+				) if ($ds && !$stats);
+
+				$indexer->add(
+					id => "${input_name}/${mfn}",
+					ds => $ds,
+					type => $config->get($indexer_config)->{type},
+				) if ($indexer && $ds);
+
+				if ($marc) {
+					my $i = 0;
+
+					while (my $fields = WebPAC::Normalize::_get_marc_fields( fetch_next => 1 ) ) {
+						$marc->add(
+							id => $mfn . ( $i ? "/$i" : '' ),
+							fields => $fields,
+							leader => WebPAC::Normalize::marc_leader(),
+							row => $row,
+						);
+						$i++;
+					}
+
+					$log->info("Created $i instances of MFN $mfn\n") if ($i > 1);
+				}
+
+				$total_rows++;
+			}
+
+			if ($validate) {
+				my $errors = $validate->report;
+				if ($errors) {
+					$log->info("validation errors:\n$errors\n" );
+					print $report_fh "$errors\n" if ($report_fh);
+				}
+			}
+
+			if ($stats) {
+				my $s = $input_db->stats;
+				$log->info("statistics of fields usage:\n$s");
+				print $report_fh "Statistics of fields usage:\n$s" if ($report_fh);
+			}
+
+			# close MARC file
+			$marc->finish if ($marc);
+
+			# close report
+			close($report_fh) if ($report_fh);
+		}
+
+	}
+
+	eval { $indexer->finish } if ($indexer && $indexer->can('finish'));
+
+	my $dt = time() - $start_t;
+	$log->info("$total_rows records ", $indexer ? "indexed " : "",
+		sprintf("in %.2f sec [%.2f rec/sec]",
+			$dt, ($total_rows / $dt)
+		)
 	);

-	$total_rows++;
-};
+	# end forked process
+	if ($parallel) {
+		$log->info("parallel process $$ finished");
+		exit(0);
+	}
+
+}
+
+if ($parallel) {
+	# wait for all children to finish
+	sleep(1) while wait != -1;
+	$log->info("all parallel processes finished");
+}

-my $log = $lookup->_get_logger;
+#
+# handle links or merge after indexing
+#

-$log->info("$total_rows records indexed");
+if ($merge) {
+	print $estcmd_fh 'sudo /etc/init.d/hyperestraier start',$/;
+	close($estcmd_fh);
+	chmod(0700, $estcmd_path) || $log->warn("can't chmod 0700 $estcmd_path: $!");
+	system $estcmd_path;
+} else {
+	foreach my $link (@links) {
+		$log->logdie("coderef in link ", dump($link), " is ", ref($link), " and not CODE") unless (ref($link) eq 'CODE');
+		$link->();
+	}
+}