use Cwd qw/abs_path/;
use File::Temp qw/tempdir/;

use lib './lib';

use WebPAC::Common 0.02;
use WebPAC::Lookup 0.03;
use WebPAC::Input 0.11;
use WebPAC::Store 0.03;
use WebPAC::Normalize 0.11;
use WebPAC::Output::TT;
use WebPAC::Validate 0.06;
use WebPAC::Output::MARC;
use WebPAC::Config;
use Getopt::Long;
use File::Path;
use Time::HiRes qw/time/;
use File::Slurp;
use Data::Dump qw/dump/;
use Storable qw/dclone/;

use Proc::Queue size => 1;
use POSIX ":sys_wait_h"; # imports WNOHANG

=head1 NAME

run.pl - start WebPAC indexing

B<this command will probably go away. Don't get used to it!>

Options:

=over 4

=item --offset 42

start loading (all) databases at offset 42

=item --limit 100

limit loading to 100 records

=item --clean

remove database and Hyper Estraier index before indexing

=item --only=database_name/input_filter

reindex just single database (legacy name is --one)

C</input_filter> is optional part which can be C<name>
or C<type> from input

=item --config conf/config.yml

path to YAML configuration file

=item --stats

disable indexing, modify_* in configuration and dump statistics about field
and subfield usage for each input

=item --validate path/to/validation_file

turn on extra validation of imput records, see L<WebPAC::Validation>

=item --marc-normalize conf/normalize/mapping.pl

This option specifies normalisation file for MARC creation

=item --marc-output out/marc/test.marc

Optional path to output file

=item --marc-lint

By default turned on if C<--marc-normalize> is used. You can disable lint
messages with C<--no-marc-lint>.

=item --marc-dump

Force dump or input and marc record for debugging.

=item --parallel 4

Run databases in parallel (aproximatly same as number of processors in
machine if you want to use full load)

=item --only-links

Create just links

=item --merge

Create merged index of databases which have links

=back

=cut

my $offset;
my $limit;

my $clean = 0;
my $config_path;
my $debug = 0;
my $only_filter;
my $stats = 0;
my $validate_path;
my ($marc_normalize, $marc_output);
my $marc_lint = 1;
my $marc_dump = 0;
my $parallel = 0;
my $only_links = 0;
my $merge = 0;

# NOTE(review): indirect-object call kept as-is; WebPAC::Common's
# constructor really is named _new -- confirm before "fixing" this.
my $log = _new WebPAC::Common()->_get_logger();

GetOptions(
	"limit=i" => \$limit,
	"offset=i" => \$offset,
	"clean" => \$clean,
	"one=s" => \$only_filter,
	"only=s" => \$only_filter,
	"config" => \$config_path,
	"debug+" => \$debug,
	"stats" => \$stats,
	"validate=s" => \$validate_path,
	"marc-normalize=s" => \$marc_normalize,
	"marc-output=s" => \$marc_output,
	"marc-lint!" => \$marc_lint,
	"marc-dump!" => \$marc_dump,
	"parallel=i" => \$parallel,
	"only-links!" => \$only_links,
	"merge" => \$merge,
);

my $config = new WebPAC::Config( path => $config_path );

#print "config = ",dump($config) if ($debug);

die "no databases in config file!\n" unless ($config->databases);

$log->info( "-" x 79 );


# when --merge is given we don't call estcmd directly but collect the
# commands into a shell script which is executed once, after indexing
my $estcmd_fh;
my $estcmd_path = './estcmd-merge.sh';
if ($merge) {
	open($estcmd_fh, '>', $estcmd_path) || $log->logdie("can't open $estcmd_path: $!");
	print $estcmd_fh 'cd /data/estraier/_node/ || exit 1',$/;
	print $estcmd_fh 'sudo /etc/init.d/hyperestraier stop',$/;
	$log->info("created merge batch file $estcmd_path");
}


my $validate;
$validate = new WebPAC::Validate(
	path => $validate_path,
) if ($validate_path);


my $use_indexer = $config->use_indexer;
if ($stats) {
	$log->debug("option --stats disables update of indexing engine...");
	$use_indexer = undef;
} else {
	$log->info("using $use_indexer indexing engine...");
}

# disable indexing when creating marc
$use_indexer = undef if ($marc_normalize);

my $total_rows = 0;
my $start_t = time();

# deferred add_link coderefs, executed after all databases are indexed
my @links;

if ($parallel) {
	$log->info("Using $parallel processes for speedup");
	Proc::Queue::size($parallel);
}

while (my ($database, $db_config) = each %{ $config->databases }) {

	# --only=database/input_filter: skip databases that don't match.
	# FIX: "my ... if $cond" is undefined behavior (perlsyn), so the
	# declaration is separated from the conditional assignment.
	my ($only_database, $only_input);
	($only_database, $only_input) = split(m#/#, $only_filter) if ($only_filter);
	next if ($only_database && $database !~ m/$only_database/i);

	if ($parallel) {
		my $f = fork;
		if (defined($f) and $f == 0) {
			# child continues and indexes this database
			$log->info("Created processes $$ for speedup");
		} else {
			# parent moves on to the next database
			next;
		}
	}

	my $indexer;
	if ($use_indexer) {

		# generate config section name from indexer name
		# (strip everything after first dash)
		my $cfg_name = $use_indexer;
		$cfg_name =~ s/\-.*$//;

		my $indexer_config = $config->get( $cfg_name ) || $log->logdie("can't find '$cfg_name' part in confguration");
		$indexer_config->{database} = $database;
		$indexer_config->{clean} = $clean;
		$indexer_config->{label} = $db_config->{name};

		# force clean if database has links
		$indexer_config->{clean} = 1 if ($db_config->{links});

		if ($use_indexer eq 'hyperestraier') {

			# open Hyper Estraier database
			use WebPAC::Output::Estraier '0.10';
			$indexer = new WebPAC::Output::Estraier( %{ $indexer_config } );

		} elsif ($use_indexer eq 'hyperestraier-native') {

			# open Hyper Estraier database
			use WebPAC::Output::EstraierNative;
			$indexer = new WebPAC::Output::EstraierNative( %{ $indexer_config } );

		} elsif ($use_indexer eq 'kinosearch') {

			# open KinoSearch
			use WebPAC::Output::KinoSearch;
			$indexer_config->{clean} = 1 unless (-e $indexer_config->{index_path});
			$indexer = new WebPAC::Output::KinoSearch( %{ $indexer_config } );

		} else {
			$log->logdie("unknown use_indexer: $use_indexer");
		}

		# FIX: was $log->logide(...) -- typo for logdie
		$log->logdie("can't continue without valid indexer") unless ($indexer);
	}

	#
	# store Hyper Estraier links to other databases
	#
	if (ref($db_config->{links}) eq 'ARRAY' && $use_indexer) {
		foreach my $link (@{ $db_config->{links} }) {
			if ($use_indexer eq 'hyperestraier') {
				if ($merge) {
					print $estcmd_fh 'sudo -u www-data estcmd merge ' . $database . ' ' . $link->{to},$/;
				} else {
					# defer add_link until all databases exist
					$log->info("saving link $database -> $link->{to} [$link->{credit}]");
					push @links, sub {
						$log->info("adding link $database -> $link->{to} [$link->{credit}]");
						$indexer->add_link(
							from => $database,
							to => $link->{to},
							credit => $link->{credit},
						);
					};
				}
			} else {
				$log->warn("NOT IMPLEMENTED WITH $use_indexer: adding link $database -> $link->{to} [$link->{credit}]");
			}
		}
	}
	next if ($only_links);


	#
	# now WebPAC::Store
	#
	my $abs_path = abs_path($0);
	$abs_path =~ s#/[^/]*$#/#;

	my $db_path = $config->get('webpac')->{db_path} . '/' . $database;

	if ($clean) {
		$log->info("creating new database '$database' in $db_path");
		rmtree( $db_path ) || $log->warn("can't remove $db_path: $!");
	} else {
		$log->info("working on database '$database' in $db_path");
	}

	my $db = new WebPAC::Store(
		path => $db_path,
		database => $database,
		debug => $debug,
	);


	#
	# now, iterate through input formats
	#

	my @inputs;
	if (ref($db_config->{input}) eq 'ARRAY') {
		@inputs = @{ $db_config->{input} };
	} elsif ($db_config->{input}) {
		push @inputs, $db_config->{input};
	} else {
		$log->info("database $database doesn't have inputs defined");
	}

	foreach my $input (@inputs) {

		next if ($only_input && ($input->{name} !~ m#$only_input#i && $input->{type} !~ m#$only_input#i));

		my $type = lc($input->{type});

		die "I know only how to handle input types ", join(",", $config->webpac('inputs') ), " not '$type'!\n" unless (grep(/$type/, $config->webpac('inputs')));

		# FIX: remember the lookup file before deleting it from the input
		# config, so the log line below can still report it (the original
		# interpolated $input->{lookup} *after* the delete, logging nothing)
		my $lookup;
		my $lookup_file = $input->{lookup};
		if ($lookup_file) {
			$lookup = new WebPAC::Lookup(
				lookup_file => $lookup_file,
			);
			# don't pass lookup through to the input module
			delete( $input->{lookup} );
		}

		my $input_module = $config->webpac('inputs')->{$type};

		$log->info("working on input '$input->{name}' in $input->{path} [type: $input->{type}] using $input_module",
			$lookup_file ? "lookup '$lookup_file'" : ""
		);

		if ($stats) {
			# disable modification of records if --stats is in use
			delete($input->{modify_records});
			delete($input->{modify_file});
		}

		my $input_db = new WebPAC::Input(
			module => $input_module,
			encoding => $config->webpac('webpac_encoding'),
			limit => $limit || $input->{limit},
			offset => $offset,
			lookup_coderef => sub {
				my $rec = shift || return;
				$lookup->add( $rec );
			},
			recode => $input->{recode},
			stats => $stats,
			modify_records => $input->{modify_records},
			modify_file => $input->{modify_file},
		);
		# FIX: was "unless ($input)" which tested the (always true)
		# config hashref instead of the constructed input object
		$log->logdie("can't create input using $input_module") unless ($input_db);

		my $maxmfn = $input_db->open(
			path => $input->{path},
			code_page => $input->{encoding}, # database encoding
			%{ $input },
		);

		my $report_fh;
		if ($stats || $validate) {
			my $path = "out/report/" . $database . '-' . $input->{name} . '.txt';
			open($report_fh, '>', $path) || $log->logdie("can't open $path: $!");

			print $report_fh "Report for database '$database' input '$input->{name}' records ",
				$offset || 1, "-", $limit || $input->{limit} || $maxmfn, "\n\n";
			$log->info("Generating report file $path");
		}

		my @norm_array = ref($input->{normalize}) eq 'ARRAY' ?
			@{ $input->{normalize} } : ( $input->{normalize} );

		# --marc-normalize overrides normalisation rules from config
		if ($marc_normalize) {
			@norm_array = ( {
				path => $marc_normalize,
				output => $marc_output || 'out/marc/' . $database . '-' . $input->{name} . '.marc',
			} );
		}

		foreach my $normalize (@norm_array) {

			my $normalize_path = $normalize->{path} || $log->logdie("can't find normalize path in config");

			$log->logdie("Found '$normalize_path' as normalization file which isn't supported any more!") unless ( $normalize_path =~ m!\.pl$!i );

			my $rules = read_file( $normalize_path ) or die "can't open $normalize_path: $!";

			$log->info("Using $normalize_path for normalization...");

			# FIX: "my $marc = ... if $cond" is undefined behavior
			# (perlsyn); declare first, assign conditionally
			my $marc;
			$marc = new WebPAC::Output::MARC(
				path => $normalize->{output},
				lint => $marc_lint,
				dump => $marc_dump,
			) if ($normalize->{output});

			# reset position in database
			$input_db->seek(1);

			# generate name of config key for indexer (strip everything after -)
			my $indexer_cfg_name = $use_indexer;
			$indexer_cfg_name =~ s/^(\w+)-?.*$/$1/g if ($indexer_cfg_name);

			foreach my $pos ( 0 ... $input_db->size ) {

				my $row = $input_db->fetch || next;

				my $mfn = $row->{'000'}->[0];

				if (! $mfn || $mfn !~ m#^\d+$#) {
					$log->warn("record $pos doesn't have valid MFN but '$mfn', using $pos");
					$mfn = $pos;
					push @{ $row->{'000'} }, $pos;
				}


				if ($validate) {
					if ( my $errors = $validate->validate_errors( $row, $input_db->dump ) ) {
						$log->error( "MFN $mfn validation error:\n",
							$validate->report_error( $errors )
						);
					}
				}

				# deep copy so per-record keys don't leak
				# between records
				my $ds_config = dclone($db_config);

				# default values -> database key
				$ds_config->{_} = $database;

				# current mfn
				$ds_config->{_mfn} = $mfn;

				# attach current input
				$ds_config->{input} = $input;

				my $ds = WebPAC::Normalize::data_structure(
					row => $row,
					rules => $rules,
					lookup => $lookup ? $lookup->lookup_hash : undef,
					config => $ds_config,
					marc_encoding => 'utf-8',
				);

				$db->save_ds(
					id => $mfn,
					ds => $ds,
					prefix => $input->{name},
				) if ($ds && !$stats);

				$indexer->add(
					id => $input->{name} . "/" . $mfn,
					ds => $ds,
					type => $config->get($indexer_cfg_name)->{type},
				) if ($indexer && $ds);

				if ($marc) {
					my $i = 0;

					while (my $fields = WebPAC::Normalize::_get_marc_fields( fetch_next => 1 ) ) {
						$marc->add(
							id => $mfn . ( $i ? "/$i" : '' ),
							fields => $fields,
							leader => WebPAC::Normalize::marc_leader(),
							row => $row,
						);
						$i++;
					}

					$log->info("Created $i instances of MFN $mfn\n") if ($i > 1);
				}

				$total_rows++;
			}

			if ($validate) {
				my $errors = $validate->report;
				if ($errors) {
					$log->info("validation errors:\n$errors\n" );
					print $report_fh "$errors\n" if ($report_fh);
				}
			}

			if ($stats) {
				my $s = $input_db->stats;
				$log->info("statistics of fields usage:\n$s");
				print $report_fh "Statistics of fields usage:\n$s" if ($report_fh);
			}

			# close MARC file
			$marc->finish if ($marc);

			# close report
			close($report_fh) if ($report_fh);
		}
	}

	eval { $indexer->finish } if ($indexer && $indexer->can('finish'));

	my $dt = time() - $start_t;
	$log->info("$total_rows records ", $indexer ? "indexed " : "",
		sprintf("in %.2f sec [%.2f rec/sec]",
			# guard against division by zero on very fast runs
			$dt, ($dt ? $total_rows / $dt : 0)
		)
	);

	# end forked process
	if ($parallel) {
		$log->info("parallel process $$ finished");
		exit(0);
	}
}

if ($parallel) {
	# wait all children to finish
	sleep(1) while wait != -1;
	$log->info("all parallel processes finished");
}

#
# handle links or merge after indexing
#

if ($merge) {
	print $estcmd_fh 'sudo /etc/init.d/hyperestraier start',$/;
	close($estcmd_fh);
	# FIX: "chmod 0700, $path || warn" bound || to $path; use low
	# precedence "or" so the warning is actually reachable
	chmod(0700, $estcmd_path) or $log->warn("can't chmod 0700 $estcmd_path: $!");
	system $estcmd_path;
} else {
	foreach my $link (@links) {
		# FIX: was Dumper($link), but only Data::Dump's dump() is imported
		$log->logdie("coderef in link ", dump($link), " is ", ref($link), " and not CODE") unless (ref($link) eq 'CODE');
		$link->();
	}
}