source: main/trunk/greenstone2/perllib/plugins/DirectoryPlugin.pm@28489

Last change on this file since 28489 was 28489, checked in by davidb, 11 years ago

Support for Cygwin added

  • Property svn:executable set to *
  • Property svn:keywords set to Author Date Id Revision
File size: 21.9 KB
RevLine 
[537]1###########################################################################
2#
[15870]3# DirectoryPlugin.pm --
[537]4# A component of the Greenstone digital library software
5# from the New Zealand Digital Library Project at the
6# University of Waikato, New Zealand.
7#
8# Copyright (C) 1999 New Zealand Digital Library Project
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation; either version 2 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License
21# along with this program; if not, write to the Free Software
22# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
23#
24###########################################################################
25
[15870]26# DirectoryPlugin is a plugin which recurses through directories, processing
 27# each file it finds - which basically means passing each file down the plugin
 28# pipeline.
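#
# As an illustrative sketch only (exact collect.cfg contents vary between
# collections), a collection that wants directory recursion would normally
# just list this plugin at the end of its plugin pipeline, e.g.:
#
#   plugin MetadataXMLPlugin
#   plugin DirectoryPlugin
#
# with MetadataXMLPlugin handling any metadata.xml files found along the way.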
[4]29
[15870]30package DirectoryPlugin;
[2228]31
[24951]32use extrametautil;
[17738]33use PrintInfo;
[4]34use plugin;
[136]35use util;
[27306]36use FileUtils;
[13188]37use metadatautil;
[4]38
[8737]39use File::Basename;
[10254]40use strict;
41no strict 'refs';
[15870]42no strict 'subs';
43
[13545]44use Encode;
[4]45
46BEGIN {
[17738]47 @DirectoryPlugin::ISA = ('PrintInfo');
[4]48}
49
[4744]50my $arguments =
51 [ { 'name' => "block_exp",
[15870]52 'desc' => "{BasePlugin.block_exp}",
[6408]53 'type' => "regexp",
[4744]54 'deft' => &get_default_block_exp(),
55 'reqd' => "no" },
[13417]56 # this option has been deprecated. leave it here for now so we can warn people not to use it
[4744]57 { 'name' => "use_metadata_files",
[15870]58 'desc' => "{DirectoryPlugin.use_metadata_files}",
[4744]59 'type' => "flag",
[13188]60 'reqd' => "no",
61 'hiddengli' => "yes" },
[7686]62 { 'name' => "recheck_directories",
[15870]63 'desc' => "{DirectoryPlugin.recheck_directories}",
[7686]64 'type' => "flag",
[4744]65 'reqd' => "no" } ];
[13188]66
[15870]67my $options = { 'name' => "DirectoryPlugin",
68 'desc' => "{DirectoryPlugin.desc}",
[6408]69 'abstract' => "no",
[4744]70 'inherits' => "yes",
71 'args' => $arguments };
[3540]72
[4]73sub new {
[10218]74 my ($class) = shift (@_);
75 my ($pluginlist,$inputargs,$hashArgOptLists) = @_;
76 push(@$pluginlist, $class);
[3540]77
[15870]78 push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
79 push(@{$hashArgOptLists->{"OptList"}},$options);
[10218]80
[17738]81 my $self = new PrintInfo($pluginlist, $inputargs, $hashArgOptLists);
[13188]82
83 if ($self->{'info_only'}) {
84 # don't worry about any options or initialisations etc
85 return bless $self, $class;
86 }
87
88 # we have left this option in so we can warn people who are still using it
[2813]89 if ($self->{'use_metadata_files'}) {
[15870]90 die "ERROR: DirectoryPlugin -use_metadata_files option has been deprecated. Please remove the option and add MetadataXMLPlugin to your plugin list instead!\n";
[2813]91 }
[16391]92
[17738]93 $self->{'num_processed'} = 0;
94 $self->{'num_not_processed'} = 0;
95 $self->{'num_blocked'} = 0;
96 $self->{'num_archives'} = 0;
97
[8737]98 $self->{'subdir_extrametakeys'} = {};
99
[4]100 return bless $self, $class;
101}
102
[17738]103# called once, at the start of processing
104sub init {
105 my $self = shift (@_);
106 my ($verbosity, $outhandle, $failhandle) = @_;
107
108 # verbosity is passed through from the processor
109 $self->{'verbosity'} = $verbosity;
110
111 # as are the outhandle and failhandle
112 $self->{'outhandle'} = $outhandle if defined $outhandle;
113 $self->{'failhandle'} = $failhandle;
114
115}
116
117# called once, after all passes have finished
118sub deinit {
119 my ($self) = @_;
120
121}
122
123# called at the beginning of each plugin pass (import has one, building has many)
[10156]124sub begin {
125 my $self = shift (@_);
126 my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;
127
[21586]128 # Only lookup timestamp info for import.pl, and only if incremental is set
[10156]129 my $proc_package_name = ref $processor;
[12969]130 if ($proc_package_name !~ /buildproc$/ && $self->{'incremental'} == 1) {
[21586]131 # Get the infodbtype value for this collection from the arcinfo object
132 my $infodbtype = $processor->getoutputinfo()->{'infodbtype'};
[23171]133 $infodbtype = "gdbm" if $infodbtype eq "gdbm-txtgz"; # in archives, cannot use txtgz version
[10156]134 my $output_dir = $processor->getoutputdir();
[21586]135 my $archives_inf = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $output_dir);
[18441]136
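	# Record how old the archives database is. If 'inf_timestamp' ends up
	# defined, read() below uses it as a signal to skip existing files that
	# are not marked for re-indexing.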
[10156]137 if ( -e $archives_inf ) {
138 $self->{'inf_timestamp'} = -M $archives_inf;
139 }
140 }
141}
142
[21308]143sub remove_all {
[21285]144 my $self = shift (@_);
145 my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;
146}
[21308]147
[21315]148
[21308]149sub remove_one {
150 my $self = shift (@_);
[21315]151 my ($file, $oids, $archivedir) = @_;
152 return undef; # this will never be called for directories (will it??)
153
[21308]154}
155
156
[17738]157# called at the end of each plugin pass
[15870]158sub end {
[17738]159 my ($self) = shift (@_);
[10156]160
[15870]161}
162
163
[17738]164
[4]165# return 1 if this class might recurse using $pluginfo
166sub is_recursive {
167 my $self = shift (@_);
[2813]168
[4]169 return 1;
170}
171
[2228]172sub get_default_block_exp {
173 my $self = shift (@_);
[2813]174
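    # By default, block version-control and housekeeping entries: anything named
    # CVS, .svn, Thumbs.db, OIDcount or .DS_Store, and names ending in "~"
    # (typical editor backup files).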
[26867]175 return '(?i)(CVS|\.svn|Thumbs\.db|OIDcount|\.DS_Store|~)$';
[2228]176}
177
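# Sanity-check a directory before recursing into it. Returns undef if $dirname
# is not a directory, 0 if it is blocked (matches block_exp, looks like a
# collection's archives/index area, or appears to be a cyclic/overly deep
# path), and 1 if it is safe to process.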
[16391]178sub check_directory_path {
[2228]179
[16391]180 my $self = shift(@_);
181 my ($dirname) = @_;
[2813]182
[2228]183 return undef unless (-d $dirname);
[16391]184
[2228]185 return 0 if ($self->{'block_exp'} ne "" && $dirname =~ /$self->{'block_exp'}/);
[7932]186
[16391]187 my $outhandle = $self->{'outhandle'};
188
[2228]189 # check to make sure we're not reading the archives or index directory
190 my $gsdlhome = quotemeta($ENV{'GSDLHOME'});
[2813]191 if ($dirname =~ m/^$gsdlhome\/.*?\/import.*?\/(archives|index)$/) {
[15870]192 print $outhandle "DirectoryPlugin: $dirname appears to be a reference to a Greenstone collection, skipping.\n";
[2228]193 return 0;
[1755]194 }
[2813]195
[1755]196 # check to see we haven't got a cyclic path...
 197 if ($dirname =~ m%(/[^/]*){41,}%) {
[15870]198 print $outhandle "DirectoryPlugin: $dirname is more than 40 directories deep - is this a recursive path? If not, increase the constant in DirectoryPlugin.pm.\n";
[2228]199 return 0;
[1755]200 }
[2813]201
[1755]202 # check to see we haven't got a cyclic path...
203 if ($dirname =~ m%.*?import/(.+?)/import/\1.*%) {
[15870]204 print $outhandle "DirectoryPlugin: $dirname appears to be in a recursive loop...\n";
[2228]205 return 0;
[1755]206 }
[16391]207
208 return 1;
209}
210
211# this may be called more than once
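# Works through $block_hash->{'shared_fileroot'}: for each file prefix that has
# a 'tie_to' extension, the other extensions are blocked and queued up in
# 'assocfile_info' so that read() later attaches them to the tie_to document
# as associated files.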
212sub sort_out_associated_files {
213
214 my $self = shift (@_);
215 my ($block_hash) = @_;
216 if (!scalar (keys %{$block_hash->{'shared_fileroot'}})) {
217 return;
218 }
219
220 $self->{'assocfile_info'} = {} unless defined $self->{'assocfile_info'};
221 my $metadata = $self->{'assocfile_info'};
222 foreach my $prefix (keys %{$block_hash->{'shared_fileroot'}}) {
223 my $record = $block_hash->{'shared_fileroot'}->{$prefix};
224
225 my $tie_to = $record->{'tie_to'};
226 my $exts = $record->{'exts'};
227
228 if ((defined $tie_to) && (scalar (keys %$exts) > 0)) {
229 # set up fileblocks and assocfile_tobe
230 my $base_file = "$prefix$tie_to";
231 $metadata->{$base_file} = {} unless defined $metadata->{$base_file};
232 my $base_file_metadata = $metadata->{$base_file};
233
234 $base_file_metadata->{'gsdlassocfile_tobe'} = [] unless defined $base_file_metadata->{'gsdlassocfile_tobe'};
235 my $assoc_tobe = $base_file_metadata->{'gsdlassocfile_tobe'};
236 foreach my $e (keys %$exts) {
237 # block the file
[23561]238 &util::block_filename($block_hash,"$prefix$e");
[16391]239 # set up as an associated file
240 print STDERR " $self->{'plugin_type'}: Associating $prefix$e with $tie_to version\n";
241 my $mime_type = ""; # let system auto detect this
242 push(@$assoc_tobe,"$prefix$e:$mime_type:");
243
244 }
245 }
246 } # foreach record
247
248 $block_hash->{'shared_fileroot'} = undef;
249 $block_hash->{'shared_fileroot'} = {};
250
251}
252
253
254# do block exp OR special blocking ???
255
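# Returns 1 (and bumps 'num_blocked') if the file appears in the global block
# hash or matches this plugin's own block_exp; returns 0 otherwise. Note the
# Windows (non-Cygwin) case, where block paths are stored lowercased.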
256sub file_is_blocked {
257 my $self = shift (@_);
258 my ($block_hash, $filename_full_path) = @_;
259
[23363]260 $filename_full_path = &util::upgrade_if_dos_filename($filename_full_path);
261
[28489]262 if (($ENV{'GSDLOS'} =~ m/^windows$/) && ($^O ne "cygwin")) {
[23544]263 # on windows, all block paths are lowercased.
264 my $lower_filename = lc ($filename_full_path);
265 if (defined $block_hash->{'file_blocks'}->{$lower_filename}) {
266 $self->{'num_blocked'} ++;
267 return 1;
268 }
[16391]269 }
[23544]270 else {
271 if (defined $block_hash->{'file_blocks'}->{$filename_full_path}) {
272 $self->{'num_blocked'} ++;
273 return 1;
274 }
275 }
[16391]276 # check Directory plugin's own block_exp
277 if ($self->{'block_exp'} ne "" && $filename_full_path =~ /$self->{'block_exp'}/) {
278 $self->{'num_blocked'} ++;
279 return 1; # blocked
280 }
281 return 0;
282}
283
284
285
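# file_block_read is called during the global file scan: it recurses over the
# directory contents so that every plugin gets a chance to block files and to
# register shared file roots before the main read() pass begins.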
286sub file_block_read {
287 my $self = shift (@_);
288 my ($pluginfo, $base_dir, $file, $block_hash, $metadata, $gli) = @_;
289
290 my $outhandle = $self->{'outhandle'};
291 my $verbosity = $self->{'verbosity'};
[2813]292
[16391]293 # Calculate the directory name and ensure it is a directory and
294 # that it is not explicitly blocked.
295 my $dirname = $file;
[27306]296 $dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;
[16391]297
298 my $directory_ok = $self->check_directory_path($dirname);
299 return $directory_ok unless (defined $directory_ok && $directory_ok == 1);
300
[18523]301 print $outhandle "Global file scan checking directory: $dirname\n";
302
[18441]303 $block_hash->{'all_files'} = {} unless defined $block_hash->{'all_files'};
[20577]304 $block_hash->{'metadata_files'} = {} unless defined $block_hash->{'metadata_files'};
[18441]305
[16391]306 $block_hash->{'file_blocks'} = {} unless defined $block_hash->{'file_blocks'};
307 $block_hash->{'shared_fileroot'} = {} unless defined $block_hash->{'shared_fileroot'};
308
309 # Recur over directory contents.
310 my (@dir, $subfile);
311 #my $count = 0;
312
313 print $outhandle "DirectoryPlugin block: getting directory $dirname\n" if ($verbosity > 2);
314
315 # find all the files in the directory
316 if (!opendir (DIR, $dirname)) {
317 if ($gli) {
318 print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
319 }
320 print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
321 return -1; # error in processing
322 }
[27578]323 @dir = sort readdir (DIR);
[16391]324 closedir (DIR);
325
326 for (my $i = 0; $i < scalar(@dir); $i++) {
[23335]327 my $raw_subfile = $dir[$i];
328 next if ($raw_subfile =~ m/^\.\.?$/);
329
[16391]330 my $this_file_base_dir = $base_dir;
[27306]331 my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
[16391]332
[23335]333 # Recursively read each $raw_subfile
 334 print $outhandle "DirectoryPlugin block recursing on: $raw_file_subfile\n" if ($verbosity > 2);
[16391]335
336 #$count += &plugin::file_block_read ($pluginfo, $this_file_base_dir,
[23335]337
[16391]338 &plugin::file_block_read ($pluginfo, $this_file_base_dir,
[23335]339 $raw_file_subfile,
[16391]340 $block_hash, $metadata, $gli);
341
342 }
343 $self->sort_out_associated_files($block_hash);
344 #return $count;
[23419]345 return 1;
[16391]346
347}
[17738]348
349# We don't do metadata_read
350sub metadata_read {
351 my $self = shift (@_);
[19493]352 my ($pluginfo, $base_dir, $file, $block_hash,
353 $extrametakeys, $extrametadata, $extrametafile,
[23212]354 $processor, $gli, $aux) = @_;
[17738]355
356 return undef;
357}
358
359
[16391]360# return number of files processed, undef if can't process
361# Note that $base_dir might be "" and that $file might
362# include directories
363
364# This function passes around metadata hash structures. Metadata hash
365# structures are hashes that map from a (scalar) key (the metadata element
366# name) to either a scalar metadata value or a reference to an array of
367# such values.
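# For example (an illustrative sketch only; element names will vary):
#
#   my $in_metadata = { 'dc.Title'   => "Annual Report",
#                       'dc.Subject' => [ "Fisheries", "Statistics" ] };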
368
369sub read {
370 my $self = shift (@_);
371 my ($pluginfo, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli) = @_;
[17320]372
[16391]373 my $outhandle = $self->{'outhandle'};
374 my $verbosity = $self->{'verbosity'};
[24349]375
[16391]376 # Calculate the directory name and ensure it is a directory and
377 # that it is not explicitly blocked.
378 my $dirname;
379 if ($file eq "") {
380 $dirname = $base_dir;
381 } else {
382 $dirname = $file;
[27306]383 $dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;
[16391]384 }
[24932]385
[16391]386 my $directory_ok = $self->check_directory_path($dirname);
387 return $directory_ok unless (defined $directory_ok && $directory_ok == 1);
388
[2228]389 if (($verbosity > 2) && ((scalar keys %$in_metadata) > 0)) {
[15870]390 print $outhandle "DirectoryPlugin: metadata passed in: ",
[2813]391 join(", ", keys %$in_metadata), "\n";
[2228]392 }
[2813]393
[16391]394
[2228]395 # Recur over directory contents.
396 my (@dir, $subfile);
[6332]397
[16391]398 print $outhandle "DirectoryPlugin read: getting directory $dirname\n" if ($verbosity > 2);
[2813]399
[2228]400 # find all the files in the directory
401 if (!opendir (DIR, $dirname)) {
[9584]402 if ($gli) {
403 print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
404 }
[15870]405 print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
[7362]406 return -1; # error in processing
[2228]407 }
[27578]408 @dir = sort readdir (DIR);
[23335]409 map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dir;
[2228]410 closedir (DIR);
[7686]411
412 # Re-order the files in the list so any directories ending with .all are moved to the end
[8716]413 for (my $i = scalar(@dir) - 1; $i >= 0; $i--) {
[27306]414 if (-d &FileUtils::filenameConcatenate($dirname, $dir[$i]) && $dir[$i] =~ /\.all$/) {
[7686]415 push(@dir, splice(@dir, $i, 1));
416 }
417 }
418
[13188]419 # Set up the metadata structures. We do a metadata_read pass to see if there is any additional metadata, then pass it to read
420
[2228]421 my $additionalmetadata = 0; # is there extra metadata available?
422 my %extrametadata; # maps from filespec to extra metadata keys
[19493]423 my %extrametafile; # maps from filespec to the metadata.xml (or similar) file it came from
[2228]424 my @extrametakeys; # keys of %extrametadata in order read
[8512]425
[16391]426
[24932]427 my $os_dirsep = &util::get_os_dirsep();
[8737]428 my $dirsep = &util::get_dirsep();
[11919]429 my $base_dir_regexp = $base_dir;
430 $base_dir_regexp =~ s/\//$os_dirsep/g;
[24932]431
432 # Want to get relative path of local_dirname within the base_directory
433 # but with URL style slashes.
434 my $local_dirname = &util::filename_within_directory_url_format($dirname, $base_dir);
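	# For example (purely illustrative): with a base_dir of .../collect/demo/import
	# and a dirname of .../collect/demo/import/docs/reports, local_dirname would be
	# roughly "docs/reports" (a trailing slash is added below before it is used as a key).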
435
[22896]436 # if we are in import folder, then local_dirname will be empty
437 if ($local_dirname ne "") {
[24932]438 # look for extra metadata passed down from higher folders
439 $local_dirname .= "/"; # closing slash must be URL type slash also and not $dirsep;
[22896]440 if (defined $self->{'subdir_extrametakeys'}->{$local_dirname}) {
441 my $extrakeys = $self->{'subdir_extrametakeys'}->{$local_dirname};
442 foreach my $ek (@$extrakeys) {
443 my $extrakeys_re = $ek->{'re'};
444 my $extrakeys_md = $ek->{'md'};
445 my $extrakeys_mf = $ek->{'mf'};
[24951]446 &extrametautil::addmetakey(\@extrametakeys, $extrakeys_re);
447 &extrametautil::setmetadata(\%extrametadata, $extrakeys_re, $extrakeys_md);
448 &extrametautil::setmetafile(\%extrametafile, $extrakeys_re, $extrakeys_mf);
[22896]449 }
450 delete($self->{'subdir_extrametakeys'}->{$local_dirname});
[8737]451 }
452 }
[23212]453 # apply metadata pass for each of the files in the directory -- ignore
454 # maxdocs here
[7686]455 my $num_files = scalar(@dir);
[8512]456 for (my $i = 0; $i < scalar(@dir); $i++) {
457 my $subfile = $dir[$i];
[23335]458 next if ($subfile =~ m/^\.\.?$/);
459
[8512]460 my $this_file_base_dir = $base_dir;
[23335]461 my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);
462
[27306]463 my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
464 my $raw_full_filename = &FileUtils::filenameConcatenate($this_file_base_dir, $raw_file_subfile);
[23335]465
466 if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
467 print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for metadata_read\n" if ($verbosity > 2);
[16391]468 next;
469 }
470
[23335]471 # Recursively read each $raw_subfile
 472 print $outhandle "DirectoryPlugin metadata recursing on: $raw_subfile\n" if ($verbosity > 2);
[8512]473
[23212]474 &plugin::metadata_read ($pluginfo, $this_file_base_dir,
[23335]475 $raw_file_subfile,$block_hash,
[23212]476 \@extrametakeys, \%extrametadata,
477 \%extrametafile,
478 $processor, $gli);
[8512]479 $additionalmetadata = 1;
480 }
[16391]481
[8737]482 # filter out any extrametakeys that mention subdirectories and store
483 # for later use (i.e. when that sub-directory is being processed)
[25094]484 foreach my $ek (@extrametakeys) { # where each Extrametakey (which is a filename) is stored as a url-style regex
485
486 my ($subdir_re,$extrakey_dir) = &util::url_fileparse($ek);
487
488 if ($extrakey_dir ne "") {
489 # a subdir was specified
[24951]490 my $md = &extrametautil::getmetadata(\%extrametadata, $ek);
491 my $mf = &extrametautil::getmetafile(\%extrametafile, $ek);
[8737]492
493 my $subdir_extrametakeys = $self->{'subdir_extrametakeys'};
[19493]494 my $subdir_rec = { 're' => $subdir_re, 'md' => $md, 'mf' => $mf };
[15005]495
[24951]496 # when it's looked up, it must be relative to the base dir
[17320]497 push(@{$subdir_extrametakeys->{"$local_dirname$extrakey_dir"}},$subdir_rec);
[8737]498 }
499 }
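    # (Purely illustrative: a metadata.xml FileName spec such as "reports/.*\.pdf"
    # would be held back here and only applied once the "reports" subdirectory is
    # itself being processed.)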
[8512]500
501 # import each of the files in the directory
[23212]502 my $count=0;
[7686]503 for (my $i = 0; $i <= scalar(@dir); $i++) {
504 # When every file in the directory has been done, pause for a moment (figuratively!)
505 # If the -recheck_directories argument hasn't been provided, stop now (default)
506 # Otherwise, re-read the contents of the directory to check for new files
507 # Any new files are added to the @dir list and are processed as normal
508 # This is necessary when documents to be indexed are specified in bibliographic DBs
509 # These files are copied/downloaded and stored in a new folder at import time
510 if ($i == $num_files) {
511 last unless $self->{'recheck_directories'};
512
513 # Re-read the files in the directory to see if there are any new files
514 last if (!opendir (DIR, $dirname));
[27578]515 my @dirnow = sort readdir (DIR);
[23335]516 map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dirnow;
[7686]517 closedir (DIR);
518
519 # We're only interested if there are more files than there were before
520 last if (scalar(@dirnow) <= scalar(@dir));
521
522 # Any new files are added to the end of @dir to get processed by the loop
[8716]523 my $j;
[7686]524 foreach my $subfilenow (@dirnow) {
525 for ($j = 0; $j < $num_files; $j++) {
526 last if ($subfilenow eq $dir[$j]);
527 }
528 if ($j == $num_files) {
529 # New file
530 push(@dir, $subfilenow);
531 }
532 }
533 # When the new files have been processed, check again
534 $num_files = scalar(@dir);
535 }
536
537 my $subfile = $dir[$i];
[9853]538 last if ($maxdocs != -1 && ($count + $total_count) >= $maxdocs);
[2228]539 next if ($subfile =~ /^\.\.?$/);
[3108]540
[23335]541 my $this_file_base_dir = $base_dir;
542 my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);
[16391]543
[27306]544 my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
[23335]545 my $raw_full_filename
[27306]546 = &FileUtils::filenameConcatenate($this_file_base_dir,$raw_file_subfile);
[23335]547
548 if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
549 print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for read\n" if ($verbosity > 2);
[16391]550 next;
551 }
[23335]552 #print STDERR "processing $raw_full_filename\n";
[7932]553 # Follow Windows shortcuts
[28489]554 if ($raw_subfile =~ m/(?i)\.lnk$/ && (($ENV{'GSDLOS'} =~ m/^windows$/i) && ($^O ne "cygwin"))) {
[7932]555 require Win32::Shortcut;
[27306]556 my $shortcut = new Win32::Shortcut(&FileUtils::filenameConcatenate($dirname, $raw_subfile));
[7932]557 if ($shortcut) {
558 # The file to be processed is now the target of the shortcut
559 $this_file_base_dir = "";
560 $file = "";
[23335]561 $raw_subfile = $shortcut->Path;
[7932]562 }
563 }
564
[3108]565 # check for a symlink pointing back to a leading directory
[23335]566 if (-d "$dirname/$raw_subfile" && -l "$dirname/$raw_subfile") {
[3108]567 # readlink gives a "fatal error" on systems that don't implement
 568 # symlinks. This assumes that the -l test above would fail on those.
[23335]569 my $linkdest=readlink "$dirname/$raw_subfile";
[3108]570 if (!defined ($linkdest)) {
571 # system error - file not found?
[15870]572 warn "DirectoryPlugin: symlink problem - $!";
[3108]573 } else {
574 # see if link points to current or a parent directory
575 if ($linkdest =~ m@^[\./\\]+$@ ||
576 index($dirname, $linkdest) != -1) {
[23335]577 warn "DirectoryPlugin: Ignoring recursive symlink ($dirname/$raw_subfile -> $linkdest)\n";
[3108]578 next;
579 ;
580 }
581 }
582 }
583
[23335]584 print $outhandle "DirectoryPlugin: preparing metadata for $raw_subfile\n" if ($verbosity > 2);
[317]585
[23335]586 # Make a copy of $in_metadata to pass to $raw_subfile
[17313]587 my $out_metadata = {};
[13188]588 &metadatautil::combine_metadata_structures($out_metadata, $in_metadata);
[317]589
[16391]590 # check the assocfile_info
[23335]591 if (defined $self->{'assocfile_info'}->{$raw_full_filename}) {
592 &metadatautil::combine_metadata_structures($out_metadata, $self->{'assocfile_info'}->{$raw_full_filename});
[16391]593 }
[23335]594
595 # $subfile by this point is url-encoded => all ASCII chars => no need to encode as UTF8
596
[2228]597 # Next add metadata read in XML files (if it is supplied)
598 if ($additionalmetadata == 1) {
[19493]599 foreach my $filespec (@extrametakeys) {
[23335]600 ## use the url-encoded filename to do the filename comparison
601
602 if ($subfile =~ /^$filespec$/) {
[2228]603 print $outhandle "File \"$subfile\" matches filespec \"$filespec\"\n"
604 if ($verbosity > 2);
[24951]605 my $mdref = &extrametautil::getmetadata(\%extrametadata, $filespec);
606 my $mfref = &extrametautil::getmetafile(\%extrametafile, $filespec);
[19493]607
 608 # Add the list of files where the metadata came from
609 # into the metadata table so we can track this
610 # This mechanism is similar to how gsdlassocfile works
611
612 my @metafile_pair = ();
613 foreach my $l (keys %$mfref) {
614 my $f = $mfref->{$l};
[19516]615 push (@metafile_pair, "$f : $l");
[19493]616 }
617
618 $mdref->{'gsdlmetafile'} = \@metafile_pair;
619
[13188]620 &metadatautil::combine_metadata_structures($out_metadata, $mdref);
[2228]621 }
[4]622 }
623 }
[10156]624
625 if (defined $self->{'inf_timestamp'}) {
[18469]626 # Look to see if it's a completely new file
[10156]627
[23335]628 if (!$block_hash->{'new_files'}->{$raw_full_filename}) {
[18469]629 # Not a new file, must be an existing file
 630 # Let's see if it's newer than the last import.pl
631
632
[23335]633 if (! -d $raw_full_filename) {
634 if (!$block_hash->{'reindex_files'}->{$raw_full_filename}) {
[20577]635 # filename has been around for longer than inf_timestamp
[18469]636 print $outhandle "**** Skipping $subfile\n" if ($verbosity >3);
637 next;
638 }
639 else {
640 # Remove old folder in archives (might hash to something different)
641 # *** should be doing this on a Del one as well
 642 # but leave folder name?? and ensure hashes to
643 # same again??
644
645 # Then let through as new doc??
646
647 # mark to doc-oids that rely on it for re-indexing
648 }
[10156]649 }
650 }
651 }
652
[2228]653 # Recursively read each $subfile
[15870]654 print $outhandle "DirectoryPlugin recursing on: $subfile\n" if ($verbosity > 2);
[8512]655
656 $count += &plugin::read ($pluginfo, $this_file_base_dir,
[23335]657 $raw_file_subfile, $block_hash,
[9853]658 $out_metadata, $processor, $maxdocs, ($total_count + $count), $gli);
[4]659 }
[7686]660
[8512]661 return $count;
[2228]662}
[4]663
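# Nothing is added here: although DirectoryPlugin keeps its own counters (see
# new()), it does not fold them into the overall $stats structure.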
[17738]664sub compile_stats {
665 my $self = shift(@_);
666 my ($stats) = @_;
667}
668
[4]6691;