root/main/trunk/greenstone2/perllib/plugins/DirectoryPlugin.pm @ 24971

Revision 24971, 22.7 KB (checked in by ak19, 8 years ago)

1. Introduced the util::filepath_to_url_format subroutine, which converts filenames to URL-style filenames so that they match the slashes used in the filename regexes of the extrameta keys used to index into the extrameta data structures.
2. Fixed a bug on Windows where metadata.xml specifies filenames as regexes with a backslash before the file extension's period: DirectoryPlugin needed to unregex the filepath before calling fileparse on it, otherwise the escaping backslash would interfere with Perl's fileparse routine (only on Windows, since a backslash also represents a dirsep there).
3. Updated all the Perl plugins where the new util::filepath_to_url_format needs to be called, so that they use URL-style filenames (thereafter regexed) to index into the extrameta data structures.

  • Property svn:executable set to *
  • Property svn:keywords set to Author Date Id Revision
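The log's point 1 suggests that util::filepath_to_url_format simply normalises native dirseps (backslashes on Windows) to the forward slashes used in the extrameta regex keys. A minimal standalone sketch of that assumed behaviour (a hypothetical stand-in, not the actual util.pm implementation) is:

  #!/usr/bin/perl
  use strict;
  use warnings;

  # Hypothetical stand-in for util::filepath_to_url_format: convert a native
  # filepath to a URL-style one by replacing backslashes with forward slashes.
  sub filepath_to_url_format {
      my ($filepath) = @_;
      $filepath =~ s/\\/\//g;
      return $filepath;
  }

  # prints "C:/collect/demo/import/a.txt"
  print filepath_to_url_format('C:\collect\demo\import\a.txt'), "\n";

The plugins mentioned in point 3 then pass the converted name through util::filename_to_regex (as DirectoryPlugin itself does below) before using it to look up entries in the extrametadata/extrametafile structures.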
###########################################################################
#
# DirectoryPlugin.pm --
# A component of the Greenstone digital library software
# from the New Zealand Digital Library Project at the
# University of Waikato, New Zealand.
#
# Copyright (C) 1999 New Zealand Digital Library Project
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
###########################################################################

# DirectoryPlugin is a plugin which recurses through directories processing
# each file it finds - which basically means passing it down the plugin
# pipeline
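# (In a typical Greenstone 2 collection this plugin is enabled by a line such
# as "plugin DirectoryPlugin" in the collection's collect.cfg, usually near
# the end of the plugin list.)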

package DirectoryPlugin;

use extrametautil;
use PrintInfo;
use plugin;
use util;
use metadatautil;
use dbutil;   # for &dbutil::get_infodb_file_path() called in begin()
use unicode;  # for the &unicode:: filename conversions called in read()

use File::Basename;
use strict;
no strict 'refs';
no strict 'subs';

use Encode;

BEGIN {
    @DirectoryPlugin::ISA = ('PrintInfo');
}

my $arguments =
    [ { 'name' => "block_exp",
    'desc' => "{BasePlugin.block_exp}",
    'type' => "regexp",
    'deft' => &get_default_block_exp(),
    'reqd' => "no" },
      # this option has been deprecated. leave it here for now so we can warn people not to use it
      { 'name' => "use_metadata_files",
    'desc' => "{DirectoryPlugin.use_metadata_files}",
    'type' => "flag",
    'reqd' => "no",
    'hiddengli' => "yes" },
      { 'name' => "recheck_directories",
    'desc' => "{DirectoryPlugin.recheck_directories}",
    'type' => "flag",
    'reqd' => "no" } ];

my $options = { 'name'     => "DirectoryPlugin",
        'desc'     => "{DirectoryPlugin.desc}",
        'abstract' => "no",
        'inherits' => "yes",
        'args'     => $arguments };

sub new {
    my ($class) = shift (@_);
    my ($pluginlist,$inputargs,$hashArgOptLists) = @_;
    push(@$pluginlist, $class);

    push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
    push(@{$hashArgOptLists->{"OptList"}},$options);

    my $self = new PrintInfo($pluginlist, $inputargs, $hashArgOptLists);

    if ($self->{'info_only'}) {
    # don't worry about any options or initialisations etc
    return bless $self, $class;
    }

    # we have left this option in so we can warn people who are still using it
    if ($self->{'use_metadata_files'}) {
    die "ERROR: DirectoryPlugin -use_metadata_files option has been deprecated. Please remove the option and add MetadataXMLPlugin to your plugin list instead!\n";
    }

    $self->{'num_processed'} = 0;
    $self->{'num_not_processed'} = 0;
    $self->{'num_blocked'} = 0;
    $self->{'num_archives'} = 0;

    $self->{'subdir_extrametakeys'} = {};

    return bless $self, $class;
}

# called once, at the start of processing
sub init {
    my $self = shift (@_);
    my ($verbosity, $outhandle, $failhandle) = @_;

    # verbosity is passed through from the processor
    $self->{'verbosity'} = $verbosity;

    # as are the outhandle and failhandle
    $self->{'outhandle'} = $outhandle if defined $outhandle;
    $self->{'failhandle'} = $failhandle;

}

# called once, after all passes have finished
sub deinit {
    my ($self) = @_;

}

# called at the beginning of each plugin pass (import has one, building has many)
sub begin {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;

    # Only lookup timestamp info for import.pl, and only if incremental is set
    my $proc_package_name = ref $processor;
    if ($proc_package_name !~ /buildproc$/ && $self->{'incremental'} == 1) {
        # Get the infodbtype value for this collection from the arcinfo object
        my $infodbtype = $processor->getoutputinfo()->{'infodbtype'};
    $infodbtype = "gdbm" if $infodbtype eq "gdbm-txtgz"; # in archives, cannot use txtgz version
    my $output_dir = $processor->getoutputdir();
        my $archives_inf = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $output_dir);

    if ( -e $archives_inf ) {
        $self->{'inf_timestamp'} = -M $archives_inf;
    }
    }
}

sub remove_all {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;
}


sub remove_one {
    my $self = shift (@_);
    my ($file, $oids, $archivedir) = @_;
    return undef; # this will never be called for directories (will it??)

}


# called at the end of each plugin pass
sub end {
    my ($self) = shift (@_);

}



# return 1 if this class might recurse using $pluginfo
sub is_recursive {
    my $self = shift (@_);

    return 1;
}

sub get_default_block_exp {
    my $self = shift (@_);

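    # e.g. this blocks CVS and .svn directories, Thumbs.db, OIDcount files,
    # and editor backup files ending in '~' (matched case-insensitively)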
    return '(?i)(CVS|\.svn|Thumbs\.db|OIDcount|~)$';
}

sub check_directory_path {

    my $self = shift(@_);
    my ($dirname) = @_;

    return undef unless (-d $dirname);

    return 0 if ($self->{'block_exp'} ne "" && $dirname =~ /$self->{'block_exp'}/);

    my $outhandle = $self->{'outhandle'};

    # check to make sure we're not reading the archives or index directory
    my $gsdlhome = quotemeta($ENV{'GSDLHOME'});
    if ($dirname =~ m/^$gsdlhome\/.*?\/import.*?\/(archives|index)$/) {
    print $outhandle "DirectoryPlugin: $dirname appears to be a reference to a Greenstone collection, skipping.\n";
        return 0;
    }

    # check to see we haven't got a cyclic path...
    if ($dirname =~ m%(/.*){41,}%) {
    print $outhandle "DirectoryPlugin: $dirname is more than 40 directories deep. Is this a recursive path? If not, increase the constant in DirectoryPlugin.pm.\n";
    return 0;
    }

    # check to see we haven't got a cyclic path...
    if ($dirname =~ m%.*?import/(.+?)/import/\1.*%) {
    print $outhandle "DirectoryPlugin: $dirname appears to be in a recursive loop...\n";
    return 0;
    }

    return 1;
}

# this may be called more than once
sub sort_out_associated_files {

    my $self = shift (@_);
    my ($block_hash) = @_;
    if (!scalar (keys %{$block_hash->{'shared_fileroot'}})) {
    return;
    }

    $self->{'assocfile_info'} = {} unless defined $self->{'assocfile_info'};
    my $metadata = $self->{'assocfile_info'};
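    # For illustration only: each 'shared_fileroot' record is assumed to look
    # something like
    #   $block_hash->{'shared_fileroot'}->{'/import/talk'} =
    #       { 'tie_to' => '.ppt', 'exts' => { '.pdf' => 1, '.doc' => 1 } };
    # i.e. the .pdf and .doc variants get blocked and attached to talk.ppt.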
    foreach my $prefix (keys %{$block_hash->{'shared_fileroot'}}) {
    my $record = $block_hash->{'shared_fileroot'}->{$prefix};

    my $tie_to = $record->{'tie_to'};
    my $exts = $record->{'exts'};

    if ((defined $tie_to) && (scalar (keys %$exts) > 0)) {
        # set up fileblocks and assocfile_tobe
        my $base_file = "$prefix$tie_to";
        $metadata->{$base_file} = {} unless defined $metadata->{$base_file};
        my $base_file_metadata = $metadata->{$base_file};

        $base_file_metadata->{'gsdlassocfile_tobe'} = [] unless defined $base_file_metadata->{'gsdlassocfile_tobe'};
        my $assoc_tobe = $base_file_metadata->{'gsdlassocfile_tobe'};
        foreach my $e (keys %$exts) {
        # block the file
        &util::block_filename($block_hash,"$prefix$e");
        # set up as an associated file
        print STDERR "  $self->{'plugin_type'}: Associating $prefix$e with $tie_to version\n";
        my $mime_type = ""; # let system auto detect this
        push(@$assoc_tobe,"$prefix$e:$mime_type:");

        }
    }
    } # foreach record

    $block_hash->{'shared_fileroot'} = undef;
    $block_hash->{'shared_fileroot'} = {};

}


# do block exp OR special blocking ???

sub file_is_blocked {
    my $self = shift (@_);
    my ($block_hash, $filename_full_path) = @_;

    $filename_full_path = &util::upgrade_if_dos_filename($filename_full_path);
###    print STDERR "*** DirectoryPlugin::file_is_blocked $filename_full_path\n";

    if ($ENV{'GSDLOS'} =~ m/^windows$/) {
    # on windows, all block paths are lowercased.
    my $lower_filename = lc ($filename_full_path);
    if (defined $block_hash->{'file_blocks'}->{$lower_filename}) {
        $self->{'num_blocked'} ++;
        return 1;
    }
    }
    else {
    if (defined $block_hash->{'file_blocks'}->{$filename_full_path}) {
        $self->{'num_blocked'} ++;
        return 1;
    }
    }
    # check Directory plugin's own block_exp
    if ($self->{'block_exp'} ne "" && $filename_full_path =~ /$self->{'block_exp'}/) {
    $self->{'num_blocked'} ++;
    return 1; # blocked
    }
    return 0;
}



sub file_block_read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $metadata, $gli) = @_;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname = $file;
    $dirname = &util::filename_cat ($base_dir, $file) if $base_dir =~ /\w/;

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    print $outhandle "Global file scan checking directory: $dirname\n";

    $block_hash->{'all_files'} = {} unless defined $block_hash->{'all_files'};
    $block_hash->{'metadata_files'} = {} unless defined $block_hash->{'metadata_files'};

    $block_hash->{'file_blocks'} = {} unless defined $block_hash->{'file_blocks'};
    $block_hash->{'shared_fileroot'} = {} unless defined $block_hash->{'shared_fileroot'};

    # Recurse over directory contents.
    my (@dir, $subfile);
    #my $count = 0;

    print $outhandle "DirectoryPlugin block: getting directory $dirname\n" if ($verbosity > 2);

    # find all the files in the directory
    if (!opendir (DIR, $dirname)) {
    if ($gli) {
        print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
    }
    print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
    return -1; # error in processing
    }
    @dir = readdir (DIR);
    closedir (DIR);

    for (my $i = 0; $i < scalar(@dir); $i++) {
    my $raw_subfile = $dir[$i];
    next if ($raw_subfile =~ m/^\.\.?$/);

    my $this_file_base_dir = $base_dir;
    my $raw_file_subfile = &util::filename_cat($file, $raw_subfile);

    # Recursively read each $raw_subfile
    print $outhandle "DirectoryPlugin block recursing on: $raw_file_subfile\n" if ($verbosity > 2);

    #$count += &plugin::file_block_read ($pluginfo, $this_file_base_dir,

    &plugin::file_block_read ($pluginfo, $this_file_base_dir,
                  $raw_file_subfile,
                  $block_hash, $metadata, $gli);

    }
    $self->sort_out_associated_files($block_hash);
    #return $count;
    return 1;

}

# We don't do metadata_read
sub metadata_read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash,
    $extrametakeys, $extrametadata, $extrametafile,
    $processor, $gli, $aux) = @_;

    return undef;
}


# return number of files processed, undef if can't process
# Note that $base_dir might be "" and that $file might
# include directories

# This function passes around metadata hash structures.  Metadata hash
# structures are hashes that map from a (scalar) key (the metadata element
# name) to either a scalar metadata value or a reference to an array of
# such values.
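# For example (illustrative values only), such a structure might look like:
#     { 'Title'      => "A single title",
#       'dc.Subject' => [ "First subject", "Second subject" ] }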

sub read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli) = @_;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname;
    if ($file eq "") {
    $dirname = $base_dir;
    } else {
    $dirname = $file;
    $dirname = &util::filename_cat ($base_dir, $file) if $base_dir =~ /\w/;
    }

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    if (($verbosity > 2) && ((scalar keys %$in_metadata) > 0)) {
        print $outhandle "DirectoryPlugin: metadata passed in: ",
    join(", ", keys %$in_metadata), "\n";
    }


    # Recurse over directory contents.
    my (@dir, $subfile);

    print $outhandle "DirectoryPlugin read: getting directory $dirname\n" if ($verbosity > 2);

    # find all the files in the directory
    if (!opendir (DIR, $dirname)) {
    if ($gli) {
        print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
    }
    print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
    return -1; # error in processing
    }
    @dir = readdir (DIR);
    map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dir;
    closedir (DIR);

    # Re-order the files in the list so any directories ending with .all are moved to the end
    for (my $i = scalar(@dir) - 1; $i >= 0; $i--) {
    if (-d &util::filename_cat($dirname, $dir[$i]) && $dir[$i] =~ /\.all$/) {
        push(@dir, splice(@dir, $i, 1));
    }
    }

    # setup the metadata structures. we do a metadata_read pass to see if there is any additional metadata, then pass it to read

    my $additionalmetadata = 0;      # is there extra metadata available?
    my %extrametadata;               # maps from filespec to extra metadata keys
    my %extrametafile;               # maps from filespec to the metadata.xml (or similar) file it came from
    my @extrametakeys;               # keys of %extrametadata in order read


    my $os_dirsep = &util::get_os_dirsep();
    my $dirsep    = &util::get_dirsep();
    my $base_dir_regexp = $base_dir;
    $base_dir_regexp =~ s/\//$os_dirsep/g;

    # Want to get relative path of local_dirname within the base_directory
    # but with URL style slashes.
    my $local_dirname = &util::filename_within_directory_url_format($dirname, $base_dir);

    # if we are in import folder, then local_dirname will be empty
    if ($local_dirname ne "") {
    # look for extra metadata passed down from higher folders
    $local_dirname .= "/"; # closing slash must be URL type slash also and not $dirsep;
    if (defined $self->{'subdir_extrametakeys'}->{$local_dirname}) {
        my $extrakeys = $self->{'subdir_extrametakeys'}->{$local_dirname};
        foreach my $ek (@$extrakeys) {
        my $extrakeys_re  = $ek->{'re'};
        my $extrakeys_md  = $ek->{'md'};
        my $extrakeys_mf  = $ek->{'mf'};
        &extrametautil::addmetakey(\@extrametakeys, $extrakeys_re);
        &extrametautil::setmetadata(\%extrametadata, $extrakeys_re, $extrakeys_md);
        &extrametautil::setmetafile(\%extrametafile, $extrakeys_re, $extrakeys_mf);
        }
        delete($self->{'subdir_extrametakeys'}->{$local_dirname});
    }
    }
    # apply metadata pass for each of the files in the directory -- ignore
    # maxdocs here
    my $num_files = scalar(@dir);
    for (my $i = 0; $i < scalar(@dir); $i++) {
    my $subfile = $dir[$i];
    next if ($subfile =~ m/^\.\.?$/);

    my $this_file_base_dir = $base_dir;
    my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);

    my $raw_file_subfile = &util::filename_cat($file, $raw_subfile);
    my $raw_full_filename = &util::filename_cat($this_file_base_dir, $raw_file_subfile);

    if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
        print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for metadata_read\n" if ($verbosity > 2);
        next;
    }

    # Recursively read each $raw_subfile
    print $outhandle "DirectoryPlugin metadata recursing on: $raw_subfile\n" if ($verbosity > 2);

    &plugin::metadata_read ($pluginfo, $this_file_base_dir,
                $raw_file_subfile,$block_hash,
                \@extrametakeys, \%extrametadata,
                \%extrametafile,
                $processor, $gli);
    $additionalmetadata = 1;
    }

    # filter out any extrametakeys that mention subdirectories and store
    # for later use (i.e. when that sub-directory is being processed)
    foreach my $ek (@extrametakeys) { # where each Extrametakey (which is a filename) is stored as a regex
    my $ek_non_re = &util::unregex_filename($ek); # unregex it. This step is *also* required for the fileparse operation since the file suffix
                                                  # will be specified as \.txt and we don't want fileparse to split it there making \.txt the "filename"
    my ($subdir_re,$extrakey_dir) = &File::Basename::fileparse($ek_non_re);
    my $dirsep_re = &util::get_re_dirsep();
    if ($ek_non_re =~ m/$dirsep_re/) { # specifies at least one directory
        my $md = &extrametautil::getmetadata(\%extrametadata, $ek);
        my $mf = &extrametautil::getmetafile(\%extrametafile, $ek);

        my $subdir_extrametakeys = $self->{'subdir_extrametakeys'};

    # Indexing into the extrameta data structures requires the filename's style of slashes to be in URL format
    # Then need to convert the filename to a regex, no longer to protect windows directory chars \, but for
    # protecting special characters like brackets in the filepath such as "C:\Program Files (x86)\Greenstone".
    $subdir_re = &util::filepath_to_url_format($subdir_re); # Possibly not necessary since subdir_re is the last segment of the filepath $ek ####
    $subdir_re = &util::filename_to_regex($subdir_re);      # Escape any special chars like brackets and . in subdir_re
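    # Illustrative example (hypothetical filenames): an extrametakey of
    # "sub\-dir/image\.jpg" is unregexed to "sub-dir/image.jpg"; fileparse then
    # gives $extrakey_dir = "sub-dir/" and $subdir_re = "image.jpg", and the two
    # lines above turn the latter back into the regex "image\.jpg", ready to be
    # matched once sub-dir/ itself is processed.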

        my $subdir_rec = { 're' => $subdir_re, 'md' => $md, 'mf' => $mf };

        # when it's looked up, it must be relative to the base dir
        push(@{$subdir_extrametakeys->{"$local_dirname$extrakey_dir"}},$subdir_rec);
        #push(@{$subdir_extrametakeys->{"$extrakey_dir"}},$subdir_rec);
    }
    }

    # import each of the files in the directory
    my $count=0;
    for (my $i = 0; $i <= scalar(@dir); $i++) {
    # When every file in the directory has been done, pause for a moment (figuratively!)
    # If the -recheck_directories argument hasn't been provided, stop now (default)
    # Otherwise, re-read the contents of the directory to check for new files
    #   Any new files are added to the @dir list and are processed as normal
    #   This is necessary when documents to be indexed are specified in bibliographic DBs
    #   These files are copied/downloaded and stored in a new folder at import time
    if ($i == $num_files) {
        last unless $self->{'recheck_directories'};

        # Re-read the files in the directory to see if there are any new files
        last if (!opendir (DIR, $dirname));
        my @dirnow = readdir (DIR);
        map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dirnow;
        closedir (DIR);

        # We're only interested if there are more files than there were before
        last if (scalar(@dirnow) <= scalar(@dir));

        # Any new files are added to the end of @dir to get processed by the loop
        my $j;
        foreach my $subfilenow (@dirnow) {
        for ($j = 0; $j < $num_files; $j++) {
            last if ($subfilenow eq $dir[$j]);
        }
        if ($j == $num_files) {
            # New file
            push(@dir, $subfilenow);
        }
        }
        # When the new files have been processed, check again
        $num_files = scalar(@dir);
    }

    my $subfile = $dir[$i];
    last if ($maxdocs != -1 && ($count + $total_count) >= $maxdocs);
    next if ($subfile =~ /^\.\.?$/);

    my $this_file_base_dir = $base_dir;
    my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);

    my $raw_file_subfile = &util::filename_cat($file, $raw_subfile);
    my $raw_full_filename
        = &util::filename_cat($this_file_base_dir,$raw_file_subfile);

    if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
        print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for read\n" if ($verbosity > 2);
        next;
    }
    #print STDERR "processing $raw_full_filename\n";
    # Follow Windows shortcuts
    if ($raw_subfile =~ /(?i)\.lnk$/ && $ENV{'GSDLOS'} =~ /^windows$/i) {
        require Win32::Shortcut;
        my $shortcut = new Win32::Shortcut(&util::filename_cat($dirname, $raw_subfile));
        if ($shortcut) {
        # The file to be processed is now the target of the shortcut
        $this_file_base_dir = "";
        $file = "";
        $raw_subfile = $shortcut->Path;
        }
    }

    # check for a symlink pointing back to a leading directory
    if (-d "$dirname/$raw_subfile" && -l "$dirname/$raw_subfile") {
        # readlink gives a "fatal error" on systems that don't implement
        # symlinks. This assumes that the -l test above would fail on those.
        my $linkdest=readlink "$dirname/$raw_subfile";
        if (!defined ($linkdest)) {
        # system error - file not found?
        warn "DirectoryPlugin: symlink problem - $!";
        } else {
        # see if link points to current or a parent directory
        if ($linkdest =~ m@^[\./\\]+$@ ||
            index($dirname, $linkdest) != -1) {
            warn "DirectoryPlugin: Ignoring recursive symlink ($dirname/$raw_subfile -> $linkdest)\n";
            next;
        }
        }
    }

    print $outhandle "DirectoryPlugin: preparing metadata for $raw_subfile\n" if ($verbosity > 2);

    # Make a copy of $in_metadata to pass to $raw_subfile
    my $out_metadata = {};
    &metadatautil::combine_metadata_structures($out_metadata, $in_metadata);

    # check the assocfile_info
    if (defined $self->{'assocfile_info'}->{$raw_full_filename}) {
        &metadatautil::combine_metadata_structures($out_metadata, $self->{'assocfile_info'}->{$raw_full_filename});
    }

    # $subfile by this point is url-encoded => all ASCII chars => no need to encode as UTF8

    # Next add metadata read in XML files (if it is supplied)
    if ($additionalmetadata == 1) {
        foreach my $filespec (@extrametakeys) {
        ## use the url-encoded filename to do the filename comparison

        if ($subfile =~ /^$filespec$/) {
            print $outhandle "File \"$subfile\" matches filespec \"$filespec\"\n"
            if ($verbosity > 2);
            my $mdref = &extrametautil::getmetadata(\%extrametadata, $filespec);
            my $mfref = &extrametautil::getmetafile(\%extrametafile, $filespec);

            # Add the list of files where the metadata came from
            # into the metadata table so we can track this
            # This mechanism is similar to how gsdlassocfile works

            my @metafile_pair = ();
            foreach my $l (keys %$mfref) {
            my $f = $mfref->{$l};
            push (@metafile_pair, "$f : $l");
            }

            $mdref->{'gsdlmetafile'} = \@metafile_pair;

            &metadatautil::combine_metadata_structures($out_metadata, $mdref);
        }
        }
    }

    if (defined $self->{'inf_timestamp'}) {
        # Look to see if it's a completely new file

        if (!$block_hash->{'new_files'}->{$raw_full_filename}) {
        # Not a new file, must be an existing file
        # Let's see if it's newer than the last import.pl


        if (! -d $raw_full_filename) {
            if (!$block_hash->{'reindex_files'}->{$raw_full_filename}) {
            # filename has been around for longer than inf_timestamp
            print $outhandle "**** Skipping $subfile\n" if ($verbosity >3);
            next;
            }
            else {
            # Remove old folder in archives (might hash to something different)
            # *** should be doing this on a Del one as well
            # but leave folder name?? and ensure hashes to
            # same again??

            # Then let through as new doc??

            # mark to doc-oids that rely on it for re-indexing
            }
        }
        }
    }

    # Recursively read each $subfile
    print $outhandle "DirectoryPlugin recursing on: $subfile\n" if ($verbosity > 2);

    $count += &plugin::read ($pluginfo, $this_file_base_dir,
                 $raw_file_subfile, $block_hash,
                 $out_metadata, $processor, $maxdocs, ($total_count + $count), $gli);
    }

    return $count;
}

sub compile_stats {
    my $self = shift(@_);
    my ($stats) = @_;
}

1;