1 | ###########################################################################
|
---|
2 | #
|
---|
3 | # DirectoryPlugin.pm --
|
---|
4 | # A component of the Greenstone digital library software
|
---|
5 | # from the New Zealand Digital Library Project at the
|
---|
6 | # University of Waikato, New Zealand.
|
---|
7 | #
|
---|
8 | # Copyright (C) 1999 New Zealand Digital Library Project
|
---|
9 | #
|
---|
10 | # This program is free software; you can redistribute it and/or modify
|
---|
11 | # it under the terms of the GNU General Public License as published by
|
---|
12 | # the Free Software Foundation; either version 2 of the License, or
|
---|
13 | # (at your option) any later version.
|
---|
14 | #
|
---|
15 | # This program is distributed in the hope that it will be useful,
|
---|
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
|
---|
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
---|
18 | # GNU General Public License for more details.
|
---|
19 | #
|
---|
20 | # You should have received a copy of the GNU General Public License
|
---|
21 | # along with this program; if not, write to the Free Software
|
---|
22 | # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
---|
23 | #
|
---|
24 | ###########################################################################
|
---|
25 |
|
---|
26 | # DirectoryPlugin is a plugin which recurses through directories processing
|
---|
27 | # each file it finds - which basically means passing it down the plugin
|
---|
28 | # pipeline
|
---|
29 |
|
---|
30 | package DirectoryPlugin;
|
---|
31 |
|
---|
32 | use extrametautil;
|
---|
33 | use PrintInfo;
|
---|
34 | use plugin;
|
---|
35 | use util;
|
---|
36 | use FileUtils;
|
---|
37 | use metadatautil;
|
---|
38 |
|
---|
39 | use File::Basename;
|
---|
40 | use strict;
|
---|
41 | no strict 'refs';
|
---|
42 | no strict 'subs';
|
---|
43 |
|
---|
44 | use Encode::Locale;
|
---|
45 | use Encode;
|
---|
46 | use Unicode::Normalize;
|
---|
47 |
|
---|
# Establish inheritance at compile time: DirectoryPlugin derives from
# PrintInfo, which supplies argument parsing and plugin-info printing.
BEGIN {
    @DirectoryPlugin::ISA = ('PrintInfo');
}
|
---|
51 |
|
---|
# Declarations of the options this plugin accepts (merged into the
# PrintInfo/GLI argument machinery via the constructor below).
my $arguments =
    [ { 'name' => "block_exp",
	'desc' => "{BasePlugin.block_exp}",
	'type' => "regexp",
	# Default computed at load time from get_default_block_exp() below.
	'deft' => &get_default_block_exp(),
	'reqd' => "no" },
      # this option has been deprecated. leave it here for now so we can warn people not to use it
      { 'name' => "use_metadata_files",
	'desc' => "{DirectoryPlugin.use_metadata_files}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes" },
      # Re-scan each directory after processing it, picking up files that
      # appeared during the import pass (see read_phase2).
      { 'name' => "recheck_directories",
	'desc' => "{DirectoryPlugin.recheck_directories}",
	'type' => "flag",
	'reqd' => "no" } ];
|
---|
68 |
|
---|
# Plugin self-description record; the '{...}' strings are resource-bundle
# keys resolved for display (e.g. in GLI).
my $options = { 'name' => "DirectoryPlugin",
		'desc' => "{DirectoryPlugin.desc}",
		'abstract' => "no",
		'inherits' => "yes",
		'args' => $arguments };
|
---|
74 |
|
---|
# Constructor. Registers this plugin class on $pluginlist, merges this
# plugin's argument/option declarations into the shared tables, then
# delegates to the PrintInfo base constructor.
#
# Dies if the deprecated -use_metadata_files option is supplied.
sub new {
    my ($class) = shift (@_);
    my ($pluginlist,$inputargs,$hashArgOptLists) = @_;
    push(@$pluginlist, $class);

    push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
    push(@{$hashArgOptLists->{"OptList"}},$options);

    # Direct method-call syntax instead of the fragile indirect object
    # syntax ("new PrintInfo(...)"), which Perl can misparse.
    my $self = PrintInfo->new($pluginlist, $inputargs, $hashArgOptLists);

    if ($self->{'info_only'}) {
	# don't worry about any options or initialisations etc
	return bless $self, $class;
    }

    # we have left this option in so we can warn people who are still using it
    if ($self->{'use_metadata_files'}) {
	die "ERROR: DirectoryPlugin -use_metadata_files option has been deprecated. Please remove the option and add MetadataXMLPlug to your plugin list instead!\n";
    }

    # Per-run statistics counters.
    $self->{'num_processed'} = 0;
    $self->{'num_not_processed'} = 0;
    $self->{'num_blocked'} = 0;
    $self->{'num_archives'} = 0;

    # Maps subdirectory (URL-style, trailing slash) -> list of extra
    # metadata records to apply when that subdirectory is processed.
    $self->{'subdir_extrametakeys'} = {};

    return bless $self, $class;
}
|
---|
104 |
|
---|
105 | # called once, at the start of processing
|
---|
sub init {
    my $self = shift (@_);
    my ($verbosity, $outhandle, $failhandle) = @_;

    # Record the settings handed down by the processor. Note that the
    # outhandle is only overwritten when one is actually supplied.
    $self->{'verbosity'} = $verbosity;
    $self->{'outhandle'} = $outhandle if defined $outhandle;
    $self->{'failhandle'} = $failhandle;

    print "[INFO] This DirectoryPlugin supports version 2 manifest files\n"
	if ($self->{'verbosity'} > 2);
}
|
---|
121 |
|
---|
122 | # called once, after all passes have finished
|
---|
sub deinit {
    my ($self) = @_;
    # Nothing to release: this plugin holds no handles or caches that
    # need tearing down after all passes complete.
}
|
---|
127 |
|
---|
128 | # called at the beginning of each plugin pass (import has one, building has many)
|
---|
sub begin {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;

    # Timestamp information is only relevant for import.pl (i.e. not a
    # *buildproc processor) and only when running incrementally.
    my $proc_pkg = ref $processor;
    if ($proc_pkg !~ /buildproc$/ && $self->{'incremental'} == 1) {
	# Get the infodbtype value for this collection from the arcinfo object
	my $db_type = $processor->getoutputinfo()->{'infodbtype'};
	# in archives, cannot use txtgz version
	$db_type = "gdbm" if $db_type eq "gdbm-txtgz";
	my $out_dir = $processor->getoutputdir();
	my $archives_inf = &dbutil::get_infodb_file_path($db_type, "archiveinf-doc", $out_dir);

	# Remember the archive database's age (in days, via -M) so read()
	# can skip files that predate the last import.
	$self->{'inf_timestamp'} = -M $archives_inf if ( -e $archives_inf );
    }
}
|
---|
147 |
|
---|
sub remove_all {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;
    # Intentionally a no-op: directories themselves produce no archive
    # entries that would need removing.
}
|
---|
152 |
|
---|
153 |
|
---|
sub remove_one {
    my $self = shift (@_);
    my ($file, $oids, $archivedir) = @_;
    # Directories never own document OIDs, so there is nothing to remove;
    # signal "cannot process" with undef.
    return undef; # this will never be called for directories (will it??)
}
|
---|
160 |
|
---|
161 |
|
---|
162 | # called at the end of each plugin pass
|
---|
sub end {
    my ($self) = shift (@_);
    # Intentionally empty: no per-pass state needs flushing.
}
|
---|
167 |
|
---|
168 |
|
---|
169 |
|
---|
170 | # return 1 if this class might recurse using $pluginfo
|
---|
sub is_recursive {
    my $self = shift (@_);
    # DirectoryPlugin's whole job is recursing via $pluginfo, so this is
    # unconditionally true.
    return 1;
}
|
---|
176 |
|
---|
sub get_default_block_exp {
    my $self = shift (@_);
    # Case-insensitively block version-control droppings (CVS, .svn),
    # OS metadata files (Thumbs.db, .DS_Store), the OIDcount bookkeeping
    # file, and editor backup files ending in "~".
    return '(?i)(CVS|\.svn|Thumbs\.db|OIDcount|\.DS_Store|~)$';
}
|
---|
182 |
|
---|
# Validate a directory before recursing into it.
# Returns: undef if $dirname is not a directory; 0 if it is blocked or
# looks unsafe to recurse into (with an explanation printed to the
# outhandle); 1 if it is fine to process.
sub check_directory_path {

    my $self = shift(@_);
    my ($dirname) = @_;

    #return undef unless (-d $dirname);
    return undef unless (&FileUtils::directoryExists($dirname));

    # The plugin's own block expression takes priority.
    return 0 if ($self->{'block_exp'} ne "" && $dirname =~ /$self->{'block_exp'}/);

    my $outhandle = $self->{'outhandle'};

    # check to make sure we're not reading the archives or index directory
    # (guard against GSDLHOME being unset, which would both warn and make
    # the pattern match far too much)
    if (defined $ENV{'GSDLHOME'} && $ENV{'GSDLHOME'} ne "") {
	my $gsdlhome = quotemeta($ENV{'GSDLHOME'});
	if ($dirname =~ m/^$gsdlhome\/.*?\/import.*?\/(archives|index)$/) {
	    print $outhandle "DirectoryPlugin: $dirname appears to be a reference to a Greenstone collection, skipping.\n";
	    return 0;
	}
    }

    # check to see we haven't got a cyclic path...
    # NOTE: the historical test here was m%(/.*){,41}%, but "{,41}" is not
    # a valid quantifier: old perls match it literally (so the check never
    # fired), and perl >= 5.34 parses it as {0,41} (so it always fired).
    # Count path separators explicitly instead.
    my $depth = ($dirname =~ tr%/%%);
    if ($depth > 40) {
	print $outhandle "DirectoryPlugin: $dirname is 40 directories deep, is this a recursive path? if not increase constant in DirectoryPlugin.pm.\n";
	return 0;
    }

    # check to see we haven't got a cyclic path...
    # (import/<X>/import/<X> indicates a loop back into the same tree)
    if ($dirname =~ m%.*?import/(.+?)/import/\1.*%) {
	print $outhandle "DirectoryPlugin: $dirname appears to be in a recursive loop...\n";
	return 0;
    }

    return 1;
}
|
---|
216 |
|
---|
217 | # this may be called more than once
|
---|
# May be called more than once per pass. Walks the 'shared_fileroot'
# records accumulated in $block_hash, blocks each associated file from
# independent processing, and schedules it (via 'gsdlassocfile_tobe'
# metadata on the tie-to file) to be attached as an associated file.
# Clears 'shared_fileroot' when done.
sub sort_out_associated_files {

    my $self = shift (@_);
    my ($block_hash) = @_;

    # Nothing to do if no shared fileroots were recorded.
    return if (!scalar (keys %{$block_hash->{'shared_fileroot'}}));

    $self->{'assocfile_info'} = {} unless defined $self->{'assocfile_info'};
    my $assoc_info = $self->{'assocfile_info'};

    foreach my $prefix (keys %{$block_hash->{'shared_fileroot'}}) {
	my $record = $block_hash->{'shared_fileroot'}->{$prefix};
	my $tie_to = $record->{'tie_to'};
	my $exts = $record->{'exts'};

	# Skip records with no primary file or no extensions to attach.
	next unless ((defined $tie_to) && (scalar (keys %$exts) > 0));

	# set up fileblocks and assocfile_tobe
	my $base_file = "$prefix$tie_to";
	$assoc_info->{$base_file} = {} unless defined $assoc_info->{$base_file};
	my $base_file_metadata = $assoc_info->{$base_file};

	$base_file_metadata->{'gsdlassocfile_tobe'} = [] unless defined $base_file_metadata->{'gsdlassocfile_tobe'};
	my $assoc_tobe = $base_file_metadata->{'gsdlassocfile_tobe'};

	foreach my $e (keys %$exts) {
	    # block the file so no other plugin processes it independently
	    &util::block_filename($block_hash,"$prefix$e");
	    # set up as an associated file of the tie-to version
	    print STDERR " $self->{'plugin_type'}: Associating $prefix$e with $tie_to version\n";
	    my $mime_type = ""; # let system auto detect this
	    push(@$assoc_tobe,"$prefix$e:$mime_type:");
	}
    } # foreach record

    # Reset for the next round of accumulation.
    $block_hash->{'shared_fileroot'} = undef;
    $block_hash->{'shared_fileroot'} = {};
}
|
---|
258 |
|
---|
259 |
|
---|
260 | # do block exp OR special blocking ???
|
---|
261 |
|
---|
# Decide whether $filename_full_path must be skipped: either because it
# is in the shared file-block table, or because it matches this plugin's
# block_exp. Increments 'num_blocked' and returns 1 when blocked,
# returns 0 otherwise.
sub file_is_blocked {
    my $self = shift (@_);
    my ($block_hash, $filename_full_path) = @_;

    $filename_full_path = &util::upgrade_if_dos_filename($filename_full_path);

    # On Windows (but not under cygwin) every path in the block table is
    # stored lowercased, so lowercase before looking it up.
    my $lookup_path = $filename_full_path;
    if (($ENV{'GSDLOS'} =~ m/^windows$/) && ($^O ne "cygwin")) {
	$lookup_path = lc ($filename_full_path);
    }

    if (defined $block_hash->{'file_blocks'}->{$lookup_path}) {
	$self->{'num_blocked'} ++;
	return 1; # blocked
    }

    # check Directory plugin's own block_exp
    if ($self->{'block_exp'} ne "" && $filename_full_path =~ /$self->{'block_exp'}/) {
	$self->{'num_blocked'} ++;
	return 1; # blocked
    }

    return 0;
}
|
---|
289 |
|
---|
290 |
|
---|
291 |
|
---|
# Global file-scan (blocking) pass over a directory. Validates the
# directory, ensures the shared bookkeeping hashes exist in $block_hash,
# then recursively dispatches every entry down the plugin pipeline via
# &plugin::file_block_read. Returns 1 on success, -1 if the directory
# could not be read, or the result of check_directory_path when the
# path is rejected (0) or not a directory (undef).
sub file_block_read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $metadata, $gli) = @_;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname = $file;
    $dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    print $outhandle "Global file scan checking directory: $dirname\n";

    # Make sure the shared bookkeeping hashes exist before anything
    # downstream tries to populate them.
    $block_hash->{'all_files'} = {} unless defined $block_hash->{'all_files'};
    $block_hash->{'metadata_files'} = {} unless defined $block_hash->{'metadata_files'};

    $block_hash->{'file_blocks'} = {} unless defined $block_hash->{'file_blocks'};
    $block_hash->{'shared_fileroot'} = {} unless defined $block_hash->{'shared_fileroot'};

    # Recur over directory contents.
    my (@dir, $subfile);
    #my $count = 0;

    print $outhandle "DirectoryPlugin block: getting directory $dirname\n" if ($verbosity > 2);

    # find all the files in the directory
    my $rvalue = &FileUtils::readDirectory($dirname);
    if (!defined $rvalue) {
	if ($gli) {
	    print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
	}
	print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
	return -1; # error in processing
    }
    @dir = sort @{$rvalue};
    # Old pre-FileUtils implementation, kept for reference:
    #}
    #else
    #{
    #if (!opendir (DIR, $dirname)) {
    #	if ($gli) {
    #	    print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
    #	}
    #	print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
    #	return -1; # error in processing
    #	}
    #	@dir = sort readdir (DIR);
    #	closedir (DIR);
    #    }

    ###print "===== Dir contents =====\n";
    ###use Devel::Peek;
    ###Dump(@dir);
    ###print "\n===== =====\n";

    for (my $i = 0; $i < scalar(@dir); $i++) {
	my $raw_subfile = $dir[$i];
	# Skip the "." and ".." pseudo-entries.
	next if ($raw_subfile =~ m/^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);

	# Recursively read each $raw_subfile
	# NOTE(review): the next two debug prints duplicate each other,
	# decoding the name as UTF-8 and as the locale encoding
	# respectively — presumably a leftover experiment; confirm which
	# one is intended before removing either.
	print $outhandle "DirectoryPlugin block recurring: ". Encode::decode("utf8", $raw_file_subfile) ."\n" if ($verbosity > 2);
	print $outhandle "DirectoryPlugin block recurring: ". Encode::decode(locale =>$raw_file_subfile) ."\n" if ($verbosity > 2);

	#$count += &plugin::file_block_read ($pluginfo, $this_file_base_dir,

	&plugin::file_block_read ($pluginfo, $this_file_base_dir,
				  $raw_file_subfile,
				  $block_hash, $metadata, $gli);

    }
    # Convert any shared fileroots found during the scan into blocked /
    # associated-file metadata (may be called more than once overall).
    $self->sort_out_associated_files($block_hash);
    #return $count;
    return 1;

}
|
---|
373 |
|
---|
374 | # We don't do metadata_read
|
---|
# DirectoryPlugin does no work in the metadata_read pass; the metadata
# harvesting for directories happens inside read()/read_phase2() instead.
sub metadata_read {
    my $self = shift (@_);
    # The remaining arguments ($pluginfo, $base_dir, $file, $block_hash,
    # $extrametakeys, $extrametadata, $extrametafile, $processor, $gli,
    # $aux) are deliberately ignored.
    return undef;
}
|
---|
383 |
|
---|
384 |
|
---|
385 | # return number of files processed, undef if can't process
|
---|
386 | # Note that $base_dir might be "" and that $file might
|
---|
387 | # include directories
|
---|
388 |
|
---|
389 | # This function passes around metadata hash structures. Metadata hash
|
---|
390 | # structures are hashes that map from a (scalar) key (the metadata element
|
---|
391 | # name) to either a scalar metadata value or a reference to an array of
|
---|
392 | # such values.
|
---|
393 |
|
---|
# Import pass over one directory. Validates the path, lists its
# contents, canonicalises each raw filename to URL-encoded form, moves
# any "*.all" subdirectories to the end of the listing, then chains to
# read_phase2 which does the per-file work.
#
# Returns the number of documents processed, -1 if the directory could
# not be read, or check_directory_path's verdict (0 / undef) when the
# path is rejected. Note $base_dir may be "" and $file may itself
# contain directory components.
sub read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli) = @_;
    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname;
    if ($file eq "") {
	$dirname = $base_dir;
    } else {
	$dirname = $file;
	$dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;
    }

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    if (($verbosity > 2) && ((scalar keys %$in_metadata) > 0)) {
	print $outhandle "DirectoryPlugin: metadata passed in: ",
	join(", ", keys %$in_metadata), "\n";
    }

    # Recur over directory contents.
    my @dir;

    print $outhandle "DirectoryPlugin processing $dirname\n";

    # Find all the files in the directory. Use a lexical directory
    # handle: the old bareword DIR was a shared global, which is unsafe
    # in a routine that ends up re-entered recursively.
    my $dirh;
    if (!opendir ($dirh, $dirname)) {
	if ($gli) {
	    print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
	}
	print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
	return -1; # error in processing
    }
    @dir = sort readdir ($dirh);
    closedir ($dirh);

    # Canonicalise each raw filename to its URL-encoded form (explicit
    # loop rather than a void-context map).
    foreach my $raw_name (@dir) {
	$raw_name = &unicode::raw_filename_to_url_encoded($raw_name);
    }

    # Re-order the files in the list so any directories ending with .all are moved to the end
    for (my $i = scalar(@dir) - 1; $i >= 0; $i--) {
	if (-d &FileUtils::filenameConcatenate($dirname, $dir[$i]) && $dir[$i] =~ /\.all$/) {
	    push(@dir, splice(@dir, $i, 1));
	}
    }

    # Chain through to the rest of the read function (now split off and named
    # read_phase2)
    my $count = $self->read_phase2($pluginfo, $dirname, \@dir, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli);

    return $count;
}
|
---|
448 |
|
---|
# Second half of the import pass, operating on an already-prepared
# directory listing ($dir_ref, URL-encoded names). Performs the
# metadata_read sub-pass over the listing, defers metadata destined for
# subdirectories, then imports each file via &plugin::read, optionally
# re-scanning the directory for files that appeared mid-pass
# (-recheck_directories). Returns the number of documents processed.
# Also invoked directly by read_for_manifest_v2 with a forged listing.
sub read_phase2
{
    my $self = shift (@_);
    my ($pluginfo, $dirname, $dir_ref, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli) = @_;
    # These were defined in read (phase 1)
    my @dir = @{$dir_ref};
    # NOTE(review): this $subfile is never used — both loops below declare
    # their own lexical $subfile.
    my $subfile;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # setup the metadata structures. we do a metadata_read pass to see if there is any additional metadata, then pass it to read

    my $additionalmetadata = 0; # is there extra metadata available?
    my %extrametadata; # maps from filespec to extra metadata keys
    my %extrametafile; # maps from filespec to the metadata.xml (or similar) file it came from
    my @extrametakeys; # keys of %extrametadata in order read


    my $os_dirsep = &util::get_os_dirsep();
    my $dirsep = &util::get_dirsep();
    # Normalise forward slashes in the base dir to the OS separator form
    # so it can be used in path comparisons.
    my $base_dir_regexp = $base_dir;
    $base_dir_regexp =~ s/\//$os_dirsep/g;

    # Want to get relative path of local_dirname within the base_directory
    # but with URL style slashes.
    my $local_dirname = &util::filename_within_directory_url_format($dirname, $base_dir);

    # if we are in import folder, then local_dirname will be empty
    if ($local_dirname ne "") {
	# look for extra metadata passed down from higher folders
	$local_dirname .= "/"; # closing slash must be URL type slash also and not $dirsep;
	if (defined $self->{'subdir_extrametakeys'}->{$local_dirname}) {
	    # Replay the records a parent directory's metadata.xml deferred
	    # for this subdirectory, then drop them (consumed once).
	    my $extrakeys = $self->{'subdir_extrametakeys'}->{$local_dirname};
	    foreach my $ek (@$extrakeys) {
		my $extrakeys_re = $ek->{'re'};
		my $extrakeys_md = $ek->{'md'};
		my $extrakeys_mf = $ek->{'mf'};
		&extrametautil::addmetakey(\@extrametakeys, $extrakeys_re);
		&extrametautil::setmetadata(\%extrametadata, $extrakeys_re, $extrakeys_md);
		&extrametautil::setmetafile(\%extrametafile, $extrakeys_re, $extrakeys_mf);
	    }
	    delete($self->{'subdir_extrametakeys'}->{$local_dirname});
	}
    }
    # apply metadata pass for each of the files in the directory -- ignore
    # maxdocs here
    my $num_files = scalar(@dir);
    for (my $i = 0; $i < scalar(@dir); $i++) {
	my $subfile = $dir[$i];
	next if ($subfile =~ m/^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	# Listing entries are URL-encoded; convert back to the raw
	# on-disk filename for path construction.
	my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);

	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
	my $raw_full_filename = &FileUtils::filenameConcatenate($this_file_base_dir, $raw_file_subfile);

	if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
	    print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for metadata_read\n" if ($verbosity > 2);
	    next;
	}

	# Recursively read each $raw_subfile
	print $outhandle "DirectoryPlugin metadata recurring: $raw_subfile\n" if ($verbosity > 2);

	&plugin::metadata_read ($pluginfo, $this_file_base_dir,
				$raw_file_subfile,$block_hash,
				\@extrametakeys, \%extrametadata,
				\%extrametafile,
				$processor, $gli);
	$additionalmetadata = 1;
    }

    # filter out any extrametakeys that mention subdirectories and store
    # for later use (i.e. when that sub-directory is being processed)
    foreach my $ek (@extrametakeys) { # where each Extrametakey (which is a filename) is stored as a url-style regex

	my ($subdir_re,$extrakey_dir) = &util::url_fileparse($ek);

	if ($extrakey_dir ne "") {
	    # a subdir was specified
	    my $md = &extrametautil::getmetadata(\%extrametadata, $ek);
	    my $mf = &extrametautil::getmetafile(\%extrametafile, $ek);

	    my $subdir_extrametakeys = $self->{'subdir_extrametakeys'};
	    my $subdir_rec = { 're' => $subdir_re, 'md' => $md, 'mf' => $mf };

	    # when it's looked up, it must be relative to the base dir
	    push(@{$subdir_extrametakeys->{"$local_dirname$extrakey_dir"}},$subdir_rec);
	}
    }

    # import each of the files in the directory
    my $count=0;
    # Note: "<=" is deliberate — the extra iteration at $i == $num_files
    # triggers the recheck_directories logic below.
    for (my $i = 0; $i <= scalar(@dir); $i++) {
	# When every file in the directory has been done, pause for a moment (figuratively!)
	# If the -recheck_directories argument hasn't been provided, stop now (default)
	# Otherwise, re-read the contents of the directory to check for new files
	# Any new files are added to the @dir list and are processed as normal
	# This is necessary when documents to be indexed are specified in bibliographic DBs
	# These files are copied/downloaded and stored in a new folder at import time
	if ($i == $num_files) {
	    last unless $self->{'recheck_directories'};

	    # Re-read the files in the directory to see if there are any new files
	    # NOTE(review): bareword handle DIR is a package global — a
	    # lexical handle would be safer if this code ever re-enters.
	    last if (!opendir (DIR, $dirname));
	    my @dirnow = sort readdir (DIR);
	    map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dirnow;
	    closedir (DIR);

	    # We're only interested if there are more files than there were before
	    last if (scalar(@dirnow) <= scalar(@dir));

	    # Any new files are added to the end of @dir to get processed by the loop
	    my $j;
	    foreach my $subfilenow (@dirnow) {
		for ($j = 0; $j < $num_files; $j++) {
		    last if ($subfilenow eq $dir[$j]);
		}
		if ($j == $num_files) {
		    # New file
		    push(@dir, $subfilenow);
		}
	    }
	    # When the new files have been processed, check again
	    $num_files = scalar(@dir);
	}

	my $subfile = $dir[$i];
	last if ($maxdocs != -1 && ($count + $total_count) >= $maxdocs);
	next if ($subfile =~ /^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);
	# get the canonical unicode version of the filename. This may not match
	# the filename on the file system. We will use it to compare to regex
	# in the metadata table.
	my $unicode_subfile = &util::raw_filename_to_unicode($dirname, $raw_subfile);
	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
	my $raw_full_filename
	    = &FileUtils::filenameConcatenate($this_file_base_dir,$raw_file_subfile);

	if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
	    print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for read\n" if ($verbosity > 2);
	    next;
	}
	###rint STDERR "** DirectoryPlugin processing $raw_full_filename\n";
	# Follow Windows shortcuts
	if ($raw_subfile =~ m/(?i)\.lnk$/ && (($ENV{'GSDLOS'} =~ m/^windows$/i) && ($^O ne "cygwin"))) {
	    require Win32::Shortcut;
	    my $shortcut = new Win32::Shortcut(&FileUtils::filenameConcatenate($dirname, $raw_subfile));
	    if ($shortcut) {
		# The file to be processed is now the target of the shortcut
		$this_file_base_dir = "";
		$file = "";
		$raw_subfile = $shortcut->Path;
	    }
	}

	# check for a symlink pointing back to a leading directory
	if (-d "$dirname/$raw_subfile" && -l "$dirname/$raw_subfile") {
	    # readlink gives a "fatal error" on systems that don't implement
	    # symlinks. This assumes the the -l test above would fail on those.
	    my $linkdest=readlink "$dirname/$raw_subfile";
	    if (!defined ($linkdest)) {
		# system error - file not found?
		warn "DirectoryPlugin: symlink problem - $!";
	    } else {
		# see if link points to current or a parent directory
		if ($linkdest =~ m@^[\./\\]+$@ ||
		    index($dirname, $linkdest) != -1) {
		    warn "DirectoryPlugin: Ignoring recursive symlink ($dirname/$raw_subfile -> $linkdest)\n";
		    next;
		    ;
		}
	    }
	}

	print $outhandle "DirectoryPlugin: preparing metadata for $raw_subfile\n" if ($verbosity > 2);

	# Make a copy of $in_metadata to pass to $raw_subfile
	my $out_metadata = {};
	&metadatautil::combine_metadata_structures($out_metadata, $in_metadata);

	# check the assocfile_info (populated by sort_out_associated_files)
	if (defined $self->{'assocfile_info'}->{$raw_full_filename}) {
	    &metadatautil::combine_metadata_structures($out_metadata, $self->{'assocfile_info'}->{$raw_full_filename});
	}

	### Now we need to look up the metadata table to see if there is any
	# extra metadata for us. We need the canonical unicode version here.
	if ($additionalmetadata == 1) {
	    foreach my $filespec (@extrametakeys) {
		if ($unicode_subfile =~ /^$filespec$/) {
		    print $outhandle "File \"$unicode_subfile\" matches filespec \"$filespec\"\n"
			if ($verbosity > 2);
		    my $mdref = &extrametautil::getmetadata(\%extrametadata, $filespec);
		    my $mfref = &extrametautil::getmetafile(\%extrametafile, $filespec);

		    # Add the list files where the metadata came from
		    # into the metadata table so we can track this
		    # This mechanism is similar to how gsdlassocfile works

		    my @metafile_pair = ();
		    foreach my $l (keys %$mfref) {
			my $f = $mfref->{$l};
			push (@metafile_pair, "$f : $l");
		    }

		    $mdref->{'gsdlmetafile'} = \@metafile_pair;

		    &metadatautil::combine_metadata_structures($out_metadata, $mdref);
		}
	    }
	}

	# Incremental-import support: inf_timestamp was set in begin()
	# from the age of the archive info database.
	if (defined $self->{'inf_timestamp'}) {
	    # Look to see if it's a completely new file

	    if (!$block_hash->{'new_files'}->{$raw_full_filename}) {
		# Not a new file, must be an existing file
		# Let' see if it's newer than the last import.pl


		if (! -d $raw_full_filename) {
		    if (!$block_hash->{'reindex_files'}->{$raw_full_filename}) {
			# filename has been around for longer than inf_timestamp
			print $outhandle "**** Skipping $unicode_subfile\n" if ($verbosity >3);
			next;
		    }
		    else {
			# Remove old folder in archives (might hash to something different)
			# *** should be doing this on a Del one as well
			# but leave folder name?? and ensure hashs to
			# same again??

			# Then let through as new doc??

			# mark to doc-oids that rely on it for re-indexing
		    }
		}
	    }
	}

	# Recursively read each $subfile
	print $outhandle "DirectoryPlugin recurring: $unicode_subfile\n" if ($verbosity > 2);

	$count += &plugin::read ($pluginfo, $this_file_base_dir,
				 $raw_file_subfile, $block_hash,
				 $out_metadata, $processor, $maxdocs, ($total_count + $count), $gli);
    }

    return $count;
}
|
---|
704 |
|
---|
# Hook for contributing to the collection-build statistics report;
# DirectoryPlugin has nothing of its own to add.
sub compile_stats {
    my ($self, $stats) = @_;
}
|
---|
709 |
|
---|
710 | # Manifest files, version 2, provide an explicit listing of the documents to be
|
---|
711 | # processed by Greenstone. This allows a user to avoid expensive file tree
|
---|
712 | # searches - a crucial requirement for very-large scale collections and
|
---|
713 | # parallel processing. However, we still want to leverage the metadata parsing
|
---|
714 | # functionality found here in DirectoryPlugin. Thus we have this special call
|
---|
715 | # to read that expects a single file. The normal read function starts by
|
---|
716 | # listing the files in a given directory and then performs a number of actions
|
---|
717 | # over them (including recursing down into any further directories found). We
|
---|
718 | # circumvent that behaviour by 'pretending' to already have a directory listing
|
---|
719 | # containing at most two files - the file passed in, and an accompanying
|
---|
720 | # metadata.xml file if one exists.
|
---|
sub read_for_manifest_v2
{
    my $self = shift (@_);
    my ($pluginfo, $file, $block_hash, $processor, $gli) = @_;

    # Fixed values for this special single-file entry point.
    my $base_dir = '';
    my $in_metadata = {};
    my $maxdocs = -1;
    my $total_count = 0;

    # Ensure we have the full path of the file to process
    my $full_path = $file;
    $full_path = &FileUtils::filenameConcatenate($base_dir, $file) if ($base_dir =~ /\w/);

    # Unlike the vanilla read(), directories are unacceptable
    return 0 unless (-f $full_path);

    # Now split the full path into a directory and a filename
    my ($dirname, $the_file) = $full_path =~ /^(.*)\/([^\/]+)$/;

    # Forge a 'directory listing' holding just this file...
    my @listing = ($the_file);
    # ...plus an accompanying metadata.xml, if one exists alongside it
    my $metadata_xml_path = $dirname . '/metadata.xml';
    unshift(@listing, 'metadata.xml') if (-f $metadata_xml_path);

    # Chain through to the normal read process with our forged listing,
    # so as to avoid all the costs of actually listing / recursing.
    my $count = $self->read_phase2($pluginfo, $dirname, \@listing, $base_dir, $dirname, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli);

    # We don't return count, but test that it is 1 exactly.
    if ($count != 1)
    {
	print STDERR "ERROR! The count of documents processed from a single call to DirectoryPlugin::read_for_manifest_v2() is not 1.\n";
    }
}
|
---|
759 |
|
---|
760 | 1;
|
---|