[537] | 1 | ###########################################################################
|
---|
| 2 | #
|
---|
[15870] | 3 | # DirectoryPlugin.pm --
|
---|
[537] | 4 | # A component of the Greenstone digital library software
|
---|
| 5 | # from the New Zealand Digital Library Project at the
|
---|
| 6 | # University of Waikato, New Zealand.
|
---|
| 7 | #
|
---|
| 8 | # Copyright (C) 1999 New Zealand Digital Library Project
|
---|
| 9 | #
|
---|
| 10 | # This program is free software; you can redistribute it and/or modify
|
---|
| 11 | # it under the terms of the GNU General Public License as published by
|
---|
| 12 | # the Free Software Foundation; either version 2 of the License, or
|
---|
| 13 | # (at your option) any later version.
|
---|
| 14 | #
|
---|
| 15 | # This program is distributed in the hope that it will be useful,
|
---|
| 16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
|
---|
| 17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
---|
| 18 | # GNU General Public License for more details.
|
---|
| 19 | #
|
---|
| 20 | # You should have received a copy of the GNU General Public License
|
---|
| 21 | # along with this program; if not, write to the Free Software
|
---|
| 22 | # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
---|
| 23 | #
|
---|
| 24 | ###########################################################################
|
---|
| 25 |
|
---|
[15870] | 26 | # DirectoryPlugin is a plugin which recurses through directories processing
|
---|
| 27 | # each file it finds - which basically means passing it down the plugin
|
---|
| 28 | # pipeline
|
---|
[4] | 29 |
|
---|
[15870] | 30 | package DirectoryPlugin;
|
---|
[2228] | 31 |
|
---|
[24951] | 32 | use extrametautil;
|
---|
[17738] | 33 | use PrintInfo;
|
---|
[4] | 34 | use plugin;
|
---|
[136] | 35 | use util;
|
---|
[27306] | 36 | use FileUtils;
|
---|
[13188] | 37 | use metadatautil;
|
---|
[4] | 38 |
|
---|
[8737] | 39 | use File::Basename;
|
---|
[10254] | 40 | use strict;
|
---|
| 41 | no strict 'refs';
|
---|
[15870] | 42 | no strict 'subs';
|
---|
| 43 |
|
---|
[29760] | 44 | use Encode::Locale;
|
---|
[13545] | 45 | use Encode;
|
---|
[29763] | 46 | use Unicode::Normalize;
|
---|
[4] | 47 |
|
---|
BEGIN {
    # Inherit from PrintInfo, which supplies the standard Greenstone
    # plugin option-parsing / info-printing machinery.
    @DirectoryPlugin::ISA = ('PrintInfo');
}
|
---|
| 51 |
|
---|
# Option table for this plugin; merged into the ArgList in new() below.
# 'desc' values are resource-bundle keys resolved by the GLI/pluginfo code.
my $arguments =
    [ { 'name' => "block_exp",
	'desc' => "{BasePlugin.block_exp}",
	'type' => "regexp",
	'deft' => &get_default_block_exp(),
	'reqd' => "no" },
      # this option has been deprecated. leave it here for now so we can warn people not to use it
      { 'name' => "use_metadata_files",
	'desc' => "{DirectoryPlugin.use_metadata_files}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes" },
      { 'name' => "recheck_directories",
	'desc' => "{DirectoryPlugin.recheck_directories}",
	'type' => "flag",
	'reqd' => "no" } ];

# Plugin self-description consumed by the Greenstone option machinery.
my $options = { 'name'     => "DirectoryPlugin",
		'desc'     => "{DirectoryPlugin.desc}",
		'abstract' => "no",
		'inherits' => "yes",
		'args'     => $arguments };
|
---|
[3540] | 74 |
|
---|
# Constructor.  Registers this plugin's class name and option tables with
# the shared plugin/option lists, delegates construction to PrintInfo,
# then initialises the per-run counters.
#
# Dies if the long-deprecated -use_metadata_files option is still present.
sub new {
    my ($class) = shift (@_);
    my ($pluginlist,$inputargs,$hashArgOptLists) = @_;
    push(@$pluginlist, $class);

    push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
    push(@{$hashArgOptLists->{"OptList"}},$options);

    # Direct method-call syntax rather than the old indirect object
    # syntax ("new PrintInfo(...)"), which Perl can mis-parse.
    my $self = PrintInfo->new($pluginlist, $inputargs, $hashArgOptLists);

    if ($self->{'info_only'}) {
	# don't worry about any options or initialisations etc
	return bless $self, $class;
    }

    # we have left this option in so we can warn people who are still using it
    if ($self->{'use_metadata_files'}) {
	die "ERROR: DirectoryPlugin -use_metadata_files option has been deprecated. Please remove the option and add MetadataXMLPlug to your plugin list instead!\n";
    }

    # Per-run statistics counters.
    $self->{'num_processed'} = 0;
    $self->{'num_not_processed'} = 0;
    $self->{'num_blocked'} = 0;
    $self->{'num_archives'} = 0;

    # Maps sub-directory (URL-style, trailing slash) to extra metadata
    # records waiting to be applied when that sub-directory is read.
    $self->{'subdir_extrametakeys'} = {};

    return bless $self, $class;
}
|
---|
| 104 |
|
---|
# Called once, at the start of processing: records the processor-supplied
# verbosity level and output/failure handles on the plugin object.
sub init {
    my ($self, $verbosity, $outhandle, $failhandle) = @_;

    # verbosity is passed through from the processor
    $self->{'verbosity'} = $verbosity;

    # keep any previously-set outhandle when the caller passes undef;
    # failhandle is always taken as given
    $self->{'outhandle'} = $outhandle if defined $outhandle;
    $self->{'failhandle'} = $failhandle;

}
|
---|
| 118 |
|
---|
# called once, after all passes have finished
# DirectoryPlugin keeps no resources open, so there is nothing to release.
sub deinit {
    my ($self) = @_;

}
|
---|
| 124 |
|
---|
# called at the beginning of each plugin pass (import has one, building has many)
# For an incremental import pass this records the age of the collection's
# archive info database in $self->{'inf_timestamp'}; read() later uses that
# (together with $block_hash new_files/reindex_files) to skip unchanged files.
sub begin {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;

    # Only lookup timestamp info for import.pl, and only if incremental is set
    my $proc_package_name = ref $processor;
    if ($proc_package_name !~ /buildproc$/ && $self->{'incremental'} == 1) {
	# Get the infodbtype value for this collection from the arcinfo object
	my $infodbtype = $processor->getoutputinfo()->{'infodbtype'};
	$infodbtype = "gdbm" if $infodbtype eq "gdbm-txtgz"; # in archives, cannot use txtgz version
	my $output_dir = $processor->getoutputdir();
	# NOTE(review): dbutil is not use'd in this file - assumed loaded by the framework
	my $archives_inf = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $output_dir);

	if ( -e $archives_inf ) {
	    # -M gives the file age in days relative to script start time
	    $self->{'inf_timestamp'} = -M $archives_inf;
	}
    }
}
|
---|
| 144 |
|
---|
# Incremental-rebuild hook: remove everything this plugin produced.
# DirectoryPlugin generates no documents itself, so this is a no-op.
sub remove_all {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $processor, $maxdocs) = @_;
}
|
---|
[21308] | 149 |
|
---|
[21315] | 150 |
|
---|
# Incremental-rebuild hook: remove the documents derived from one file.
# Directories never yield documents of their own, so nothing is removed.
sub remove_one {
    my ($self, $file, $oids, $archivedir) = @_;

    return undef; # this will never be called for directories (will it??)

}
|
---|
| 157 |
|
---|
| 158 |
|
---|
# called at the end of each plugin pass
# No pass-level state to flush for this plugin.
sub end {
    my ($self) = shift (@_);

}
|
---|
| 164 |
|
---|
| 165 |
|
---|
[17738] | 166 |
|
---|
# Tell the plugin manager that this plugin recurses into directories
# via $pluginfo (always true for DirectoryPlugin).
sub is_recursive {
    my ($self) = @_;

    return 1;
}
|
---|
| 173 |
|
---|
# Default block_exp: version-control droppings (CVS, .svn), OS metadata
# files (Thumbs.db, .DS_Store), the OIDcount bookkeeping file and editor
# backup files (trailing ~), matched case-insensitively at end of name.
sub get_default_block_exp {
    my ($self) = @_;

    return '(?i)(CVS|\.svn|Thumbs\.db|OIDcount|\.DS_Store|~)$';
}
|
---|
| 179 |
|
---|
# Validates that $dirname is a directory this plugin should descend into.
# Returns undef when $dirname is not a directory at all, 0 when it is a
# directory that must be skipped (matches block_exp, looks like a Greenstone
# archives/index area, or is a suspected recursive path), and 1 when it is
# safe to process.
sub check_directory_path {

    my $self = shift(@_);
    my ($dirname) = @_;

    return undef unless (-d $dirname);

    return 0 if ($self->{'block_exp'} ne "" && $dirname =~ /$self->{'block_exp'}/);

    my $outhandle = $self->{'outhandle'};

    # check to make sure we're not reading the archives or index directory
    my $gsdlhome = quotemeta($ENV{'GSDLHOME'});
    if ($dirname =~ m/^$gsdlhome\/.*?\/import.*?\/(archives|index)$/) {
	print $outhandle "DirectoryPlugin: $dirname appears to be a reference to a Greenstone collection, skipping.\n";
	return 0;
    }

    # check to see we haven't got a cyclic path: count the directory
    # separators instead of the old m%(/.*){,41}% test -- "{,41}" was not a
    # valid bounded quantifier (matched literally before Perl 5.34, and on
    # 5.34+ it became {0,41}, which matches every path and would reject
    # every directory)
    my $depth = ($dirname =~ tr%/\\%%);
    if ($depth > 40) {
	print $outhandle "DirectoryPlugin: $dirname is 40 directories deep, is this a recursive path? if not increase constant in DirectoryPlugin.pm.\n";
	return 0;
    }

    # check to see we haven't got a cyclic path...
    if ($dirname =~ m%.*?import/(.+?)/import/\1.*%) {
	print $outhandle "DirectoryPlugin: $dirname appears to be in a recursive loop...\n";
	return 0;
    }

    return 1;
}
|
---|
| 212 |
|
---|
# This may be called more than once.  Walks the 'shared_fileroot' records
# accumulated in $block_hash during the global file scan; for each prefix
# that has a primary ("tie_to") version plus secondary extensions, blocks
# the secondary files and queues them up (in $self->{'assocfile_info'}) to
# become associated files of the primary when it is read.
sub sort_out_associated_files {

    my $self = shift (@_);
    my ($block_hash) = @_;

    # Nothing to do when no plugin registered shared fileroots.
    my $shared = $block_hash->{'shared_fileroot'};
    return if (!scalar (keys %{$shared}));

    $self->{'assocfile_info'} = {} unless defined $self->{'assocfile_info'};
    my $metadata = $self->{'assocfile_info'};

    foreach my $prefix (keys %{$shared}) {
	my $record = $shared->{$prefix};

	my $tie_to = $record->{'tie_to'};
	my $exts = $record->{'exts'};

	next unless ((defined $tie_to) && (scalar (keys %$exts) > 0));

	# set up fileblocks and assocfile_tobe on the primary file
	my $base_file = "$prefix$tie_to";
	$metadata->{$base_file} = {} unless defined $metadata->{$base_file};
	my $base_file_metadata = $metadata->{$base_file};

	$base_file_metadata->{'gsdlassocfile_tobe'} = [] unless defined $base_file_metadata->{'gsdlassocfile_tobe'};
	my $assoc_tobe = $base_file_metadata->{'gsdlassocfile_tobe'};

	foreach my $e (keys %$exts) {
	    # block the secondary file
	    &util::block_filename($block_hash,"$prefix$e");
	    # set up as an associated file
	    print STDERR " $self->{'plugin_type'}: Associating $prefix$e with $tie_to version\n";
	    my $mime_type = ""; # let system auto detect this
	    push(@$assoc_tobe,"$prefix$e:$mime_type:");
	}
    } # foreach record

    # Reset the shared records so a later call starts from scratch.
    $block_hash->{'shared_fileroot'} = undef;
    $block_hash->{'shared_fileroot'} = {};

}
|
---|
| 254 |
|
---|
| 255 |
|
---|
# Returns 1 (and bumps the num_blocked counter) when $filename_full_path is
# listed in $block_hash->{'file_blocks'} or matches this plugin's own
# block_exp; returns 0 otherwise.
sub file_is_blocked {
    my $self = shift (@_);
    my ($block_hash, $filename_full_path) = @_;

    # Normalise any DOS-style 8.3 path before comparison.
    $filename_full_path = &util::upgrade_if_dos_filename($filename_full_path);

    # On native Windows (not cygwin) the block list stores lowercased paths,
    # so fold case before the lookup; elsewhere compare the path verbatim.
    my $on_windows = ($ENV{'GSDLOS'} =~ m/^windows$/) && ($^O ne "cygwin");
    my $lookup_key = $on_windows ? lc ($filename_full_path) : $filename_full_path;

    if (defined $block_hash->{'file_blocks'}->{$lookup_key}) {
	$self->{'num_blocked'} ++;
	return 1;
    }

    # check Directory plugin's own block_exp
    if ($self->{'block_exp'} ne "" && $filename_full_path =~ /$self->{'block_exp'}/) {
	$self->{'num_blocked'} ++;
	return 1; # blocked
    }
    return 0;
}
|
---|
| 285 |
|
---|
| 286 |
|
---|
| 287 |
|
---|
# Global file-scan pass over one directory: ensures the shared bookkeeping
# structures exist in $block_hash, then recursively runs file_block_read on
# every entry so that plugins can register blocked files and shared
# fileroots before the main read pass.  Returns 1 on success, -1 when the
# directory cannot be read, or propagates check_directory_path's verdict
# (undef / 0) when the path should not be descended into.
sub file_block_read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $metadata, $gli) = @_;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname = $file;
    $dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    print $outhandle "Global file scan checking directory: $dirname\n";

    # Make sure the shared bookkeeping structures exist before recursing.
    $block_hash->{'all_files'} = {} unless defined $block_hash->{'all_files'};
    $block_hash->{'metadata_files'} = {} unless defined $block_hash->{'metadata_files'};

    $block_hash->{'file_blocks'} = {} unless defined $block_hash->{'file_blocks'};
    $block_hash->{'shared_fileroot'} = {} unless defined $block_hash->{'shared_fileroot'};

    # Recur over directory contents.
    my @dir;

    print $outhandle "DirectoryPlugin block: getting directory $dirname\n" if ($verbosity > 2);

    # find all the files in the directory
    if (!opendir (DIR, $dirname)) {
	if ($gli) {
	    print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
	}
	print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
	return -1; # error in processing
    }
    @dir = sort readdir (DIR);
    closedir (DIR);

    for (my $i = 0; $i < scalar(@dir); $i++) {
	my $raw_subfile = $dir[$i];
	next if ($raw_subfile =~ m/^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);

	# Recursively read each $raw_subfile.  (A duplicated experimental
	# debug print using Encode::decode(locale => ...) was removed here.)
	print $outhandle "DirectoryPlugin block recurring: ". Encode::decode("utf8", $raw_file_subfile) ."\n" if ($verbosity > 2);

	&plugin::file_block_read ($pluginfo, $this_file_base_dir,
				  $raw_file_subfile,
				  $block_hash, $metadata, $gli);

    }

    # Turn the shared-fileroot records gathered above into block/assoc info.
    $self->sort_out_associated_files($block_hash);

    return 1;

}
|
---|
[17738] | 351 |
|
---|
# We don't do metadata_read: DirectoryPlugin contributes no metadata of its
# own during this pass (recursion into subdirectories happens inside read()),
# so it always declines by returning undef.
sub metadata_read {
    my ($self, $pluginfo, $base_dir, $file, $block_hash,
	$extrametakeys, $extrametadata, $extrametafile,
	$processor, $gli, $aux) = @_;

    return undef;
}
|
---|
| 361 |
|
---|
| 362 |
|
---|
# return number of files processed, undef if can't process
# Note that $base_dir might be "" and that $file might
# include directories

# This function passes around metadata hash structures. Metadata hash
# structures are hashes that map from a (scalar) key (the metadata element
# name) to either a scalar metadata value or a reference to an array of
# such values.
#
# Main import pass over one directory: runs the metadata_read pass over each
# entry first (collecting extra metadata from metadata.xml-style files), then
# recursively reads each entry through the plugin pipeline.  Leftover
# unconditional debug prints ("poo poo poo", "****", comparison dumps) and
# dead locals ($os_dirsep/$dirsep/$base_dir_regexp) from a debugging session
# have been removed; the remaining diagnostics are verbosity-gated.
sub read {
    my $self = shift (@_);
    my ($pluginfo, $base_dir, $file, $block_hash, $in_metadata, $processor, $maxdocs, $total_count, $gli) = @_;

    my $outhandle = $self->{'outhandle'};
    my $verbosity = $self->{'verbosity'};

    # Calculate the directory name and ensure it is a directory and
    # that it is not explicitly blocked.
    my $dirname;
    if ($file eq "") {
	$dirname = $base_dir;
    } else {
	$dirname = $file;
	$dirname = &FileUtils::filenameConcatenate($base_dir, $file) if $base_dir =~ /\w/;
    }

    my $directory_ok = $self->check_directory_path($dirname);
    return $directory_ok unless (defined $directory_ok && $directory_ok == 1);

    if (($verbosity > 2) && ((scalar keys %$in_metadata) > 0)) {
	print $outhandle "DirectoryPlugin: metadata passed in: ",
	join(", ", keys %$in_metadata), "\n";
    }

    # Recur over directory contents.
    my @dir;

    print $outhandle "DirectoryPlugin read: getting directory $dirname\n" if ($verbosity > 2);

    # find all the files in the directory
    if (!opendir (DIR, $dirname)) {
	if ($gli) {
	    print STDERR "<ProcessingError n='$file' r='Could not read directory $dirname'>\n";
	}
	print $outhandle "DirectoryPlugin: WARNING - couldn't read directory $dirname\n";
	return -1; # error in processing
    }
    @dir = sort readdir (DIR);
    closedir (DIR);

    # Work internally with url-encoded filenames (ASCII-safe); they are
    # converted back to raw filenames before touching the filesystem.
    map { $_ = &unicode::raw_filename_to_url_encoded($_); } @dir;

    # Re-order the files in the list so any directories ending with .all are moved to the end
    for (my $i = scalar(@dir) - 1; $i >= 0; $i--) {
	if (-d &FileUtils::filenameConcatenate($dirname, $dir[$i]) && $dir[$i] =~ /\.all$/) {
	    push(@dir, splice(@dir, $i, 1));
	}
    }

    # setup the metadata structures. we do a metadata_read pass to see if
    # there is any additional metadata, then pass it to read
    my $additionalmetadata = 0; # is there extra metadata available?
    my %extrametadata;  # maps from filespec to extra metadata keys
    my %extrametafile;  # maps from filespec to the metadata.xml (or similar) file it came from
    my @extrametakeys;  # keys of %extrametadata in order read

    # Want to get relative path of local_dirname within the base_directory
    # but with URL style slashes.
    my $local_dirname = &util::filename_within_directory_url_format($dirname, $base_dir);

    # if we are in import folder, then local_dirname will be empty
    if ($local_dirname ne "") {
	# look for extra metadata passed down from higher folders
	$local_dirname .= "/"; # closing slash must be a URL-style slash
	if (defined $self->{'subdir_extrametakeys'}->{$local_dirname}) {
	    my $extrakeys = $self->{'subdir_extrametakeys'}->{$local_dirname};
	    foreach my $ek (@$extrakeys) {
		my $extrakeys_re = $ek->{'re'};
		my $extrakeys_md = $ek->{'md'};
		my $extrakeys_mf = $ek->{'mf'};
		&extrametautil::addmetakey(\@extrametakeys, $extrakeys_re);
		&extrametautil::setmetadata(\%extrametadata, $extrakeys_re, $extrakeys_md);
		&extrametautil::setmetafile(\%extrametafile, $extrakeys_re, $extrakeys_mf);
	    }
	    delete($self->{'subdir_extrametakeys'}->{$local_dirname});
	}
    }

    # apply metadata pass for each of the files in the directory -- ignore
    # maxdocs here
    my $num_files = scalar(@dir);
    for (my $i = 0; $i < scalar(@dir); $i++) {
	my $subfile = $dir[$i];
	next if ($subfile =~ m/^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);

	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
	my $raw_full_filename = &FileUtils::filenameConcatenate($this_file_base_dir, $raw_file_subfile);

	if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
	    print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for metadata_read\n" if ($verbosity > 2);
	    next;
	}

	# Recursively read each $raw_subfile
	print $outhandle "DirectoryPlugin metadata recurring: $raw_subfile\n" if ($verbosity > 2);

	&plugin::metadata_read ($pluginfo, $this_file_base_dir,
				$raw_file_subfile,$block_hash,
				\@extrametakeys, \%extrametadata,
				\%extrametafile,
				$processor, $gli);
	$additionalmetadata = 1;
    }

    # filter out any extrametakeys that mention subdirectories and store
    # for later use (i.e. when that sub-directory is being processed)
    foreach my $ek (@extrametakeys) { # where each extrametakey (which is a filename) is stored as a url-style regex

	my ($subdir_re,$extrakey_dir) = &util::url_fileparse($ek);

	if ($extrakey_dir ne "") {
	    # a subdir was specified
	    my $md = &extrametautil::getmetadata(\%extrametadata, $ek);
	    my $mf = &extrametautil::getmetafile(\%extrametafile, $ek);

	    my $subdir_extrametakeys = $self->{'subdir_extrametakeys'};
	    my $subdir_rec = { 're' => $subdir_re, 'md' => $md, 'mf' => $mf };

	    # when it's looked up, it must be relative to the base dir
	    push(@{$subdir_extrametakeys->{"$local_dirname$extrakey_dir"}},$subdir_rec);
	}
    }

    # import each of the files in the directory
    my $count=0;
    for (my $i = 0; $i <= scalar(@dir); $i++) {
	# When every file in the directory has been done, pause for a moment (figuratively!)
	# If the -recheck_directories argument hasn't been provided, stop now (default)
	# Otherwise, re-read the contents of the directory to check for new files
	# Any new files are added to the @dir list and are processed as normal
	# This is necessary when documents to be indexed are specified in bibliographic DBs
	# These files are copied/downloaded and stored in a new folder at import time
	if ($i == $num_files) {
	    last unless $self->{'recheck_directories'};

	    # Re-read the files in the directory to see if there are any new files
	    last if (!opendir (DIR, $dirname));
	    my @dirnow = sort readdir (DIR);
	    map { $_ = &unicode::raw_filename_to_url_encoded($_) } @dirnow;
	    closedir (DIR);

	    # We're only interested if there are more files than there were before
	    last if (scalar(@dirnow) <= scalar(@dir));

	    # Any new files are added to the end of @dir to get processed by the loop
	    my $j;
	    foreach my $subfilenow (@dirnow) {
		for ($j = 0; $j < $num_files; $j++) {
		    last if ($subfilenow eq $dir[$j]);
		}
		if ($j == $num_files) {
		    # New file
		    push(@dir, $subfilenow);
		}
	    }
	    # When the new files have been processed, check again
	    $num_files = scalar(@dir);
	}

	my $subfile = $dir[$i];
	last if ($maxdocs != -1 && ($count + $total_count) >= $maxdocs);
	next if ($subfile =~ /^\.\.?$/);

	my $this_file_base_dir = $base_dir;
	my $raw_subfile = &unicode::url_encoded_to_raw_filename($subfile);
	# unicode-aware filename, used for filespec matching and messages
	my $unicode_subfile = &util::raw_filename_to_unicode($dirname, $raw_subfile);

	my $raw_file_subfile = &FileUtils::filenameConcatenate($file, $raw_subfile);
	my $raw_full_filename
	    = &FileUtils::filenameConcatenate($this_file_base_dir,$raw_file_subfile);

	if ($self->file_is_blocked($block_hash,$raw_full_filename)) {
	    print STDERR "DirectoryPlugin: file $raw_full_filename was blocked for read\n" if ($verbosity > 2);
	    next;
	}

	# Follow Windows shortcuts
	if ($raw_subfile =~ m/(?i)\.lnk$/ && (($ENV{'GSDLOS'} =~ m/^windows$/i) && ($^O ne "cygwin"))) {
	    require Win32::Shortcut;
	    my $shortcut = Win32::Shortcut->new(&FileUtils::filenameConcatenate($dirname, $raw_subfile));
	    if ($shortcut) {
		# The file to be processed is now the target of the shortcut
		$this_file_base_dir = "";
		$file = "";
		$raw_subfile = $shortcut->Path;
	    }
	}

	# check for a symlink pointing back to a leading directory
	if (-d "$dirname/$raw_subfile" && -l "$dirname/$raw_subfile") {
	    # readlink gives a "fatal error" on systems that don't implement
	    # symlinks. This assumes the the -l test above would fail on those.
	    my $linkdest=readlink "$dirname/$raw_subfile";
	    if (!defined ($linkdest)) {
		# system error - file not found?
		warn "DirectoryPlugin: symlink problem - $!";
	    } else {
		# see if link points to current or a parent directory
		if ($linkdest =~ m@^[\./\\]+$@ ||
		    index($dirname, $linkdest) != -1) {
		    warn "DirectoryPlugin: Ignoring recursive symlink ($dirname/$raw_subfile -> $linkdest)\n";
		    next;
		}
	    }
	}

	print $outhandle "DirectoryPlugin: preparing metadata for $raw_subfile\n" if ($verbosity > 2);

	# Make a copy of $in_metadata to pass to $raw_subfile
	my $out_metadata = {};
	&metadatautil::combine_metadata_structures($out_metadata, $in_metadata);

	# check the assocfile_info
	if (defined $self->{'assocfile_info'}->{$raw_full_filename}) {
	    &metadatautil::combine_metadata_structures($out_metadata, $self->{'assocfile_info'}->{$raw_full_filename});
	}

	# Next add metadata read in XML files (if it is supplied)
	if ($additionalmetadata == 1) {
	    foreach my $filespec (@extrametakeys) {
		# use the unicode-aware filename for the filespec comparison
		if ($unicode_subfile =~ /^$filespec$/) {
		    print $outhandle "File \"$unicode_subfile\" matches filespec \"$filespec\"\n"
			if ($verbosity > 2);
		    my $mdref = &extrametautil::getmetadata(\%extrametadata, $filespec);
		    my $mfref = &extrametautil::getmetafile(\%extrametafile, $filespec);

		    # Add the list files where the metadata came from
		    # into the metadata table so we can track this
		    # This mechanism is similar to how gsdlassocfile works
		    my @metafile_pair = ();
		    foreach my $l (keys %$mfref) {
			my $f = $mfref->{$l};
			push (@metafile_pair, "$f : $l");
		    }

		    $mdref->{'gsdlmetafile'} = \@metafile_pair;

		    &metadatautil::combine_metadata_structures($out_metadata, $mdref);
		}
	    }
	}

	if (defined $self->{'inf_timestamp'}) {
	    # Incremental import: look to see if it's a completely new file
	    if (!$block_hash->{'new_files'}->{$raw_full_filename}) {
		# Not a new file, must be an existing file
		# Let's see if it's newer than the last import.pl
		if (! -d $raw_full_filename) {
		    if (!$block_hash->{'reindex_files'}->{$raw_full_filename}) {
			# filename has been around for longer than inf_timestamp
			print $outhandle "**** Skipping $unicode_subfile\n" if ($verbosity >3);
			next;
		    }
		    # else: marked for re-indexing -- fall through and
		    # process it as if it were new (old archives folder
		    # handling is still TODO, see original notes)
		}
	    }
	}

	# Recursively read each $subfile
	print $outhandle "DirectoryPlugin recurring: $unicode_subfile\n" if ($verbosity > 2);

	$count += &plugin::read ($pluginfo, $this_file_base_dir,
				 $raw_file_subfile, $block_hash,
				 $out_metadata, $processor, $maxdocs, ($total_count + $count), $gli);
    }

    return $count;
}
|
---|
[4] | 681 |
|
---|
# Hook for folding per-plugin statistics into $stats; DirectoryPlugin keeps
# its counts (num_processed etc.) on $self and contributes nothing here.
sub compile_stats {
    my $self = shift(@_);
    my ($stats) = @_;
}
|
---|
| 686 |
|
---|
[4] | 687 | 1;
|
---|