root/main/trunk/greenstone2/perllib/inexport.pm @ 28641

Revision 28641, 47.8 KB (checked in by kjdon, 6 years ago)

moved common import/export options to here, and moved checking collect.cfg for the specific options out to import.pl/export.pl. groupsize is now just for the GreenstoneXML plugout, as that is the only format it was used for anyway. export now has a saveas_options option, which is used to pass options to the plugout

  • Property svn:executable set to *
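For context, a minimal sketch of how a driver script such as import.pl might drive this class, based only on the methods defined in the file below. The real import.pl/export.pl also merge in their own script-specific argument lists, so the option setup here is illustrative rather than the actual script code:

    use inexport;

    # illustrative option spec; the real scripts combine several argument lists
    my $options = { 'name' => "import.pl",
                    'desc' => "{import.desc}",
                    'args' => $inexport::arguments };

    my $inexport = new inexport("import", \@ARGV, $options);
    my $collection = $inexport->get_collection();

    my ($config_filename, $collectcfg) = $inexport->read_collection_cfg($collection, $options);
    $inexport->set_collection_options($collectcfg);

    my $pluginfo = $inexport->process_files($config_filename, $collectcfg);
    $inexport->generate_statistics($pluginfo);
    $inexport->deinit();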
1###########################################################################
2#
3# inexport.pm -- useful class to support import.pl and export.pl
4# A component of the Greenstone digital library software
5# from the New Zealand Digital Library Project at the
6# University of Waikato, New Zealand.
7#
8# Copyright (C) 1999 New Zealand Digital Library Project
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation; either version 2 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License
21# along with this program; if not, write to the Free Software
22# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
23#
24###########################################################################
25
26package inexport;
27
28use strict;
29
30no strict 'refs'; # allow filehandles to be variables and vice versa
31no strict 'subs'; # allow barewords (eg STDERR) as function arguments
32
33use arcinfo;
34use colcfg;
35use dbutil;
36use doc;
37use plugin;
38use plugout;
39use manifest;
40use inexport;
41use util;
42use scriptutil;
43use FileHandle;
44use gsprintf 'gsprintf';
45use printusage;
46use parse2;
47
48use File::Basename;
49
50my $oidtype_list =
51    [ { 'name' => "hash",
52        'desc' => "{import.OIDtype.hash}" },
53      { 'name' => "hash_on_full_filename",
54        'desc' => "{import.OIDtype.hash_on_full_filename}" },
55      { 'name' => "assigned",
56        'desc' => "{import.OIDtype.assigned}" },
57      { 'name' => "incremental",
58        'desc' => "{import.OIDtype.incremental}" },
59      { 'name' => "filename",
60        'desc' => "{import.OIDtype.filename}" },
61      { 'name' => "dirname",
62        'desc' => "{import.OIDtype.dirname}" },
63      { 'name' => "full_filename",
64        'desc' => "{import.OIDtype.full_filename}" } ];
65
66$inexport::directory_arguments =
67[
68      { 'name' => "importdir",
69    'desc' => "{import.importdir}",
70    'type' => "string",
71    'reqd' => "no",
72        'hiddengli' => "yes" },
73      { 'name' => "collectdir",
74    'desc' => "{import.collectdir}",
75    'type' => "string",
76    # parsearg left "" as default
77    #'deft' => &FileUtils::filenameConcatenate($ENV{'GSDLHOME'}, "collect"),
78    'deft' => "",
79    'reqd' => "no",
80        'hiddengli' => "yes" },
81 
82];
83$inexport::arguments =
84[
85      # don't set the default to hash - want to allow this to come from
86      # entry in collect.cfg but want to override it here
87      { 'name' => "OIDtype",
88    'desc' => "{import.OIDtype}",
89    'type' => "enum",
90    'list' => $oidtype_list,
91    # parsearg left "" as default
92    #'deft' => "hash",
93    'reqd' => "no",
94    'modegli' => "2" },
95      { 'name' => "OIDmetadata",
96    'desc' => "{import.OIDmetadata}",
97    'type' => "string",
98     #'type' => "metadata", #doesn't work properly in GLI
99    # parsearg left "" as default
100    #'deft' => "dc.Identifier",
101    'reqd' => "no",
102    'modegli' => "2" },
103      { 'name' => "site",
104    'desc' => "{import.site}",
105    'type' => "string",
106    'deft' => "",
107    'reqd' => "no",
108        'hiddengli' => "yes" },
109      { 'name' => "manifest",
110    'desc' => "{import.manifest}",
111    'type' => "string",
112    'deft' => "",
113    'reqd' => "no",
114        'hiddengli' => "yes" } ,
115     { 'name' => "incremental",
116    'desc' => "{import.incremental}",
117    'type' => "flag",
118    'hiddengli' => "yes" },
119      { 'name' => "keepold",
120    'desc' => "{import.keepold}",
121    'type' => "flag",
122    'reqd' => "no",
123    'hiddengli' => "yes" },
124      { 'name' => "removeold",
125    'desc' => "{import.removeold}",
126    'type' => "flag",
127    'reqd' => "no",
128    'hiddengli' => "yes" },
129      { 'name' => "language",
130    'desc' => "{scripts.language}",
131    'type' => "string",
132    'reqd' => "no",
133    'hiddengli' => "yes" },
134      { 'name' => "maxdocs",
135    'desc' => "{import.maxdocs}",
136    'type' => "int",
137    'reqd' => "no",
138    # parsearg left "" as default
139    #'deft' => "-1",
140    'range' => "1,",
141    'modegli' => "1" },
142       { 'name' => "debug",
143    'desc' => "{import.debug}",
144    'type' => "flag",
145    'reqd' => "no",
146        'hiddengli' => "yes" },
147      { 'name' => "faillog",
148    'desc' => "{import.faillog}",
149    'type' => "string",
150    # parsearg left "" as default
151    #'deft' => &FileUtils::filenameConcatenate("<collectdir>", "colname", "etc", "fail.log"),
152    'deft' => "",
153    'reqd' => "no",
154        'modegli' => "3" },
155       { 'name' => "out",
156    'desc' => "{import.out}",
157    'type' => "string",
158    'deft' => "STDERR",
159    'reqd' => "no",
160        'hiddengli' => "yes" },
161      { 'name' => "statsfile",
162    'desc' => "{import.statsfile}",
163    'type' => "string",
164    'deft' => "STDERR",
165    'reqd' => "no",
166        'hiddengli' => "yes" },
167      { 'name' => "verbosity",
168    'desc' => "{import.verbosity}",
169    'type' => "int",
170    'range' => "0,",
171    # parsearg left "" as default
172    # 'deft' => "2",
173    'reqd' => "no",
174    'modegli' => "3" },
175      { 'name' => "gli",
176    'desc' => "{scripts.gli}",
177    'type' => "flag",
178    'reqd' => "no",
179    'hiddengli' => "yes" },
180      { 'name' => "xml",
181    'desc' => "{scripts.xml}",
182    'type' => "flag",
183    'reqd' => "no",
184    'hiddengli' => "yes" },
185
186];
187
188sub new
189{
190    my $class = shift (@_);
191    my ($mode,$argv,$options,$opt_listall_options) = @_;
192
193    my $self = { 'xml' => 0, 'mode' => $mode };
194
195    # general options available to all plugins
196    my $arguments = $options->{'args'};
197    my $intArgLeftinAfterParsing = parse2::parse($argv,$arguments,$self,"allow_extra_options");
198    # Parse returns -1 if something has gone wrong
199    if ($intArgLeftinAfterParsing == -1)
200    {
201    &PrintUsage::print_txt_usage($options, "{import.params}");
202    die "\n";
203    }
204   
205    my $language = $self->{'language'};
206    # If $language has been specified, load the appropriate resource bundle
207    # (Otherwise, the default resource bundle will be loaded automatically)
208    if ($language && $language =~ /\S/) {
209    &gsprintf::load_language_specific_resource_bundle($language);
210    }
211
212    if ($self->{'listall'}) {
213    if ($self->{'xml'}) {
214        &PrintUsage::print_xml_usage($opt_listall_options);
215    }
216    else
217    {
218        &PrintUsage::print_txt_usage($opt_listall_options,"{export.params}");
219    }
220    die "\n";
221    }
222
223
224    if ($self->{'xml'}) {
225        &PrintUsage::print_xml_usage($options);
226    print "\n";
227    return bless $self, $class;
228    }
229
230    if ($self->{'gli'}) { # the gli wants strings to be in UTF-8
231    &gsprintf::output_strings_in_UTF8;
232    }
233   
234    # now check that we had exactly one leftover arg, which should be
235    # the collection name. We don't want to do this earlier, because the
236    # -xml arg doesn't need a collection name.
237    # Also, if the user specified -h, then we output the usage.
238
239    if ($intArgLeftinAfterParsing != 1 || (@$argv && $argv->[0] =~ /^\-+h/))
240    {
241    ## TODO if we had an invalid arg, can we signal that to the user???
242    &PrintUsage::print_txt_usage($options, "{import.params}");
243    die "\n";
244    }
245
246    $self->{'close_out'} = 0;
247    my $out = $self->{'out'};
248    if ($out !~ /^(STDERR|STDOUT)$/i) {
249    open (OUT, ">$out") ||
250        (&gsprintf(STDERR, "{common.cannot_open_output_file}: $!\n", $out) && die);
251    $out = 'inexport::OUT';
252    $self->{'close_out'} = 1;
253    }
254    $out->autoflush(1);
255    $self->{'out'} = $out;
256
257    # @ARGV should be only one item, the name of the collection
258    $self->{'collection'} = shift @$argv;
259
260    # Unless otherwise stated all manifests are considered version 1---where
261    # they act more like an advanced process expression---as compared to newer
262    # manifest files that act as an explicit (and exhaustive) list of files to
263    # process [jmt12]
264    $self->{'manifest_version'} = 1;
265
266    return bless $self, $class;
267}
268
269# Simplified version of the constructor for use with CGI scripts
270sub newCGI
271{
272    my $class = shift (@_);
273    my ($mode,$collect,$gsdl_cgi,$opt_site) = @_;
274
275    my $self = { 'xml' => 0, 'mode' => $mode };
276
277    $self->{'out'} = STDERR;
278   
279    if (defined $gsdl_cgi) {
280        $self->{'site'} = $opt_site;
281        my $collect_dir = $gsdl_cgi->get_collection_dir($opt_site);
282        $self->{'collectdir'} = $collect_dir;
283    }
284    else { 
285        $self->{'site'} = "";
286        $self->{'collectdir'} = &FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"collect");
287    }
288    $self->{'faillog'} = "";
289   
290    $self->{'collection'} = $collect;
291
292    return bless $self, $class;
293}
294sub get_collection
295{
296    my $self = shift @_;
297   
298    return $self->{'collection'};
299}
300
301
302sub read_collection_cfg
303{
304    my $self = shift @_;
305    my ($collection,$options) = @_;
306
307    my $collectdir = $self->{'collectdir'};
308    my $site       = $self->{'site'};
309    my $out        = $self->{'out'};
310     
311    if (($collection = &colcfg::use_collection($site, $collection, $collectdir)) eq "") {
312    &PrintUsage::print_txt_usage($options, "{import.params}");
313    die "\n";
314    }
315
316    # set gs_version 2/3
317    $self->{'gs_version'} = "2";
318    if ((defined $site) && ($site ne "")) {
319    # gs3
320    $self->{'gs_version'} = "3";
321    }
322
323    # add collection's perllib dir into include path in
324    # case we have collection specific modules
325    &util::augmentINC(&FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, 'perllib'));
326
327    # check that we can open the faillog
328    my $faillog = $self->{'faillog'};
329    if ($faillog eq "") {
330    $faillog = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "etc", "fail.log");
331    }
332    open (FAILLOG, ">$faillog") ||
333    (&gsprintf(STDERR, "{import.cannot_open_fail_log}\n", $faillog) && die);
334
335   
336    my $faillogname = $faillog;
337    $faillog = 'inexport::FAILLOG';
338    $faillog->autoflush(1);
339    $self->{'faillog'} = $faillog;
340    $self->{'faillogname'} = $faillogname;
341    $self->{'close_faillog'} = 1;
342
343    # Read in the collection configuration file.
344    my $gs_mode = "gs".$self->{'gs_version'}; #gs2 or gs3
345    my $config_filename = &colcfg::get_collect_cfg_name($out, $gs_mode);
346    my $collectcfg = &colcfg::read_collection_cfg ($config_filename, $gs_mode);
347
348    return ($config_filename,$collectcfg);
349}
350
351sub set_collection_options
352{
353    my $self = shift @_;
354    my ($collectcfg) = @_;
355
356    my $inexport_mode = $self->{'mode'};
357
358    my $verbosity  = $self->{'verbosity'};
359    my $debug      = $self->{'debug'};
360    my $importdir  = $self->{'importdir'};
361    my $archivedir = $self->{'archivedir'} || $self->{'exportdir'} || "";
362    my $out        = $self->{'out'};
363
364    # If the infodbtype value wasn't defined in the collect.cfg file, use the default
365    if (!defined($collectcfg->{'infodbtype'}))
366    {
367      $collectcfg->{'infodbtype'} = &dbutil::get_default_infodb_type();
368    }
369    if ($collectcfg->{'infodbtype'} eq "gdbm-txtgz") {
370    # we can't use the text version for archives dbs.
371    $collectcfg->{'infodbtype'} = "gdbm";
372    }
373
374    if (defined $collectcfg->{'importdir'} && $importdir eq "") {
375    $importdir = $collectcfg->{'importdir'};
376    }
377    if (defined $collectcfg->{'archivedir'} && $archivedir eq "") {
378    $archivedir = $collectcfg->{'archivedir'};
379    }
380    # fill in the default import and archives directories if none
381    # were supplied, turn all \ into / and remove trailing /
382    if ($importdir eq "")
383    {
384      $importdir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "import");
385    }
386    else
387    {
388      # Don't do this - it kills protocol prefixes
389      #$importdir =~ s/[\\\/]+/\//g;
390      #$importdir =~ s/\/$//;
391      # Do this instead
392      $importdir = &FileUtils::sanitizePath($importdir);
393    }
394    if (!&FileUtils::directoryExists($importdir))
395    {
396      &gsprintf($out, "{import.no_import_dir}\n\n", $importdir);
397      die "\n";
398    }
399    $self->{'importdir'} = $importdir;
400
401    if ($archivedir eq "") {
402    if ($inexport_mode eq "import") {
403        $archivedir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "archives");
404    }
405    elsif ($inexport_mode eq "export") {
406        $archivedir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "export");
407    }
408    else {
409        print STDERR "Warning: Unrecognized import/export mode '$inexport_mode'\n";
410        print STDERR "         Defaulting to 'archives' for file output\n";
411        $archivedir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "archives");
412    }
413    }
414
415    $archivedir = &FileUtils::sanitizePath($archivedir);
416    #$archivedir =~ s/[\\\/]+/\//g;
417    #$archivedir =~ s/\/$//;
418    $self->{'archivedir'} = $archivedir;
419
420    if ($verbosity !~ /\d+/) {
421    if (defined $collectcfg->{'verbosity'} && $collectcfg->{'verbosity'} =~ /\d+/) {
422        $verbosity = $collectcfg->{'verbosity'};
423    } else {
424        $verbosity = 2; # the default
425    }
426    }
427    $self->{'verbosity'} = $verbosity;
428
429    if (defined $collectcfg->{'manifest'} && $self->{'manifest'} eq "") {
430    $self->{'manifest'} = $collectcfg->{'manifest'};
431    }
432
433    if (defined $collectcfg->{'gzip'} && !$self->{'gzip'}) {
434    if ($collectcfg->{'gzip'} =~ /^true$/i) {
435        $self->{'gzip'} = 1;
436    }
437    }
438
439    if ($self->{'maxdocs'} !~ /\-?\d+/) {
440    if (defined $collectcfg->{'maxdocs'} && $collectcfg->{'maxdocs'} =~ /\-?\d+/) {
441        $self->{'maxdocs'} = $collectcfg->{'maxdocs'};
442    } else {
443        $self->{'maxdocs'} = -1; # the default
444    }
445    }
446
447   
448
449    if (!defined $self->{'OIDtype'}
450    || ($self->{'OIDtype'} !~ /^(hash|hash_on_full_filename|incremental|assigned|filename|dirname|full_filename)$/ )) {
451    # OIDtype was either not defined on the command-line, or it was not one of the recognized values
452    if (defined $collectcfg->{'OIDtype'}
453        && $collectcfg->{'OIDtype'} =~ /^(hash|hash_on_full_filename|incremental|assigned|filename|dirname|full_filename)$/) {
454        $self->{'OIDtype'} = $collectcfg->{'OIDtype'};
455    } else {
456        $self->{'OIDtype'} = "hash"; # the default
457    }
458    }
459
460    if ((!defined $self->{'OIDmetadata'}) || ($self->{'OIDmetadata'} eq "")) {
461    if (defined $collectcfg->{'OIDmetadata'}) {
462        $self->{'OIDmetadata'} = $collectcfg->{'OIDmetadata'};
463    } else {
464        $self->{'OIDmetadata'} = "dc.Identifier"; # the default
465    }
466    }
467
468    if (defined $collectcfg->{'debug'} && $collectcfg->{'debug'} =~ /^true$/i) {
469    $self->{'debug'} = 1;
470    }
471    if (defined $collectcfg->{'gli'} && $collectcfg->{'gli'} =~ /^true$/i) {
472    $self->{'gli'} = 1;
473    }
474    $self->{'gli'} = 0 unless defined $self->{'gli'};
475       
476    # check keepold and removeold
477    my $checkdir = ($inexport_mode eq "import") ? "archives" : "export";
478
479    my ($removeold, $keepold, $incremental, $incremental_mode)
480    = &scriptutil::check_removeold_and_keepold($self->{'removeold'}, $self->{'keepold'},
481                           $self->{'incremental'}, $checkdir,
482                           $collectcfg);
483
484    $self->{'removeold'}        = $removeold;
485    $self->{'keepold'}          = $keepold;
486    $self->{'incremental'}      = $incremental;
487    $self->{'incremental_mode'} = $incremental_mode;
488
489    # Since this wasted my morning, let's at least warn a user that manifest
490    # files now *only* work if keepold is set [jmt12]
491    if ($self->{'manifest'} && !$self->{'keepold'})
492    {
493      print STDERR "Warning: -manifest flag should not be specified without also setting -keepold or -incremental\n";
494    }
495    }
496
497sub process_files
498{
499    my $self = shift @_;
500    my ($config_filename,$collectcfg) = @_;
501
502    my $inexport_mode = $self->{'mode'};
503
504    my $verbosity   = $self->{'verbosity'};
505    my $debug       = $self->{'debug'};
506
507    my $importdir   = $self->{'importdir'};
508    my $archivedir = $self->{'archivedir'} || $self->{'exportdir'};
509
510    my $incremental = $self->{'incremental'};
511    my $incremental_mode = $self->{'incremental_mode'};
512
513    my $gs_version = $self->{'gs_version'};
514
515    my $removeold   = $self->{'removeold'};
516    my $keepold     = $self->{'keepold'};
517
518    my $saveas      = $self->{'saveas'};
519    my $saveas_options = $self->{'saveas_options'};
520    my $OIDtype     = $self->{'OIDtype'};
521    my $OIDmetadata = $self->{'OIDmetadata'};
522
523    my $out         = $self->{'out'};
524    my $faillog     = $self->{'faillog'};
525
526    my $maxdocs     = $self->{'maxdocs'};
527    my $gzip        = $self->{'gzip'};
528    my $groupsize   = $self->{'groupsize'};
529    my $sortmeta    = $self->{'sortmeta'};
530
531    my $removeprefix = $self->{'removeprefix'};
532    my $removesuffix = $self->{'removesuffix'};
533
534    my $gli          = $self->{'gli'};
535
536    # related to export
537    my $xsltfile         = $self->{'xsltfile'};
538    my $group_marc       = $self->{'group_marc'};
539    my $mapping_file     = $self->{'mapping_file'};
540    my $xslt_mets        = $self->{'xslt_mets'};
541    my $xslt_txt         = $self->{'xslt_txt'};
542    my $fedora_namespace = $self->{'fedora_namespace'};
543    my $metadata_prefix  = $self->{'metadata_prefix'};
544
545    if ($inexport_mode eq "import") {
546    print STDERR "<Import>\n" if $gli;
547    }
548    else {
549    print STDERR "<export>\n" if $gli;
550    }
551
552    my $manifest_lookup = new manifest($collectcfg->{'infodbtype'},$archivedir);
553    if ($self->{'manifest'} ne "") {
554    my $manifest_filename = $self->{'manifest'};
555
556    if (!&FileUtils::isFilenameAbsolute($manifest_filename)) {
557        $manifest_filename = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, $manifest_filename);
558    }
559
560        $self->{'manifest'} = &FileUtils::sanitizePath($self->{'manifest'});
561    #$self->{'manifest'} =~ s/[\\\/]+/\//g;
562    #$self->{'manifest'} =~ s/\/$//;
563
564    $manifest_lookup->parse($manifest_filename);
565
566        # manifests may now include a version number [jmt12]
567        $self->{'manifest_version'} = $manifest_lookup->get_version();
568    }
569
570    my $manifest = $self->{'manifest'};
571
572    # load all the plugins
573    my $plugins = [];
574    if (defined $collectcfg->{'plugin'}) {
575    $plugins = $collectcfg->{'plugin'};
576    }
577
578    my $plugin_incr_mode = $incremental_mode;
579    if ($manifest ne "") {
580    # if we have a manifest file, then we pretend we are fully incremental for plugins
581    $plugin_incr_mode = "all";
582    }
583    #some global options for the plugins
584    my @global_opts = ();
585
586    my $pluginfo = &plugin::load_plugins ($plugins, $verbosity, $out, $faillog, \@global_opts, $plugin_incr_mode, $gs_version);
587    if (scalar(@$pluginfo) == 0) {
588    &gsprintf($out, "{import.no_plugins_loaded}\n");
589    die "\n";
590    }
591
592    # remove the old contents of the archives directory (and tmp
593    # directory) if needed
594
595    if ($removeold) {
596    if (&FileUtils::directoryExists($archivedir)) {
597        &gsprintf($out, "{import.removing_archives}\n");
598        &FileUtils::removeFilesRecursive($archivedir);
599    }
600    my $tmpdir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "tmp");
601    $tmpdir =~ s/[\\\/]+/\//g;
602    $tmpdir =~ s/\/$//;
603    if (&FileUtils::directoryExists($tmpdir)) {
604        &gsprintf($out, "{import.removing_tmpdir}\n");
605        &FileUtils::removeFilesRecursive($tmpdir);
606    }
607    }
608
609    # create the archives dir if needed
610    &FileUtils::makeAllDirectories($archivedir);
611
612    # read the archive information file
613
614    # BACKWARDS COMPATIBILITY: Just in case there are old .ldb/.bdb files (won't do anything for other infodbtypes)
615    &util::rename_ldb_or_bdb_file(&FileUtils::filenameConcatenate($archivedir, "archiveinf-doc"));
616    &util::rename_ldb_or_bdb_file(&FileUtils::filenameConcatenate($archivedir, "archiveinf-src"));
617
618    # When we make these initial calls to determine the archive information doc
619    # and src databases we pass through a '1' to indicate this is the first
620    # time we are referring to these databases. When using dynamic dbutils
621    # (available in extensions) this indicates to some database types (for
622    # example, persistent servers) that this is a good time to perform any
623    # one time initialization. The argument has no effect on vanilla dbutils
624    # [jmt12]
625    my $perform_firsttime_init = 1;
626    my $arcinfo_doc_filename = &dbutil::get_infodb_file_path($collectcfg->{'infodbtype'}, "archiveinf-doc", $archivedir, $perform_firsttime_init);
627    my $arcinfo_src_filename = &dbutil::get_infodb_file_path($collectcfg->{'infodbtype'}, "archiveinf-src", $archivedir, $perform_firsttime_init);
628
629    my $archive_info = new arcinfo ($collectcfg->{'infodbtype'});
630    $archive_info->load_info ($arcinfo_doc_filename);
631
632    if ($manifest eq "") {
633    # Load in list of files in import folder from last import (if present)
634    $archive_info->load_prev_import_filelist ($arcinfo_src_filename);
635    }
636
637    ####Use Plugout####
638    my $plugout;
639
640    if ($inexport_mode eq "import") {
641    if (defined $collectcfg->{'plugout'}) {
642        # If a plugout was specified in the collect.cfg file, assume it is sensible
643        # We can't check the name because it could be anything, if it is a custom plugout
644        $plugout = $collectcfg->{'plugout'};
645    }
646    else{
647        if ($saveas !~ /^(GreenstoneXML|GreenstoneMETS)$/) {
648        push @$plugout,"GreenstoneXMLPlugout";
649        }
650        else{
651        push @$plugout,$saveas."Plugout";
652        }
653    }
654
655    }
656    else {
657    if (defined $collectcfg->{'plugout'} && $collectcfg->{'plugout'} =~ /^(.*METS|DSpace|MARCXML)Plugout/) {
658        $plugout = $collectcfg->{'plugout'};
659    }
660    else{
661        if ($saveas !~ /^(GreenstoneMETS|FedoraMETS|DSpace|MARCXML)$/) {
662        push @$plugout,"GreenstoneMETSPlugout";
663        }
664        else{
665        push @$plugout,$saveas."Plugout";
666        }
667    }
668    }
669   
670    my $plugout_name = $plugout->[0];
671
672    if ($inexport_mode eq "export" && defined $saveas_options) {
673    my @user_plugout_options = split(" ", $saveas_options);
674    push @$plugout, @user_plugout_options;
675    }
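    # For example (values are illustrative only), an export.pl run with
    # -saveas MARCXML -saveas_options "-group -mapping_file mapping.txt"
    # would push ("-group", "-mapping_file", "mapping.txt") onto @$plugout
    # here, after the MARCXMLPlugout name selected above.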
676    push @$plugout,("-output_info",$archive_info)  if (defined $archive_info);
677    push @$plugout,("-verbosity",$verbosity)       if (defined $verbosity);
678    push @$plugout,("-debug")                      if ($debug);
679    push @$plugout,("-gzip_output")                if ($gzip);
680    push @$plugout,("-output_handle",$out)         if (defined $out);
681
682    push @$plugout,("-xslt_file",$xsltfile)        if (defined $xsltfile && $xsltfile ne "");
683
684    if ($inexport_mode eq "import") {
685    if ($plugout_name =~ m/^GreenstoneXMLPlugout$/) {
686        push @$plugout,("-group_size",$groupsize)      if (defined $groupsize);
687    }
688    }
689    if ($plugout_name =~ m/^MARCXMLPlugout$/) {
690    push @$plugout,("-group")                      if ($group_marc);
691    push @$plugout,("-mapping_file",$mapping_file) if (defined $mapping_file && $mapping_file ne "");
692    }
693    if ($plugout_name =~ m/^.*METSPlugout$/) {
694    push @$plugout,("-xslt_mets",$xslt_mets)       if (defined $xslt_mets && $xslt_mets ne "");
695    push @$plugout,("-xslt_txt",$xslt_txt)         if (defined $xslt_txt && $xslt_txt ne "");
696    }
697
698    if ($plugout_name eq "FedoraMETSPlugout") {
699    push @$plugout,("-fedora_namespace",$fedora_namespace) if (defined $fedora_namespace && $fedora_namespace ne "");
700    }
701   
702    if ($plugout_name eq "DSpacePlugout") {
703    push @$plugout,("-metadata_prefix",$metadata_prefix) if (defined $metadata_prefix && $metadata_prefix ne "");   
704    }
705
706    my $processor = &plugout::load_plugout($plugout);
707    $processor->setoutputdir ($archivedir);
708    $processor->set_sortmeta ($sortmeta, $removeprefix, $removesuffix) if defined $sortmeta;
709
710    $processor->set_OIDtype ($OIDtype, $OIDmetadata);
711   
712    &plugin::begin($pluginfo, $importdir, $processor, $maxdocs, $gli);
713   
714    if ($removeold) {
715        # occasionally, plugins may want to do something on remove
716        # old, eg pharos image indexing
717    &plugin::remove_all($pluginfo, $importdir, $processor, $maxdocs, $gli);
718    }
719
720    # process the import directory
721    my $block_hash = {};
722    $block_hash->{'new_files'} = {};
723    $block_hash->{'reindex_files'} = {};
724    # all of these are set somewhere else, so it's more readable to define them
725    # here [jmt12]
726    $block_hash->{'all_files'} = {};
727    $block_hash->{'deleted_files'} = {};
728    $block_hash->{'file_blocks'} = {};
729    $block_hash->{'metadata_files'} = {};
730    $block_hash->{'shared_fileroot'} = '';
731    # a new flag so we can tell we had a manifest way down in the plugins
732    # [jmt12]
733    $block_hash->{'manifest'} = 'false';
734    my $metadata = {};
735   
736    # global blocking pass may set up some metadata
737    # - when we have a newer manifest file we don't do this -unless- the
738    #   collection configuration indicates this collection contains complex
739    #   (inherited) metadata [jmt12]
740    if ($manifest eq '' || (defined $collectcfg->{'complexmeta'} && $collectcfg->{'complexmeta'} eq 'true'))
741    {
742      &plugin::file_block_read($pluginfo, $importdir, "", $block_hash, $metadata, $gli);
743    }
744    else
745    {
746      print STDERR "Skipping global file scan due to manifest and complexmeta configuration\n";
747    }
748
749    if ($manifest ne "") {
750
751      # mark that we are using a manifest - information that might be needed
752      # down in plugins (for instance DirectoryPlugin)
753      $block_hash->{'manifest'} = $self->{'manifest_version'};
754
755    #
756    # 1. Process delete files first
757    #
758    my @deleted_files = keys %{$manifest_lookup->{'delete'}};
759    my @full_deleted_files = ();
760
761    # ensure all filenames are absolute
762    foreach my $df (@deleted_files) {
763        my $full_df =
764        (&FileUtils::isFilenameAbsolute($df))
765        ? $df
766        : &FileUtils::filenameConcatenate($importdir,$df);
767
768        if (-d $full_df) {
769        &add_dir_contents_to_list($full_df, \@full_deleted_files);
770        } else {
771        push(@full_deleted_files,$full_df);
772        }
773    }
774   
775    &plugin::remove_some($pluginfo, $collectcfg->{'infodbtype'}, $archivedir, \@full_deleted_files);
776    mark_docs_for_deletion($archive_info,{},
777                   \@full_deleted_files,
778                   $archivedir, $verbosity, "delete");
779
780
781    #
782    # 2. Now files for reindexing
783    #
784
785    my @reindex_files = keys %{$manifest_lookup->{'reindex'}};
786    my @full_reindex_files = ();
787    # ensure all filenames are absolute
788    foreach my $rf (@reindex_files) {       
789        my $full_rf =
790        (&FileUtils::isFilenameAbsolute($rf))
791        ? $rf
792        : &FileUtils::filenameConcatenate($importdir,$rf);
793
794        if (-d $full_rf) {
795        &add_dir_contents_to_list($full_rf, \@full_reindex_files);
796        } else {
797        push(@full_reindex_files,$full_rf);
798        }
799    }
800   
801    &plugin::remove_some($pluginfo, $collectcfg->{'infodbtype'}, $archivedir, \@full_reindex_files);
802    mark_docs_for_deletion($archive_info,{},\@full_reindex_files, $archivedir,$verbosity, "reindex");
803
804    # And now, to ensure the new version of the file is processed by the
805    # appropriate plugin, we need to add it to the block_hash reindex list
806    foreach my $full_rf (@full_reindex_files) {
807        $block_hash->{'reindex_files'}->{$full_rf} = 1;
808    }
809
810
811    #
812    # 3. Now finally any new files - add to block_hash new_files list
813    #
814
815    my @new_files = keys %{$manifest_lookup->{'index'}};
816    my @full_new_files = ();
817
818    foreach my $nf (@new_files) {
819        # ensure filename is absolute
820        my $full_nf =
821        (&FileUtils::isFilenameAbsolute($nf))
822        ? $nf
823        : &FileUtils::filenameConcatenate($importdir,$nf);
824
825        if (-d $full_nf) {
826        &add_dir_contents_to_list($full_nf, \@full_new_files);
827        } else {
828        push(@full_new_files,$full_nf);
829        }
830    }
831
832    my $arcinfo_src_filename = &dbutil::get_infodb_file_path($collectcfg->{'infodbtype'}, "archiveinf-src", $archivedir);
833      # need to check this file exists before trying to read it - in the past
834      # it wasn't possible to have a manifest unless keepold was also set, so
835      # you were pretty much guaranteed arcinfo existed
836      # [jmt12]
837      # @todo &FileUtils::fileExists($arcinfo_src_filename) [jmt12]
838      if (-e $arcinfo_src_filename)
839      {
840    my $arcinfodb_map = {};
841    &dbutil::read_infodb_file($collectcfg->{'infodbtype'}, $arcinfo_src_filename, $arcinfodb_map);
842    foreach my $f (@full_new_files) {
843        my $rel_f = &util::abspath_to_placeholders($f);
844
845        # check that we haven't seen it already
846        if (defined $arcinfodb_map->{$rel_f}) {
847        # TODO make better warning
848        print STDERR "Warning: $f ($rel_f) already in src archive\n";
849        } else {
850        $block_hash->{'new_files'}->{$f} = 1;
851        }
852    }
853
854    undef $arcinfodb_map;
855      }
856      # no existing files - so we can just add all the files [jmt12]
857      else
858      {
859        foreach my $f (@full_new_files)
860        {
861          $block_hash->{'new_files'}->{$f} = 1;
862        }
863      }
864
865      # If we are not using complex inherited metadata (and thus have skipped
866      # the global file scan) we need to at least check for a matching
867      # metadata.xml for the files being indexed/reindexed
868      # - unless we are using the newer version of Manifests, which are treated
869      #   verbatim, and should have a metadata element for metadata files (so
870      #   we can explicitly process metadata files other than metadata.xml)
871      # [jmt12]
872      if ($self->{'manifest_version'} < 2 && (!defined $collectcfg->{'complexmeta'} || $collectcfg->{'complexmeta'} ne 'true'))
873      {
874        my @all_files_to_import = (keys %{$block_hash->{'reindex_files'}}, keys %{$block_hash->{'new_files'}});
875        foreach my $file_to_import (@all_files_to_import)
876        {
877          my $metadata_xml_path = $file_to_import;
878          $metadata_xml_path =~ s/[^\\\/]*$/metadata.xml/;
879          if (&FileUtils::fileExists($metadata_xml_path))
880          {
881            &plugin::file_block_read($pluginfo, '', $metadata_xml_path, $block_hash, $metadata, $gli);
882          }
883        }
884      }
885
886      # new version manifest files explicitly list metadata files to be
887      # processed (ignoring complexmeta if set)
888      # [jmt12]
889      if ($self->{'manifest_version'} > 1)
890      {
891        # Process metadata files
892        foreach my $file_to_import (keys %{$block_hash->{'reindex_files'}}, keys %{$block_hash->{'new_files'}})
893        {
894          $self->perform_process_files($manifest, $pluginfo, '', $file_to_import, $block_hash, $metadata, $processor, $maxdocs);
895        }
896      }
897    }
898    else {
899    # if incremental, we read through the import folder to see what's changed.
900
901    if ($incremental || $incremental_mode eq "onlyadd") {
902        prime_doc_oid_count($archivedir);
903
904        # Can now work out which files were new, already existed, and have
905        # been deleted
906       
907        new_vs_old_import_diff($archive_info,$block_hash,$importdir,
908                   $archivedir,$verbosity,$incremental_mode);
909       
910        my @new_files = sort keys %{$block_hash->{'new_files'}};
911        if (scalar(@new_files) > 0) {
912        print STDERR "New files and modified metadata files since last import:\n  ";
913        print STDERR join("\n  ",@new_files), "\n";
914        }
915
916        if ($incremental) {
917               # only look for deletions if we are truly incremental
918        my @deleted_files = sort keys %{$block_hash->{'deleted_files'}};
919        # Filter out any in gsdl/tmp area
920        my @filtered_deleted_files = ();
921        my $gsdl_tmp_area = &FileUtils::filenameConcatenate($ENV{'GSDLHOME'}, "tmp");
922        my $collect_tmp_area = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'}, "tmp");
923        $gsdl_tmp_area = &util::filename_to_regex($gsdl_tmp_area);
924        $collect_tmp_area = &util::filename_to_regex($collect_tmp_area);
925                 
926        foreach my $df (@deleted_files) {
927            next if ($df =~ m/^$gsdl_tmp_area/);
928            next if ($df =~ m/^$collect_tmp_area/);
929           
930            push(@filtered_deleted_files,$df);
931        }       
932       
933
934        @deleted_files = @filtered_deleted_files;
935       
936        if (scalar(@deleted_files)>0) {
937            print STDERR "Files deleted since last import:\n  ";
938            print STDERR join("\n  ",@deleted_files), "\n";
939       
940       
941            &plugin::remove_some($pluginfo, $collectcfg->{'infodbtype'}, $archivedir, \@deleted_files);
942           
943            mark_docs_for_deletion($archive_info,$block_hash,\@deleted_files, $archivedir,$verbosity, "delete");
944        }
945       
946        my @reindex_files = sort keys %{$block_hash->{'reindex_files'}};
947       
948        if (scalar(@reindex_files)>0) {
949            print STDERR "Files to reindex since last import:\n  ";
950            print STDERR join("\n  ",@reindex_files), "\n";
951            &plugin::remove_some($pluginfo, $collectcfg->{'infodbtype'}, $archivedir, \@reindex_files);
952            mark_docs_for_deletion($archive_info,$block_hash,\@reindex_files, $archivedir,$verbosity, "reindex");
953        }
954               
955        }       
956    }
957    }
958
959    # Check for the existence of the file that's to contain earliestDatestamp in the archives dir.
960    # Do nothing if the file already exists (file exists on incremental build).
961    # If the file doesn't exist, as happens on a full build, create it and write the current datestamp into it.
962    # In buildcol, read the file's contents and set earliestDatestamp in GS2's build.cfg / GS3's buildconfig.xml.
963    # In doc.pm we have set_oaiLastModified, similar to set_lastmodified, and create the doc fields
964    # oailastmodified and oailastmodifieddate
965    my $earliestDatestampFile = &FileUtils::filenameConcatenate($archivedir, "earliestDatestamp");
966    if (!-f $earliestDatestampFile && -d $archivedir) {
967    my $current_time_in_seconds = time; # in seconds
968
969    if(open(FOUT, ">$earliestDatestampFile")) {
970        # || (&gsprintf(STDERR, "{common.cannot_open}: $!\n", $earliestDatestampFile) && die);
971        print FOUT $current_time_in_seconds;
972        close(FOUT);
973    }
974    else {
975        &gsprintf(STDERR, "{import.cannot_write_earliestdatestamp}\n", $earliestDatestampFile);
976    }
977
978    }
979
980   
981    $self->perform_process_files($manifest, $pluginfo, $importdir, '', $block_hash, $metadata, $processor, $maxdocs);
982
983    if ($saveas eq "FedoraMETS") {
984    # create collection "doc obj" for Fedora that contains
985    # collection-level metadata
986   
987    my $doc_obj = new doc($config_filename,"nonindexed_doc","none");
988    $doc_obj->set_OID("collection");
989   
990    my $col_name = undef;
991    my $col_meta = $collectcfg->{'collectionmeta'};
992   
993    if (defined $col_meta) {       
994        store_collectionmeta($col_meta,"collectionname",$doc_obj); # in GS3 this is a collection's name
995        store_collectionmeta($col_meta,"collectionextra",$doc_obj); # in GS3 this is a collection's description     
996    }
997    $processor->process($doc_obj);
998    }
999
1000    &plugin::end($pluginfo, $processor);
1001
1002    &plugin::deinit($pluginfo, $processor);
1003
1004    # Store the value of OIDCount (used in doc.pm) so it can be
1005    # restored correctly to this value on an incremental build
1006    # - this OIDcount file should only be generated for numerical oids [jmt12]
1007    if ($self->{'OIDtype'} eq 'incremental')
1008    {
1009      store_doc_oid_count($archivedir);
1010    }
1011
1012    # signal to the processor (plugout) that we have finished processing - if we are group processing, then the final output file needs closing.
1013    $processor->close_group_output() if $processor->is_group();
1014
1015#    if ($inexport_mode eq "import") {
1016    # write out the archive information file
1017    # for backwards compatibility with the archives.inf file
1018    if ($arcinfo_doc_filename =~ m/(contents)|(\.inf)$/) {
1019        $archive_info->save_info($arcinfo_doc_filename);
1020    }
1021    else {
1022        $archive_info->save_revinfo_db($arcinfo_src_filename);
1023    }
1024#    }
1025    return $pluginfo;
1026}
1027
1028# @function perform_process_files()
1029# While process_files() above prepares the system to import files, this is the
1030# function that actually initiates the plugin pipeline to process the files.
1031# This function can therefore be overridden in subclasses of inexport.pm should
1032# they wish to do different or further processing
1033# @author jmt12
1034sub perform_process_files
1035{
1036  my $self = shift(@_);
1037  my ($manifest, $pluginfo, $importdir, $file_to_import, $block_hash, $metadata, $processor, $maxdocs) = @_;
1038  my $gli = $self->{'gli'};
1039  # specific file to process - via manifest version 2+
1040  if ($file_to_import ne '')
1041  {
1042    &plugin::read ($pluginfo, '', $file_to_import, $block_hash, $metadata, $processor, $maxdocs, 0, $gli);
1043  }
1044  # global file scan - if we are using a new version manifest, files would have
1045  # been read above. Older manifests use extra settings in the $block_hash to
1046  # control what is imported, while non-manifest imports use a regular
1047  # $block_hash (so obeying process_exp and block_exp) [jmt12]
1048  elsif ($manifest eq '' || $self->{'manifest_version'} < 2)
1049  {
1050    &plugin::read ($pluginfo, $importdir, '', $block_hash, $metadata, $processor, $maxdocs, 0, $gli);
1051  }
1052  else
1053  {
1054    print STDERR "Skipping perform_process_files() due to manifest presence and version\n";
1055  }
1056}
1057# perform_process_files()
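# A hypothetical sketch of a subclass overriding perform_process_files(), as
# the comment above suggests (the package name and extra processing are
# illustrative, not part of Greenstone):
#
#   package custominexport;
#   our @ISA = ('inexport');
#
#   sub perform_process_files
#   {
#       my $self = shift(@_);
#       my ($manifest, $pluginfo, $importdir, $file_to_import,
#           $block_hash, $metadata, $processor, $maxdocs) = @_;
#       # ... any extra pre-processing specific to the subclass goes here ...
#       $self->SUPER::perform_process_files($manifest, $pluginfo, $importdir,
#           $file_to_import, $block_hash, $metadata, $processor, $maxdocs);
#   }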
1058
1059# @function generate_statistics()
1060sub generate_statistics
1061{
1062  my $self = shift @_;
1063  my ($pluginfo) = @_;
1064
1065  my $inexport_mode = $self->{'mode'};
1066  my $out           = $self->{'out'};
1067  my $faillogname   = $self->{'faillogname'};
1068  my $gli           = $self->{'gli'};
1069
1070  &gsprintf($out, "\n");
1071  &gsprintf($out, "*********************************************\n");
1072  &gsprintf($out, "{$inexport_mode.complete}\n");
1073  &gsprintf($out, "*********************************************\n");
1074
1075  &plugin::write_stats($pluginfo, 'STDERR', $faillogname, $gli);
1076}
1077# generate_statistics()
1078
1079
1080# @function deinit()
1081# Close down any file handles that we opened (and hence are responsible for
1082# closing)
1083sub deinit
1084{
1085  my $self = shift(@_);
1086  close OUT if $self->{'close_out'};
1087  close FAILLOG if $self->{'close_faillog'};
1088}
1089# deinit()
1090
1091
1092sub store_collectionmeta
1093{
1094    my ($collectionmeta,$field,$doc_obj) = @_;
1095   
1096    my $section = $doc_obj->get_top_section();
1097   
1098    my $field_hash = $collectionmeta->{$field};
1099   
1100    foreach my $k (keys %$field_hash)
1101    {
1102    my $val = $field_hash->{$k};
1103   
1104    ### print STDERR "*** $k = $field_hash->{$k}\n";
1105   
1106    my $md_label = "ex.$field";
1107   
1108   
1109    if ($k =~ m/^\[l=(.*?)\]$/)
1110    {
1111       
1112        my $md_suffix = $1;
1113        $md_label .= "^$md_suffix";
1114    }
1115   
1116   
1117    $doc_obj->add_utf8_metadata($section,$md_label, $val);
1118   
1119    # see collConfigxml.pm: GS2's "collectionextra" is called "description" in GS3,
1120    # while "collectionname" in GS2 is called "name" in GS3.
1121    # Variable $nameMap variable in collConfigxml.pm maps between GS2 and GS3
1122    if (($md_label eq "ex.collectionname^en") || ($md_label eq "ex.collectionname"))
1123    {
1124        $doc_obj->add_utf8_metadata($section,"dc.Title", $val);
1125    }
1126   
1127    }
1128}
1129
1130
1131sub oid_count_file {
1132    my ($archivedir) = @_;
1133    return &FileUtils::filenameConcatenate($archivedir, "OIDcount");
1134}
1135
1136
1137sub prime_doc_oid_count
1138{
1139    my ($archivedir) = @_;
1140    my $oid_count_filename = &oid_count_file($archivedir);
1141
1142    if (-e $oid_count_filename) {
1143    if (open(OIDIN,"<$oid_count_filename")) {
1144        my $OIDcount = <OIDIN>;
1145        chomp $OIDcount;       
1146        close(OIDIN);
1147
1148        $doc::OIDcount = $OIDcount;     
1149    }
1150    else {     
1151        &gsprintf(STDERR, "{import.cannot_read_OIDcount}\n", $oid_count_filename);
1152    }
1153    }
1154   
1155}
1156
1157sub store_doc_oid_count
1158{
1159    # Use the file "OIDcount" in the archives directory to record
1160    # what value doc.pm got up to
1161
1162    my ($archivedir) = @_;
1163    my $oid_count_filename = &oid_count_file($archivedir);
1164
1165    # @todo $oidout = &FileUtils::openFileDescriptor($oid_count_filename, 'w') [jmt12]
1166    if (open(OIDOUT,">$oid_count_filename")) {
1167    print OIDOUT $doc::OIDcount, "\n";
1168       
1169    close(OIDOUT);
1170    }
1171    else {
1172    &gsprintf(STDERR, "{import.cannot_write_OIDcount}\n", $oid_count_filename);
1173    }
1174}
1175
1176
1177
1178sub new_vs_old_import_diff
1179{
1180    my ($archive_info,$block_hash,$importdir,$archivedir,$verbosity,$incremental_mode) = @_;
1181
1182    # Get the infodbtype value for this collection from the arcinfo object
1183    my $infodbtype = $archive_info->{'infodbtype'};
1184
1185    # in this method, we want to know if metadata files are modified or not.
1186    my $arcinfo_doc_filename = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $archivedir);
1187
1188    my $archiveinf_timestamp = -M $arcinfo_doc_filename;
1189
1190    # First convert all files to absolute form
1191    # This is to support the situation where the import folder is not
1192    # the default
1193   
1194    my $prev_all_files = $archive_info->{'prev_import_filelist'};
1195    my $full_prev_all_files = {};
1196
1197    foreach my $prev_file (keys %$prev_all_files) {
1198
1199    if (!&FileUtils::isFilenameAbsolute($prev_file)) {
1200        my $full_prev_file = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'},$prev_file);
1201        $full_prev_all_files->{$full_prev_file} = $prev_file;
1202    }
1203    else {
1204        $full_prev_all_files->{$prev_file} = $prev_file;
1205    }
1206    }
1207
1208
1209    # Figure out which are the new files, existing files and so
1210    # by implication the files from the previous import that are not
1211    # there any more => mark them for deletion
1212    foreach my $curr_file (keys %{$block_hash->{'all_files'}}) {
1213   
1214    my $full_curr_file = $curr_file;
1215
1216    # entry in 'all_files' is moved to either 'existing_files',
1217    # 'deleted_files', 'new_files', or 'new_or_modified_metadata_files'
1218
1219    if (!&FileUtils::isFilenameAbsolute($curr_file)) {
1220        # add in import dir to make absolute
1221        $full_curr_file = &FileUtils::filenameConcatenate($importdir,$curr_file);
1222    }
1223
1224    # figure out if new file or not
1225    if (defined $full_prev_all_files->{$full_curr_file}) {
1226        # delete it so that only files that need deleting are left
1227        delete $full_prev_all_files->{$full_curr_file};
1228       
1229        # had it before. is it a metadata file?
1230        if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
1231       
1232        # is it modified??
1233        if (-M $full_curr_file < $archiveinf_timestamp) {
1234            print STDERR "*** Detected a *modified metadata* file: $full_curr_file\n" if $verbosity >= 2;
1235            # it's newer than the last build
1236            $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
1237        }
1238        }
1239        else {
1240        if ($incremental_mode eq "all") {
1241           
1242            # had it before
1243            $block_hash->{'existing_files'}->{$full_curr_file} = 1;
1244           
1245        }
1246        else {
1247            # Warning in "onlyadd" mode, but had it before!
1248            print STDERR "Warning: File $full_curr_file previously imported.\n";
1249            print STDERR "         Treating as new file\n";
1250           
1251            $block_hash->{'new_files'}->{$full_curr_file} = 1;
1252           
1253        }
1254        }
1255    }
1256    else {
1257        if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
1258        # the new file is the special sort of file Greenstone uses
1259        # to attach metadata to src documents,
1260        # i.e. metadata.xml
1261        # (but note, the filename used is not constrained in
1262        # Greenstone to always be this)
1263
1264        print STDERR "*** Detected *new* metadata file: $full_curr_file\n" if $verbosity >= 2;
1265        $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
1266        }
1267        else {
1268        $block_hash->{'new_files'}->{$full_curr_file} = 1;
1269        }
1270    }
1271
1272   
1273    delete $block_hash->{'all_files'}->{$curr_file};
1274    }
1275
1276
1277
1278
1279    # Deal with complication of new or modified metadata files by forcing
1280    # everything from this point down in the file hierarchy to
1281    # be freshly imported. 
1282    #
1283    # This may mean files that have not changed are reindexed, but does
1284    # guarantee by the end of processing all new metadata is correctly
1285    # associated with the relevant document(s).
1286
1287    foreach my $new_mdf (keys %{$block_hash->{'new_or_modified_metadata_files'}}) {
1288    my ($fileroot,$situated_dir,$ext) = fileparse($new_mdf, "\\.[^\\.]+\$");
1289
1290    $situated_dir =~ s/[\\\/]+$//; # remove trailing slashes
1291    $situated_dir = &util::filename_to_regex($situated_dir); # need to escape windows slash \ and brackets in regular expression
1292   
1293    # Go through existing_files, and mark anything that is contained
1294    # within 'situated_dir' to be reindexed (in case some of the metadata
1295    # attaches to one of these files)
1296
1297    my $reindex_files = [];
1298
1299    foreach my $existing_f (keys %{$block_hash->{'existing_files'}}) {
1300   
1301        if ($existing_f =~ m/^$situated_dir/) {
1302
1303        print STDERR "**** Existing file $existing_f\nis located within\n$situated_dir\n";
1304
1305        push(@$reindex_files,$existing_f);
1306        $block_hash->{'reindex_files'}->{$existing_f} = 1;
1307        delete $block_hash->{'existing_files'}->{$existing_f};
1308
1309        }
1310    }
1311   
1312    # metadata file needs to be in new_files list so parsed by MetadataXMLPlug
1313    # (or equivalent)
1314    $block_hash->{'new_files'}->{$new_mdf} = 1;
1315
1316    }
1317
1318    # go through remaining existing files and work out what has changed and needs to be reindexed.
1319    my @existing_files = sort keys %{$block_hash->{'existing_files'}};
1320
1321    my $reindex_files = [];
1322
1323    foreach my $existing_filename (@existing_files) {
1324    if (-M $existing_filename < $archiveinf_timestamp) {
1325        # file is newer than last build
1326       
1327        my $existing_file = $existing_filename;
1328        #my $collectdir = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'});
1329
1330        #my $collectdir_resafe = &util::filename_to_regex($collectdir);
1331        #$existing_file =~ s/^$collectdir_resafe(\\|\/)?//;
1332       
1333        print STDERR "**** Reindexing existing file: $existing_file\n";
1334
1335        push(@$reindex_files,$existing_file);
1336        $block_hash->{'reindex_files'}->{$existing_filename} = 1;
1337    }
1338
1339    }
1340
1341   
1342    # By this point full_prev_all_files contains the files
1343    # mentioned in archiveinf-src.db that are not in the 'import'
1344    # folder (or whatever was specified through -importdir ...)
1345
1346    # This list can contain files that were created in the 'tmp' or
1347    # 'cache' areas (such as screen-size and thumbnail images).
1348    #
1349    # In building the final list of files to delete, we test to see if
1350    # each still exists on the filesystem and, if it does (unusual for a "normal"
1351    # file in import, but possible in the case of 'tmp' files),
1352    # suppress it from going into the final list
1353
1354    my $collectdir = $ENV{'GSDLCOLLECTDIR'};
1355
1356    my @deleted_files = values %$full_prev_all_files;
1357    map { my $curr_file = $_;
1358      my $full_curr_file = $curr_file;
1359
1360      if (!&FileUtils::isFilenameAbsolute($curr_file)) {
1361          # add in import dir to make absolute
1362
1363          $full_curr_file = &FileUtils::filenameConcatenate($collectdir,$curr_file);
1364      }
1365
1366
1367      if (!-e $full_curr_file) {
1368          $block_hash->{'deleted_files'}->{$curr_file} = 1;
1369      }
1370      } @deleted_files;
1371
1372
1373
1374}
1375
1376
1377# this is used to delete "deleted" docs, and to remove old versions of "changed" docs
1378# $mode is 'delete' or 'reindex'
1379sub mark_docs_for_deletion
1380{
1381    my ($archive_info,$block_hash,$deleted_files,$archivedir,$verbosity,$mode) = @_;
1382
1383    my $mode_text = "deleted from index";
1384    if ($mode eq "reindex") {
1385    $mode_text = "reindexed";
1386    }
1387
1388    # Get the infodbtype value for this collection from the arcinfo object
1389    my $infodbtype = $archive_info->{'infodbtype'};
1390
1391    my $arcinfo_doc_filename = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $archivedir);
1392    my $arcinfo_src_filename = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-src", $archivedir);
1393
1394
1395    # record files marked for deletion in arcinfo
1396    foreach my $file (@$deleted_files) {
1397    # use 'archiveinf-src' info database file to look up all the OIDs
1398    # that this file is used in (note in most cases, it's just one OID)
1399   
1400    my $relfile = &util::abspath_to_placeholders($file);
1401
1402    my $src_rec = &dbutil::read_infodb_entry($infodbtype, $arcinfo_src_filename, $relfile);
1403    my $oids = $src_rec->{'oid'};
1404    my $file_record_deleted = 0;
1405
1406    # delete the src record
1407    my $src_infodb_file_handle = &dbutil::open_infodb_write_handle($infodbtype, $arcinfo_src_filename, "append");
1408    &dbutil::delete_infodb_entry($infodbtype, $src_infodb_file_handle, $relfile);
1409    &dbutil::close_infodb_write_handle($infodbtype, $src_infodb_file_handle);
1410
1411
1412    foreach my $oid (@$oids) {
1413
1414        # find the source doc (the primary file that becomes this oid)
1415        my $doc_rec = &dbutil::read_infodb_entry($infodbtype, $arcinfo_doc_filename, $oid);
1416        my $doc_source_file = $doc_rec->{'src-file'}->[0];
1417        $doc_source_file = &util::placeholders_to_abspath($doc_source_file);
1418
1419        if (!&util::filename_is_absolute($doc_source_file)) {
1420        $doc_source_file = &FileUtils::filenameConcatenate($ENV{'GSDLCOLLECTDIR'},$doc_source_file);
1421        }
1422
1423        if ($doc_source_file ne $file) {
1424        # it's an associated or metadata file
1425       
1426        # mark the source doc for reimport, as one of its assoc files has changed or been deleted
1427        $block_hash->{'reindex_files'}->{$doc_source_file} = 1;
1428       
1429        }
1430        my $curr_status = $archive_info->get_status_info($oid);
1431        if (defined($curr_status) && (($curr_status ne "D"))) {
1432        if ($verbosity>1) {
1433            print STDERR "$oid ($doc_source_file) marked to be $mode_text on next buildcol.pl\n";
1434        }
1435        # mark oid for deletion (it will be deleted or reimported)
1436        $archive_info->set_status_info($oid,"D");
1437        my $val = &dbutil::read_infodb_rawentry($infodbtype, $arcinfo_doc_filename, $oid);
1438        $val =~ s/^<index-status>(.*)$/<index-status>D/m;
1439
1440        my $val_rec = &dbutil::convert_infodb_string_to_hash($val);
1441        my $doc_infodb_file_handle = &dbutil::open_infodb_write_handle($infodbtype, $arcinfo_doc_filename, "append");
1442
1443        &dbutil::write_infodb_entry($infodbtype, $doc_infodb_file_handle, $oid, $val_rec);
1444        &dbutil::close_infodb_write_handle($infodbtype, $doc_infodb_file_handle);
1445        }
1446    }
1447   
1448    }
1449
1450    # now go through and check that we haven't marked any primary
1451    # files for reindex (because their associated files have
1452    # changed/deleted) when they have been deleted themselves. only in
1453    # delete mode.
1454
1455    if ($mode eq "delete") {
1456    foreach my $file (@$deleted_files) {
1457        if (defined $block_hash->{'reindex_files'}->{$file}) {
1458        delete $block_hash->{'reindex_files'}->{$file};
1459        }
1460    }
1461    }
1462
1463
1464}
1465
1466sub add_dir_contents_to_list {
1467
1468    my ($dirname, $list) = @_;
1469 
1470    # Recurse over directory contents.
1471    my (@dir, $subfile);
1472   
1473    # find all the files in the directory
1474    if (!opendir (DIR, $dirname)) {
1475    print STDERR "inexport: WARNING - couldn't read directory $dirname\n";
1476    return -1; # error in processing
1477    }
1478    @dir = readdir (DIR);
1479    closedir (DIR);
1480   
1481    for (my $i = 0; $i < scalar(@dir); $i++) {
1482    my $subfile = $dir[$i];
1483    next if ($subfile =~ m/^\.\.?$/);
1484    next if ($subfile =~ /^\.svn$/);
1485    my $full_file = &FileUtils::filenameConcatenate($dirname, $subfile);
1486    if (-d $full_file) {
1487        &add_dir_contents_to_list($full_file, $list);
1488    } else {
1489        push (@$list, $full_file);
1490    }
1491    }
1492   
1493}
1494
1495   
14961;