1 | ###########################################################################
|
---|
2 | #
|
---|
3 | # BasePlugout.pm -- base class for all the plugout modules
|
---|
4 | # A component of the Greenstone digital library software
|
---|
5 | # from the New Zealand Digital Library Project at the
|
---|
6 | # University of Waikato, New Zealand.
|
---|
7 | #
|
---|
8 | # Copyright (C) 2006 New Zealand Digital Library Project
|
---|
9 | #
|
---|
10 | # This program is free software; you can redistribute it and/or modify
|
---|
11 | # it under the terms of the GNU General Public License as published by
|
---|
12 | # the Free Software Foundation; either version 2 of the License, or
|
---|
13 | # (at your option) any later version.
|
---|
14 | #
|
---|
15 | # This program is distributed in the hope that it will be useful,
|
---|
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
|
---|
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
---|
18 | # GNU General Public License for more details.
|
---|
19 | #
|
---|
20 | # You should have received a copy of the GNU General Public License
|
---|
21 | # along with this program; if not, write to the Free Software
|
---|
22 | # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
---|
23 | #
|
---|
24 | ###########################################################################
|
---|
25 |
|
---|
26 | package BasePlugout;
|
---|
27 |
|
---|
28 | eval {require bytes};
|
---|
29 |
|
---|
30 | use strict;
|
---|
31 | no strict 'subs';
|
---|
32 | no strict 'refs';
|
---|
33 |
|
---|
34 | use dbutil;
|
---|
35 | use gsprintf 'gsprintf';
|
---|
36 | use printusage;
|
---|
37 | use parse2;
|
---|
38 | use util;
|
---|
39 | use FileUtils;
|
---|
40 | use sorttools;
|
---|
41 |
|
---|
42 | # suppress the annoying "subroutine redefined" warning that various
|
---|
43 | # gets cause under perl 5.6
|
---|
44 | $SIG{__WARN__} = sub {warn($_[0]) unless ($_[0] =~ /Subroutine\s+\S+\sredefined/)};
|
---|
45 |
|
---|
# Command-line option declarations shared by every plugout.  Each entry uses
# the standard Greenstone option schema: 'name', 'desc' (a resource-bundle
# key resolved by gsprintf), 'type', optional 'deft' (default value),
# 'reqd' (required?), and 'hiddengli' (hidden from the GLI interface).
my $arguments = [
      { 'name' => "site",
	'desc' => "{BasPlugout.site}",
	'type' => "string",
	'reqd' => "no",
	'hiddengli' => "yes" },
      { 'name' => "xslt_file",
	'desc' => "{BasPlugout.xslt_file}",
	'type' => "string",
	'reqd' => "no",
	'deft' => "",
	'hiddengli' => "no"},
      { 'name' => "subdir_split_length",
	'desc' => "{BasPlugout.subdir_split_length}",
	'type' => "int",
	'reqd' => "no",
	'deft' => "8",
	'hiddengli' => "no"},
      { 'name' => "subdir_hash_prefix",
	'desc' => "{BasPlugout.subdir_hash_prefix}",
	'type' => "flag",
	'reqd' => "no",
	'deft' => "0",
	'hiddengli' => "no"},
      { 'name' => "gzip_output",
	'desc' => "{BasPlugout.gzip_output}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "no"},
      { 'name' => "verbosity",
	'desc' => "{BasPlugout.verbosity}",
	'type' => "int",
	'deft' => "0",
	'reqd' => "no",
	'hiddengli' => "no"},
      { 'name' => "output_info",
	'desc' => "{BasPlugout.output_info}",
	'type' => "string",
	'reqd' => "yes",
	'hiddengli' => "yes"},
      { 'name' => "output_handle",
	'desc' => "{BasPlugout.output_handle}",
	'type' => "string",
	'deft' => 'STDERR',
	'reqd' => "no",
	'hiddengli' => "yes"},
      { 'name' => "debug",
	'desc' => "{BasPlugout.debug}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes"},
      { 'name' => 'no_rss',
	'desc' => "{BasPlugout.no_rss}",
	'type' => 'flag',
	'reqd' => 'no',
	'hiddengli' => 'yes'},
      { 'name' => 'rss_title',
	'desc' => "{BasPlugout.rss_title}",
	'type' => 'string',
	'deft' => 'dc.Title',
	'reqd' => 'no',
	'hiddengli' => 'yes'},
      { 'name' => 'assocfile_copymode',
	'desc' => "{import.assocfile_copymode}",
	'type' => 'enum',
	'list' => [ { 'name' => "copy", 'desc' => "{import.assocfile_copymode_copy}" },
		    { 'name' => "hardlink", 'desc' => "{import.assocfile_copymode_hardlink}" } ],
	'deft' => 'copy',
	'reqd' => 'yes',
	'hiddengli' => 'no'},
      { 'name' => "no_auxiliary_databases",
	'desc' => "{BasPlugout.no_auxiliary_databases}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes"}

];

# Option-group descriptor for this (abstract) base class; parse2 and
# PrintUsage walk this structure to parse arguments and emit usage text.
my $options = { 'name' => "BasePlugout",
		'desc' => "{BasPlugout.desc}",
		'abstract' => "yes",
		'inherits' => "no",
		'args' => $arguments};
|
---|
129 |
|
---|
130 |
|
---|
131 |
|
---|
# Handler for SIGPIPE: report the unexpected signal and abort the run.
sub sigpipe_handler
{
    my $sig = shift;    # signal name, supplied by Perl as the first argument

    print "Unexpected SIG$sig ... exiting\n";
    exit(1);
}
|
---|
140 |
|
---|
# Install the handler so a broken pipe terminates the build with a message
# rather than dying silently.
$SIG{PIPE} = \&sigpipe_handler;
|
---|
142 |
|
---|
143 |
|
---|
# Constructor shared by all plugout subclasses.
#
# Parameters:
#   $plugoutlist     - ref to the list of plugout class names in the
#                      inheritance chain; this class is pushed onto it
#   $args            - ref to the raw command-line argument list
#   $hashArgOptLists - accumulates 'ArgList'/'OptList' entries from every
#                      class in the hierarchy, for parsing and usage output
#
# Returns a blessed hashref.  If "-gsdlinfo" appears in $args the object is
# returned immediately with 'info_only' set and no argument parsing done.
sub new
{
    my $class = shift (@_);

    my ($plugoutlist,$args,$hashArgOptLists) = @_;
    push(@$plugoutlist, $class);

    # report problems against the most-derived plugout name where possible
    my $plugout_name = (defined $plugoutlist->[0]) ? $plugoutlist->[0] : $class;

    push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
    push(@{$hashArgOptLists->{"OptList"}},$options);

    my $self = {};
    $self->{'plugout_type'} = $class;
    $self->{'option_list'} = $hashArgOptLists->{"OptList"};
    $self->{"info_only"} = 0;

    # Check if gsdlinfo is in the argument list or not - if it is, don't parse
    # the args, just return the object.  A "-site" pre-scan also happens here
    # so 'site' is available even on the early-return path.
    #print STDERR "#### " . join(",", @${args}) . "\n\n";
    my $v=0;
    foreach my $strArg (@{$args})
    {
	if(defined $strArg) {
	    if($strArg eq "-gsdlinfo")
	    {
		$self->{"info_only"} = 1;
		return bless $self, $class;
	    }
	    elsif ($strArg eq "-site") {
		$v = $strArg;
	    }
	    # NOTE(review): $v is never reset after the site value is taken,
	    # so every later argument also lands in this branch and overwrites
	    # 'site'.  parse2::parse below re-parses -site properly, so this
	    # pre-scan presumably only matters for the -gsdlinfo path - confirm.
	    elsif($v eq "-site") {
		$self->{'site'} = $strArg;
	    }
	}
    }

    delete $self->{"info_only"};

    # Full option parsing; on failure print usage and abort the build.
    if(parse2::parse($args,$hashArgOptLists->{"ArgList"},$self) == -1)
    {
	my $classTempClass = bless $self, $class;
	print STDERR "<BadPlugout d=$plugout_name>\n";
	&gsprintf(STDERR, "\n{BasPlugout.bad_general_option}\n", $plugout_name);
	$classTempClass->print_txt_usage("");  # Use default resource bundle
	die "\n";
    }

    # Resolve a user-supplied XSLT file against the known config directories;
    # die if it cannot be located anywhere.
    if(defined $self->{'xslt_file'} && $self->{'xslt_file'} ne "")
    {
	my $full_file_path = &util::locate_config_file($self->{'xslt_file'});
	if (!defined $full_file_path) {
	    print STDERR "Can not find $self->{'xslt_file'}, please make sure you have supplied the correct file path or put the file into the collection's etc or greenstone's etc folder\n";
	    die "\n";
	}
	$self->{'xslt_file'} = $full_file_path;
    }

    # for group processing
    $self->{'gs_count'} = 0;        # documents written so far
    $self->{'group_position'} = 1;  # position of the next doc within its group

    $self->{'keep_import_structure'} = 0;

    # fold the -no_auxiliary_databases flag into 'generate_databases'
    $self->{'generate_databases'} = 1;
    if ($self->{'no_auxiliary_databases'}) {
	$self->{'generate_databases'} = 0;
    }
    undef $self->{'no_auxiliary_databases'};

    &sorttools::setup_custom_sort();
    return bless $self, $class;

}
|
---|
220 |
|
---|
221 | # implement this in subclass if you want to do some initialization after
|
---|
222 | # loading and setting parameters, and before processing the documents.
|
---|
# Hook called by inexport.pm after parameters are set and before any
# documents are processed.  Base implementation is a deliberate no-op;
# subclasses override it for one-off initialisation.
sub begin {

    my $self= shift (@_);

}
|
---|
228 | # implement in subclasses if it needs some non-group related cleanup (post-group cleanup
|
---|
229 | # Like begin(), end() is also called by inexport.pm
|
---|
# Hook called by inexport.pm after all documents have been processed, for
# non-group-related cleanup.  Base implementation is a deliberate no-op.
sub end {
    my $self= shift (@_);

}
|
---|
# Emit this plugout's option information as XML on STDERR.
#
#   $header - when true, also print the surrounding <plugout> XML header
#   $high_level_information_only - when defined, omit per-argument detail
sub print_xml_usage
{
    my ($self, $header, $high_level_information_only) = @_;

    # XML output is always in UTF-8
    gsprintf::output_strings_in_UTF8;

    PrintUsage::print_xml_header("plugout") if $header;

    $self->print_xml($high_level_information_only);
}
|
---|
248 |
|
---|
249 |
|
---|
# Recursively print a <PlugoutInfo> XML description of this plugout (and,
# unless $high_level_information_only is defined, its arguments and parent
# classes) to STDERR.
#
# NOTE(review): each recursion level shifts an entry off 'option_list' but,
# unlike the txt-usage path, never restores it afterwards - confirm whether
# callers rely on print_xml being one-shot.
sub print_xml
{
    my $self = shift(@_);
    my $high_level_information_only = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    # consume the head of the list: options for the current hierarchy level
    my $plugoutoptions = shift(@$optionlistref);
    return if (!defined($plugoutoptions));

    gsprintf(STDERR, "<PlugoutInfo>\n");
    gsprintf(STDERR, " <Name>$plugoutoptions->{'name'}</Name>\n");
    my $desc = gsprintf::lookup_string($plugoutoptions->{'desc'});
    $desc =~ s/</&lt;/g; # doubly escaped
    $desc =~ s/>/&gt;/g;
    gsprintf(STDERR, " <Desc>$desc</Desc>\n");
    gsprintf(STDERR, " <Abstract>$plugoutoptions->{'abstract'}</Abstract>\n");
    gsprintf(STDERR, " <Inherits>$plugoutoptions->{'inherits'}</Inherits>\n");
    unless (defined($high_level_information_only)) {
	gsprintf(STDERR, " <Arguments>\n");
	if (defined($plugoutoptions->{'args'})) {
	    &PrintUsage::print_options_xml($plugoutoptions->{'args'});
	}
	gsprintf(STDERR, " </Arguments>\n");

	# Recurse up the plugout hierarchy
	$self->print_xml();
    }
    gsprintf(STDERR, "</PlugoutInfo>\n");
}
|
---|
280 |
|
---|
281 |
|
---|
# Print the plain-text usage message for this plugout, walking the whole
# class hierarchy so option descriptions line up in a single column.
sub print_txt_usage
{
    my ($self) = @_;

    my $column = $self->determine_description_offset(0);
    $self->print_plugout_usage($column, 1);
}
|
---|
290 |
|
---|
# Recursively find the longest option string across the whole plugout
# hierarchy so usage descriptions can be aligned.  Pops one entry off
# 'option_list' per level and restores the full list before returning.
#
#   $maxoffset - running maximum so far; the final maximum is returned
sub determine_description_offset
{
    my $self = shift(@_);
    my $maxoffset = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    my $plugoutoptions = pop(@$optionlistref);
    return $maxoffset if (!defined($plugoutoptions));

    # Find the length of the longest option string of this download
    my $plugoutargs = $plugoutoptions->{'args'};
    if (defined($plugoutargs)) {
	my $longest = &PrintUsage::find_longest_option_string($plugoutargs);
	if ($longest > $maxoffset) {
	    $maxoffset = $longest;
	}
    }

    # Recurse up the download hierarchy
    $maxoffset = $self->determine_description_offset($maxoffset);
    # restore the option list consumed by the recursion
    $self->{'option_list'} = \@optionlist;
    return $maxoffset;
}
|
---|
315 |
|
---|
316 |
|
---|
# Print the usage text for one level of the plugout hierarchy to STDERR and
# recurse to the parent levels.  'option_list' is consumed one entry per
# level and restored on the way back out.
#
#   $descoffset  - column where option descriptions start
#   $isleafclass - true only for the most-derived class (prints the
#                  description and "usage:" banner)
#
# (Several comments below say "download" - this code was evidently shared
# with the downloader usage printer.)
sub print_plugout_usage
{
    my $self = shift(@_);
    my $descoffset = shift(@_);
    my $isleafclass = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    my $plugoutoptions = shift(@$optionlistref);
    return if (!defined($plugoutoptions));

    my $plugoutname = $plugoutoptions->{'name'};
    my $plugoutargs = $plugoutoptions->{'args'};
    my $plugoutdesc = $plugoutoptions->{'desc'};

    # Produce the usage information using the data structure above
    if ($isleafclass) {
	if (defined($plugoutdesc)) {
	    gsprintf(STDERR, "$plugoutdesc\n\n");
	}
	gsprintf(STDERR, " {common.usage}: plugout $plugoutname [{common.options}]\n\n");
    }

    # Display the download options, if there are some
    if (defined($plugoutargs)) {
	# Calculate the column offset of the option descriptions
	my $optiondescoffset = $descoffset + 2; # 2 spaces between options & descriptions

	if ($isleafclass) {
	    gsprintf(STDERR, " {common.specific_options}:\n");
	}
	else {
	    gsprintf(STDERR, " {common.general_options}:\n", $plugoutname);
	}

	# Display the download options
	&PrintUsage::print_options_txt($plugoutargs, $optiondescoffset);
    }

    # Recurse up the download hierarchy
    $self->print_plugout_usage($descoffset, 0);
    # restore the option list consumed by the recursion
    $self->{'option_list'} = \@optionlist;
}
|
---|
360 |
|
---|
361 |
|
---|
# Report a fatal internal error and terminate the run.
#
# Parameters:
#   $strFunctionName - name of the function where the error was detected
#   $strError        - human-readable error message
#
# Never returns: exits the process with status -1.
sub error
{
    my ($strFunctionName,$strError) = @_;

    # Fixed the message typo ("occoured" -> "occurred") and dropped the
    # redundant bare block that previously wrapped this body.
    print "Error occurred in BasePlugout.pm\n".
	"In Function: ".$strFunctionName."\n".
	"Error Message: ".$strError."\n";
    exit(-1);
}
|
---|
372 |
|
---|
373 |
|
---|
374 | # OIDtype may be "hash" or "hash_on_full_filename" or "incremental" or "filename" or "dirname" or "full_filename" or "assigned"
|
---|
# Select the document-identifier scheme.  Unrecognised values fall back to
# "hash".  The "assigned" scheme additionally records which metadata field
# supplies the identifier ($metadata, defaulting to dc.Identifier).
sub set_OIDtype {
    my ($self, $type, $metadata) = @_;

    $self->{'OIDtype'} =
	($type =~ /^(hash|hash_on_full_filename|incremental|filename|dirname|full_filename|assigned)$/)
	? $type
	: "hash";

    if ($type =~ /^assigned$/) {
	$self->{'OIDmetadata'} = (defined $metadata) ? $metadata : "dc.Identifier";
    }
}
|
---|
392 |
|
---|
# Record the directory (archives/export) that documents will be written to.
sub set_output_dir
{
    my ($self, $output_dir) = @_;
    $self->{'output_dir'} = $output_dir;
}
|
---|
400 |
|
---|
# Legacy-named alias of set_output_dir (kept for old callers): stores the
# directory documents will be written to.
sub setoutputdir
{
    my ($self, $output_dir) = @_;
    $self->{'output_dir'} = $output_dir;
}
|
---|
408 |
|
---|
# Accessor for the output directory (undef if never set).
sub get_output_dir
{
    my ($self) = @_;
    return $self->{'output_dir'};
}
|
---|
415 |
|
---|
# Legacy-named alias of get_output_dir (kept for old callers).
sub getoutputdir
{
    my ($self) = @_;
    return $self->{'output_dir'};
}
|
---|
422 |
|
---|
# Accessor for the 'output_info' structure (the archive info database handle).
sub getoutputinfo
{
    my ($self) = @_;
    return $self->{'output_info'};
}
|
---|
429 |
|
---|
430 |
|
---|
# Open $output_file_name for writing and return the filehandle.
# Dies if the file cannot be opened.
sub get_output_handler
{
    my ($self, $output_file_name) = @_;

    my $fh;
    &FileUtils::openFileHandle($output_file_name, '>', \$fh) or die('Can not open a file handler for: ' . $output_file_name . "\n");

    return $fh;
}
|
---|
442 |
|
---|
# Close a filehandle previously obtained from get_output_handler.
sub release_output_handler
{
    my ($self, $outhandler) = @_;

    close($outhandler);
}
|
---|
451 |
|
---|
# Write the XML prolog, an optional DOCTYPE declaration, and the opening
# <$docroot> tag to $handle.
#
#   $docroot    - root element name; when undef, no root tag is printed and
#                 the DOCTYPE name defaults to "Section"
#   $nondoctype - when defined, suppresses the DOCTYPE declaration entirely
sub output_xml_header {
    my ($self, $handle, $docroot, $nondoctype) = @_;

    # For DSpace the encoding name must be the lower-case "utf-8"
    print $handle '<?xml version="1.0" encoding="utf-8" standalone="no"?>' . "\n";

    unless (defined $nondoctype) {
	my $doctype = defined($docroot) ? $docroot : "Section";

	# Used to be '<!DOCTYPE Archive SYSTEM ...'
	print $handle "<!DOCTYPE $doctype SYSTEM \"https://greenstone.org/dtd/Archive/1.0/Archive.dtd\">\n";
    }

    if (defined $docroot) {
	print $handle "<$docroot>\n";
    }
}
|
---|
469 |
|
---|
# Write the closing </$docroot> tag to $handle (no-op when $docroot undef).
sub output_xml_footer {
    my ($self, $handle, $docroot) = @_;

    if (defined $docroot) {
	print $handle "</$docroot>\n";
    }
}
|
---|
475 |
|
---|
476 |
|
---|
# Flexible variant of output_xml_header: supports attributes on the root
# tag, an arbitrary DTD URL, and an explicit DOCTYPE name.
#
#   $docroot        - root element name (omitted entirely when undef)
#   $opt_attributes - attribute string appended inside the root tag
#   $opt_dtd        - system identifier for a DOCTYPE declaration
#   $opt_doctype    - DOCTYPE name; defaults to $docroot
sub output_general_xml_header
{
    my ($self, $handle, $docroot, $opt_attributes, $opt_dtd, $opt_doctype) = @_;

    print $handle '<?xml version="1.0" encoding="utf-8" standalone="no"?>' . "\n";

    if (defined $opt_dtd) {
	my $doctype = defined($opt_doctype) ? $opt_doctype : $docroot;
	print $handle "<!DOCTYPE $doctype SYSTEM \"$opt_dtd\">\n";
    }

    return unless defined $docroot;

    my $root_tag = $docroot;
    $root_tag .= " $opt_attributes" if defined $opt_attributes;
    print $handle "<$root_tag>\n";
}
|
---|
498 |
|
---|
# Counterpart to output_general_xml_header; the closing tag is identical to
# the plain case, so delegate straight to output_xml_footer (same argument
# shape: $self, $handle, $docroot).
sub output_general_xml_footer
{
    output_xml_footer(@_);
}
|
---|
503 |
|
---|
504 | # This is called by the plugins after read_into_doc_obj generates the doc_obj.
|
---|
# This is called by the plugins after read_into_doc_obj generates the doc_obj.
# Chooses (or creates) the destination directory, hands the document to the
# subclass' saveas(), and records it in the archive info database.
sub process {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $output_info = $self->{'output_info'};
    return if (!defined $output_info);

    # for OAI purposes
    $doc_obj->set_lastmodified();
    $doc_obj->set_oailastmodified();

    # find out which directory to save to
    my $doc_dir = "";
    if ($self->is_group()) {
	$doc_dir = $self->get_group_doc_dir($doc_obj);
    } else {
	$doc_dir = $self->get_doc_dir($doc_obj);
    }

    ##############################
    # call subclass' saveas method
    ##############################
    $self->saveas($doc_obj,$doc_dir);

    # write out data to archiveinf-doc.db
    if ($self->{'generate_databases'}) {
	$self->archiveinf_db($doc_obj);
    }
    # group bookkeeping: advance position counters within the group file
    if ($self->is_group()) {
	$self->{'gs_count'}++; # do we want this for all cases?
	$self->{'group_position'}++;
    }
}
|
---|
538 |
|
---|
# Register $doc_obj in 'output_info', computing the sort key requested via
# 'sortmeta' - either the special value "OID" or a comma-separated list of
# metadata names whose (prefix/suffix-stripped, sort-formatted) values are
# concatenated.
sub store_output_info_reference {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $output_info = $self->{'output_info'};
    my $metaname = $self->{'sortmeta'};

    # group position is only meaningful when several docs share one file
    my $group_position;
    if ($self->is_group()) {
	$group_position = $self->{'group_position'};
    }
    # no sort metadata requested: store with an empty sort key
    if (!defined $metaname || $metaname !~ /\S/) {
	my $OID = $doc_obj->get_OID();
	$output_info->add_info($OID,$self->{'short_doc_file'}, undef, "", $group_position);
	return;
    }

    if ($metaname eq "OID") { # sort by OID
	my $OID = $doc_obj->get_OID();
	$output_info->add_info($OID,$self->{'short_doc_file'}, undef, $OID, undef);
	return;
    }

    # build the sort key from the named metadata of the top section
    my $metadata = "";
    my $top_section = $doc_obj->get_top_section();

    my @commameta_list = split(/,/, $metaname);
    foreach my $cmn (@commameta_list) {
	my $meta = $doc_obj->get_metadata_element($top_section, $cmn);
	if ($meta) {
	    # do remove prefix/suffix - this will apply to all values
	    $meta =~ s/^$self->{'removeprefix'}// if defined $self->{'removeprefix'};
	    $meta =~ s/$self->{'removesuffix'}$// if defined $self->{'removesuffix'};
	    $meta = &sorttools::format_metadata_for_sorting($cmn, $meta, $doc_obj);
	    $metadata .= $meta if ($meta);
	}
    }

    # store reference in the output_info
    $output_info->add_info($doc_obj->get_OID(),$self->{'short_doc_file'}, undef, $metadata,undef);
}
|
---|
580 |
|
---|
581 |
|
---|
582 |
|
---|
# Abstract method: write $doc_obj into the archives directory $doc_dir.
# Every concrete plugout must override this; reaching the base
# implementation is a fatal error.
sub saveas {
    my ($self, $doc_obj, $doc_dir) = @_;

    die "BasePlugout::saveas function must be implemented in sub classes\n";
}
|
---|
589 |
|
---|
# When documents are grouped (group_size > 1), several docs share one
# archives directory/file.  Decide whether $doc_obj joins the current group
# or starts a new one, and return the (relative) directory to use.
# Side effects: updates 'gs_doc_dir' and 'new_doc_dir', and resets
# 'group_position' when a new group starts.
sub get_group_doc_dir {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $outhandle = $self->{'output_handle'};
    my $OID = $doc_obj->get_OID();
    $OID = "NULL" unless defined $OID;

    my $groupsize = $self->{'group_size'};
    my $gs_count = $self->{'gs_count'};

    # a new file is started every $groupsize documents
    my $open_new_file = (($gs_count % $groupsize)==0);

    my $doc_dir;

    if (!$open_new_file && scalar(@{$doc_obj->get_assoc_files()})>0) {
	# if we have some assoc files, then we will need to start a new file
	if ($self->{'verbosity'} > 2) {
	    print $outhandle " Starting an archives/export folder for $OID as it has associated files\n";
	}
	$open_new_file = 1;
    }

    # opening a new file
    if (($open_new_file) || !defined($self->{'gs_doc_dir'})) {
	# first we close off the old output
	if ($gs_count>0)
	{
	    # NOTE(review): a failed close_group_output() returns undef here,
	    # leaving the caller without a directory - confirm intended.
	    return if (!$self->close_group_output());
	}

	# this will create the directory
	$doc_dir = $self->get_doc_dir ($doc_obj);
	$self->{'new_doc_dir'} = 1;
	$self->{'gs_doc_dir'} = $doc_dir;
	$self->{'group_position'} = 1;
    }
    else {
	# continue filling the current group directory
	$doc_dir = $self->{'gs_doc_dir'};
	$self->{'new_doc_dir'} = 0;
    }

    return $doc_dir;
}
|
---|
634 |
|
---|
# Return the relative archives directory for $doc_obj: reuse the directory
# already recorded in 'output_info' for its OID (incremental builds), mirror
# the import-folder structure when 'keep_import_structure' is set, or
# otherwise allocate a fresh directory.  Ensures the directory exists on disk.
sub get_doc_dir {

    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $OID = $doc_obj->get_OID();
    $OID = "NULL" unless defined $OID;

    my $working_dir = $self->get_output_dir();
    my $working_info = $self->{'output_info'};
    return if (!defined $working_info);

    my $doc_info = $working_info->get_info($OID);
    my $doc_dir = '';

    if (defined $doc_info && scalar(@$doc_info) >= 1)
    {
	# This OID already has an archives directory, so use it again
	$doc_dir = $doc_info->[0];
	# strip the trailing doc.xml / docmets.xml / docsql.xml /
	# dublin_core.xml filename (optionally .gz) to leave just the directory
	$doc_dir =~ s/\/?((doc(mets|sql)?)|(dublin_core))\.xml(\.gz)?$//;
    }
    elsif ($self->{'keep_import_structure'})
    {
	# mirror the document's position under the import directory
	my $source_filename = $doc_obj->get_source_filename();
	$source_filename = &File::Basename::dirname($source_filename);
	$source_filename =~ s/[\\\/]+/\//g;
	$source_filename =~ s/\/$//;

	$doc_dir = substr($source_filename, length($ENV{'GSDLIMPORTDIR'}) + 1);
    }

    # We have to use a new archives directory for this document
    if ($doc_dir eq "")
    {
	$doc_dir = $self->get_new_doc_dir ($working_info, $working_dir, $OID);
    }

    &FileUtils::makeAllDirectories(&FileUtils::filenameConcatenate($working_dir, $doc_dir));

    return $doc_dir;
}
|
---|
676 |
|
---|
677 |
|
---|
678 | ## @function get_new_doc_dir()
|
---|
679 | #
|
---|
680 | # Once a doc object is ready to write to disk (and hence has a nice OID),
|
---|
681 | # generate a unique subdirectory to write the information to.
|
---|
682 | #
|
---|
683 | # - create the directory as part of this call, to try and avoid race conditions
|
---|
684 | # found in parallel processing [jmt12]
|
---|
685 | #
|
---|
686 | # @todo figure out what the rule regarding $work_info->size() is meant to do
|
---|
687 | #
|
---|
688 | # @todo determine what $self->{'group'} is, and whether it should affect
|
---|
689 | # directory creation
|
---|
690 | #
|
---|
# Allocate a unique archives subdirectory for a document with identifier
# $OID under $working_dir, creating it on disk as part of the allocation to
# reduce race conditions in parallel builds.  Returns the relative path,
# always ending in ".dir".
sub get_new_doc_dir
{
    my $self = shift (@_);
    my($working_info,$working_dir,$OID) = @_;

    my $doc_dir = "";
    my $doc_dir_rest = $OID;

    # remove any \ and / from the OID
    $doc_dir_rest =~ s/[\\\/]//g;

    # Remove ":" if we are on Windows OS, as otherwise they get confused with the drive letters
    if ($ENV{'GSDLOS'} =~ /^windows$/i)
    {
	$doc_dir_rest =~ s/\://g;
    }

    # we generally create a unique directory by adding consequtive fragments of
    # the document identifier (split by some predefined length - defaulting to
    # 8) until we find a directory that doesn't yet exist. Note that directories
    # that contain a document have a suffix ".dir" (whereas those that contain
    # only subdirectories have no suffix).
    my $doc_dir_num = 0; # how many directories deep we are
    my $created_directory = 0; # have we successfully created a new directory
    do
    {
	# (does this work on windows? - jmt12)
	if ($doc_dir_num > 0)
	{
	    $doc_dir .= '/';
	}

	# the default matching pattern grabs the next 'subdir_split_length'
	# characters of the OID to act as the next subdirectory
	my $pattern = '^(.{1,' . $self->{'subdir_split_length'} . '})';

	# Do we count any "HASH" prefix against the split length limit?
	if ($self->{'subdir_hash_prefix'} && $doc_dir_num == 0)
	{
	    $pattern = '^((HASH)?.{1,' . $self->{'subdir_split_length'} . '})';
	}

	# Note the use of 's' to both capture the next chuck of OID and to remove
	# it from OID at the same time
	if ($doc_dir_rest =~ s/$pattern//i)
	{
	    $doc_dir .= $1;
	    $doc_dir_num++;

	    my $full_doc_dir = &FileUtils::filenameConcatenate($working_dir, $doc_dir . '.dir');
	    if(!FileUtils::directoryExists($full_doc_dir))
	    {
		&FileUtils::makeAllDirectories($full_doc_dir);
		$created_directory = 1;
	    }

	    ###rint STDERR "[DEBUG] BasePlugout::get_new_doc_dir(<working_info>, $working_dir, $oid)\n";
	    ###rint STDERR " - create directory: $full_doc_dir => $created_directory\n";
	    ###rint STDERR " - rest: $doc_dir_rest\n";
	    ###rint STDERR " - working_info->size(): " . $working_info->size() . " [ < 1024 ?]\n";
	    ###rint STDERR " - doc_dir_num: " . $doc_dir_num . "\n";
	}
    }
    # keep consuming OID fragments while nothing has been created yet, or -
    # for large collections (>= 1024 docs) - force at least two levels of
    # nesting so no single directory grows too large
    while ($doc_dir_rest ne '' && ($created_directory == 0 || ($working_info->size() >= 1024 && $doc_dir_num < 2)));

    # not unique yet? Add on an incremental suffix until we are unique
    my $i = 1;
    my $doc_dir_base = $doc_dir;
    while ($created_directory == 0)
    {
	$doc_dir = $doc_dir_base . '-' . $i;
	$created_directory = &FileUtils::makeAllDirectories(&FileUtils::filenameConcatenate($working_dir, $doc_dir . '.dir'));
	$i++;
    }

    # in theory this should never happen
    if (!$created_directory)
    {
	die("Error! Failed to create directory for document: " . $doc_dir_base . "\n");
    }

    return $doc_dir . '.dir';
}
|
---|
774 | ## get_new_doc_dir()
|
---|
775 |
|
---|
776 |
|
---|
# Copy (or hardlink, depending on 'assocfile_copymode') every associated
# file of $doc_obj into its archives directory $doc_dir, recording each one
# in the document's "gsdlassocfile" metadata.  Also sets "assocfilepath",
# which is needed even when there are no associated files.
sub process_assoc_files {
    my $self = shift (@_);
    my ($doc_obj, $doc_dir, $handle) = @_;

    my $outhandle = $self->{'output_handle'};

    my $output_dir = $self->get_output_dir();
    return if (!defined $output_dir);

    &FileUtils::makeAllDirectories($output_dir) unless &FileUtils::directoryExists($output_dir);

    my $working_dir = &FileUtils::filenameConcatenate($output_dir, $doc_dir);
    &FileUtils::makeAllDirectories($working_dir) unless &FileUtils::directoryExists($working_dir);

    my @assoc_files = ();
    my $filename;;

    my $source_filename = $doc_obj->get_source_filename();

    my $collect_dir = $ENV{'GSDLCOLLECTDIR'};

    # make a relative source filename absolute w.r.t. the collection dir
    if (defined $collect_dir) {
	my $dirsep_regexp = &util::get_os_dirsep();

	if ($collect_dir !~ /$dirsep_regexp$/) {
	    $collect_dir .= &util::get_dirsep(); # ensure there is a slash at the end
	}

	# This test is never going to fail on Windows -- is this a problem?

	if ($source_filename !~ /^$dirsep_regexp/) {
	    $source_filename = &FileUtils::filenameConcatenate($collect_dir, $source_filename);
	}
    }


    # set the assocfile path (even if we have no assoc files - need this for lucene)
    $doc_obj->set_utf8_metadata_element ($doc_obj->get_top_section(),
					 "assocfilepath",
					 "$doc_dir");
    # each record: [0] = real source path, [1] = target name (may carry a
    # subdirectory), [2] = stored verbatim in gsdlassocfile - presumably the
    # MIME type (TODO confirm against doc_obj)
    foreach my $assoc_file_rec (@{$doc_obj->get_assoc_files()}) {
	my ($dir, $afile) = $assoc_file_rec->[1] =~ /^(.*?)([^\/\\]+)$/;
	$dir = "" unless defined $dir;

	my $utf8_real_filename = $assoc_file_rec->[0];

	# for some reasons the image associate file has / before the full path
	$utf8_real_filename =~ s/^\\(.*)/$1/i;

	## my $real_filename = &util::utf8_to_real_filename($utf8_real_filename);
	my $real_filename = $utf8_real_filename;
	$real_filename = &util::downgrade_if_dos_filename($real_filename);

	if (&FileUtils::fileExists($real_filename)) {

	    $filename = &FileUtils::filenameConcatenate($working_dir, $afile);

	    if ($self->{'assocfile_copymode'} eq "hardlink") {
		&FileUtils::hardLink($real_filename, $filename, $self->{'verbosity'}); # Consider adding in 'strict' option??
	    }
	    else {
		&FileUtils::copyFilesGeneral([$real_filename], $filename); # Consider adding in 'strict' option??
	    }

	    $doc_obj->add_utf8_metadata ($doc_obj->get_top_section(),
					 "gsdlassocfile",
					 "$afile:$assoc_file_rec->[2]:$dir");
	} elsif ($self->{'verbosity'} > 1) {
	    print $outhandle "BasePlugout::process couldn't copy the associated file " .
		"$real_filename to $afile\n";
	}
    }
}
|
---|
850 |
|
---|
851 |
|
---|
# process_metafiles_metadata() -- feeds accumulated metadata.xml references
# back into the document object.
#
# Each "gsdlmetafile" value stored on the document's top section is a pair
# of the form "<full path> : <short name>".  Each pair is handed to
# $doc_obj->metadata_file(), and the marker metadata is then removed so it
# does not leak into the saved document.
sub process_metafiles_metadata
{
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $top_section = $doc_obj->get_top_section();

    foreach my $metafile_pair (@{ $doc_obj->get_metadata($top_section, "gsdlmetafile") }) {
        my ($full_metafile, $metafile) = split(/ : /, $metafile_pair);
        $doc_obj->metadata_file($full_metafile, $metafile);
    }

    $doc_obj->delete_metadata($top_section, "gsdlmetafile");
}
|
---|
868 |
|
---|
# archiveinf_files_to_field() -- records a list of files under $field in the
# per-document archive-info record $oid_files.
#
# Parameters:
#   $files           - array ref of file records; each record is either a
#                      plain filename string or an array ref whose [0] is the
#                      real (absolute) filename.  (A record's [1] element, a
#                      collection-relative path, is currently unused.)
#   $field           - record field to append to, e.g. "assoc-file" or "meta-file"
#   $collect_dir     - collection directory (currently unused here; kept for
#                      interface compatibility with callers)
#   $oid_files       - hash ref holding the archiveinf-doc record being built
#   $reverse_lookups - optional hash ref; when given, each existing file is
#                      registered in it for file -> OID reverse mapping
#
# Files that do not exist on the file system are skipped with a warning.
sub archiveinf_files_to_field
{
    my $self = shift(@_);
    my ($files,$field,$collect_dir,$oid_files,$reverse_lookups) = @_;

    foreach my $file_rec (@$files) {
        my $real_filename = (ref $file_rec eq "ARRAY") ? $file_rec->[0] : $file_rec;

        # for some reason the image associated file has / before the full path
        # (strip a single spurious leading backslash)
        $real_filename =~ s/^\\(.*)/$1/;

        # on Windows, map long filenames down to their 8.3 form if needed
        my $raw_filename = &util::downgrade_if_dos_filename($real_filename);

        if (&FileUtils::fileExists($raw_filename)) {

            if (defined $reverse_lookups) {
                $reverse_lookups->{$real_filename} = 1;
            }

            # store portable placeholder paths rather than absolute ones
            if ($field =~ m@assoc-file|src-file|meta-file@) {
                $raw_filename = &util::abspath_to_placeholders($raw_filename);
            }

            push(@{$oid_files->{$field}},$raw_filename);
        }
        else {
            print STDERR "Warning: archiveinf_files_to_field()\n $real_filename does not appear to be on the file system\n";
        }
    }
}
|
---|
911 |
|
---|
# archiveinf_db() -- appends this document's record to the archive info
# databases after the document has been written out.  For $doc_obj it:
#   * builds the archiveinf-doc record (doc-file, index-status, src-file,
#     sort-meta, assoc-file, meta-file, optionally group-position);
#   * optionally writes an RSS <item> entry for indexed (I) or re-indexed
#     (R) documents, either into an rss-items database or a flat
#     rss-items.rdf file;
#   * registers source-file -> OID reverse lookups and flags metadata.xml
#     files so incremental builds know what needs reprocessing.
sub archiveinf_db
{
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $verbosity = $self->{'verbosity'};

    my $collect_dir = $ENV{'GSDLCOLLECTDIR'};
    if (defined $collect_dir) {
        my $dirsep_regexp = &util::get_os_dirsep();

        if ($collect_dir !~ /$dirsep_regexp$/) {
            # ensure there is a slash at the end
            $collect_dir .= &util::get_dirsep();
        }
    }

    my $oid = $doc_obj->get_OID();
    my $source_filename = $doc_obj->get_unmodified_source_filename();
    my $working_info = $self->{'output_info'};
    my $doc_info = $working_info->get_info($oid);

    my ($doc_file,$index_status,$sortmeta, $group_position) = @$doc_info;
    # doc_file is the path to the archive doc.xml. Make sure it has unix
    # slashes, then if the collection is copied to linux, it can be built without reimport
    $doc_file =~ s/\\/\//g;

    # The archiveinf-doc record for this OID; assoc-file and meta-file are
    # filled in below by archiveinf_files_to_field().
    my $oid_files = { 'doc-file' => $doc_file,
                      'index-status' => $index_status,
                      'src-file' => $source_filename,
                      'sort-meta' => $sortmeta,
                      'assoc-file' => [],
                      'meta-file' => [] };
    if (defined $group_position) {
        $oid_files->{'group-position'} = $group_position;
    }

    # source file -> OID reverse mapping; extended by the assoc-file pass below
    my $reverse_lookups = { $source_filename => "1" };


    $self->archiveinf_files_to_field($doc_obj->get_source_assoc_files(),"assoc-file",
                                     $collect_dir,$oid_files,$reverse_lookups);


    # NOTE: no $reverse_lookups passed here -- meta files are flagged
    # separately at the end of this method instead.
    $self->archiveinf_files_to_field($doc_obj->get_meta_files(),"meta-file",
                                     $collect_dir,$oid_files);

    # Get the infodbtype value for this collection from the arcinfo object
    my $infodbtype = $self->{'output_info'}->{'infodbtype'};
    my $output_dir = $self->{'output_dir'};

    my $doc_db = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $output_dir);

    ##print STDERR "*** To set in db: \n\t$doc_db\n\t$oid\n\t$doc_db_text\n";

    if (!$self->{'no_rss'})
    {
        # only documents that were indexed (I) or re-indexed (R) get an RSS entry
        if (($oid_files->{'index-status'} eq "I") || ($oid_files->{'index-status'} eq "R")) {
            my $top_section = $doc_obj->get_top_section();

            # rss_title can be set in collect.cfg as follows:
            # plugout GreenstoneXMLPlugout -rss_title "dc.Title; ex.Title"
            # rss_title is a semi-colon or comma-separated list of the metadata field names that should
            # be consulted in order to obtain a Title (anchor text) for the RSS document link.
            # If not specified, rss_title will default to dc.Title, and fall back on Untitled
            my $metafieldnames = $self->{'rss_title'};
            my @metafieldarray = split(/[,;] ?/,$metafieldnames); # , or ; separator can be followed by an optional space
            my $titles;
            #@$titles=(); # at worst @$titles will be (), as get_metadata(dc.Titles) may return ()
            foreach my $metafieldname (@metafieldarray) {
                $metafieldname =~ s@^ex\.@@; # if ex.Title, need to get_metadata() on metafieldname=Title
                $titles = $doc_obj->get_metadata($top_section,$metafieldname);

                if(scalar(@$titles) != 0) { # found at least one title for one metafieldname
                    last; # break out of the loop
                }
            }

            # if ex.Title was listed in the metafieldnames, then we'll surely have a value for title for this doc
            # otherwise, if we have no titles at this point, add in a default of Untitled as this doc's title
            if(scalar(@$titles) == 0) { # (an additional check on $metafieldnames was once here, commented out)
                push(@$titles, "Untitled");
            }

            # encode basic html entities like <>"& in the title(s), since the & char can break RSS links
            for (my $i = 0; $i < scalar(@$titles); $i++) {
                &ghtml::htmlsafe(@$titles[$i]);
            }

            my $dc_title = join("; ", @$titles);

            if ($oid_files->{'index-status'} eq "R") {
                $dc_title .= " (Updated)";
            }

            my $rss_entry = "<item>\n";
            $rss_entry .= " <title>$dc_title</title>\n";
            # GS3 and GS2 use different macros for the domain part of the link
            if(&util::is_gs3()) {
                $rss_entry .= " <link>_httpdomain__httpcollection_/document/$oid</link>\n";
            } else {
                $rss_entry .= " <link>_httpdomainHtmlsafe__httpcollection_/document/$oid</link>\n";
            }
            $rss_entry .= "</item>";

            # Preferred: append to the rss-items database when the backend
            # supports it; otherwise fall back on a flat rss-items.rdf file.
            if (defined(&dbutil::supportsRSS) && &dbutil::supportsRSS($infodbtype))
            {
                my $rss_db = &dbutil::get_infodb_file_path($infodbtype, 'rss-items', $output_dir);
                my $rss_db_fh = &dbutil::open_infodb_write_handle($infodbtype, $rss_db, 'append');
                &dbutil::write_infodb_rawentry($infodbtype, $rss_db_fh, $oid, $rss_entry);
                &dbutil::close_infodb_write_handle($infodbtype, $rss_db_fh);
            }
            else
            {
                my $rss_filename = &FileUtils::filenameConcatenate($output_dir,"rss-items.rdf");
                my $rss_fh;
                if (&FileUtils::openFileHandle($rss_filename, '>>', \$rss_fh, "utf8"))
                {
                    print $rss_fh $rss_entry . "\n";
                    &FileUtils::closeFileHandle($rss_filename, \$rss_fh);
                }
                else
                {
                    print STDERR "Error: Failed to open $rss_filename\n$!\n";
                }
            }
        }
    }

    # The infodb layer expects each field value to be an array ref, so wrap
    # the scalar fields.  src-file additionally has its absolute path
    # replaced with placeholders (see util::abspath_to_placeholders).
    $oid_files->{'doc-file'} = [ $oid_files->{'doc-file'} ];
    $oid_files->{'index-status'} = [ $oid_files->{'index-status'} ];
    $oid_files->{'src-file'} = &util::abspath_to_placeholders($oid_files->{'src-file'});
    $oid_files->{'src-file'} = [ $oid_files->{'src-file'} ];
    $oid_files->{'sort-meta'} = [ $oid_files->{'sort-meta'} ];
    if (defined $oid_files->{'group-position'}) {
        $oid_files->{'group-position'} = [ $oid_files->{'group-position'} ];
    }

    # append the completed record to the archiveinf-doc database
    my $infodb_file_handle = &dbutil::open_infodb_write_handle($infodbtype, $doc_db, "append");
    &dbutil::write_infodb_entry($infodbtype, $infodb_file_handle, $oid, $oid_files);
    &dbutil::close_infodb_write_handle($infodbtype, $infodb_file_handle);

    # record source-file -> OID reverse mappings for incremental rebuilds
    foreach my $rl (keys %$reverse_lookups) {
        $working_info->add_reverseinfo($rl,$oid);
    }

    # meta files not set in reverse entry, but need to set the metadata flag
    if (defined $doc_obj->get_meta_files()) {
        foreach my $meta_file_rec(@{$doc_obj->get_meta_files()}) {
            my $full_file = (ref $meta_file_rec eq "ARRAY") ? $meta_file_rec->[0] : $meta_file_rec;
            $working_info->set_meta_file_flag($full_file);
        }
    }
}
|
---|
1063 |
|
---|
1064 | # This sub is called for every metadata.xml accepted for processing by MetadataXMLPlugin
|
---|
1065 | # and adds an entry into archiveinf-src.db for that file in the form:
|
---|
1066 | # [@THISCOLLECTPATH@/import/metadata.xml]
|
---|
1067 | # <meta-file>1
|
---|
1068 | # This prevents blind reprocessing of the same old docs upon *incremental* building whenever
|
---|
1069 | # we encounter a default empty metadata.xml that has no actual <FileSet> content defined.
|
---|
# add_metaxml_file_entry_to_archiveinfsrc() -- flags $full_file (the full
# path to a metadata.xml file) in the output info, so an entry for it ends
# up in archiveinf-src.db.  See the comment above for why this matters to
# incremental building.
sub add_metaxml_file_entry_to_archiveinfsrc {
    my $self = shift (@_);
    my ($full_file) = @_;

    # trace at high verbosity only
    if ($self->{'verbosity'} > 2) {
        print { $self->{'output_handle'} } " Adding metaxml file entry for full_file: $full_file\n";
    }

    $self->{'output_info'}->set_meta_file_flag($full_file);
}
|
---|
1082 |
|
---|
1083 |
|
---|
# set_sortmeta() -- records the metadata element used for sorting, together
# with optional prefix/suffix patterns to strip from sort values.  A leading
# "^" on the prefix and a trailing "$" on the suffix are removed, since the
# patterns are anchored where they are applied.
sub set_sortmeta {
    my $self = shift (@_);
    my ($sortmeta, $removeprefix, $removesuffix) = @_;

    $self->{'sortmeta'} = $sortmeta;

    if ($removeprefix) {
        $removeprefix =~ s/^\^//;           # don't need a leading ^
        $self->{'removeprefix'} = $removeprefix;
    }
    if ($removesuffix) {
        $removesuffix =~ s/\$$//;           # don't need a trailing $
        $self->{'removesuffix'} = $removesuffix;
    }
}
|
---|
1098 |
|
---|
1099 |
|
---|
1100 |
|
---|
# open_xslt_pipe() -- opens a write pipe to the Java ApplyXSLT program so
# that subsequent document XML can be streamed through the given XSLT.
#
# Parameters:
#   $output_file_name - destination file name, sent to ApplyXSLT as the
#                       first line of the stream
#   $xslt_file        - path to the XSLT stylesheet; if undef, empty, or
#                       missing on disk, nothing is opened and the sub
#                       returns immediately
#
# On success the pipe filehandle is stored in $self->{'xslt_writer'} and a
# "<?DocStart?>" marker plus the output file name are written to it.
# Dies if the pipe cannot be opened.
sub open_xslt_pipe
{
    my $self = shift @_;
    my ($output_file_name, $xslt_file)=@_;

    return unless defined $xslt_file and $xslt_file ne "" and &FileUtils::fileExists($xslt_file);

    my $java_class_path = &FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","ApplyXSLT.jar");

    if ($ENV{'GSDLOS'} eq "windows"){
        $java_class_path .= ";".&FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","xalan.jar");
        # this file:/// bit didn't work for me on windows XP
        #$xslt_file = "\"file:///".$xslt_file."\"";
    }
    else{
        $java_class_path .= ":".&FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","xalan.jar");
    }

    $java_class_path = "\"".$java_class_path."\"";

    my $cmd = "| java -cp $java_class_path org.nzdl.gsdl.ApplyXSLT -t \"$xslt_file\" ";

    # Fixed: an earlier, never-read declaration of $mapping_file_path at sub
    # scope was shadowed by this one; it has been removed.
    if (defined $self->{'mapping_file'} and $self->{'mapping_file'} ne ""){
        my $mapping_file_path = "\"".$self->{'mapping_file'}."\"";
        $cmd .= "-m $mapping_file_path";
    }

    # lexical filehandle for the pipe (replaces the old GLOB-based code)
    open(my $FH_XMLWRITER, $cmd)
        or die "can't open pipe to xslt: $!";

    $self->{'xslt_writer'} = $FH_XMLWRITER;

    print $FH_XMLWRITER "<?DocStart?>\n";
    print $FH_XMLWRITER "$output_file_name\n";
}
|
---|
1151 |
|
---|
1152 |
|
---|
# close_xslt_pipe() -- ends the stream to the ApplyXSLT pipe (if one was
# opened by open_xslt_pipe) by writing the "<?DocEnd?>" marker, closing the
# handle, and clearing $self->{'xslt_writer'}.  A no-op when no pipe exists.
sub close_xslt_pipe
{
    my $self = shift @_;

    my $writer = $self->{'xslt_writer'};
    return unless defined $writer;

    print $writer "<?DocEnd?>\n";
    close($writer);

    undef $self->{'xslt_writer'};
}
|
---|
1166 |
|
---|
1167 |
|
---|
1168 |
|
---|
# The subclass should implement this method if its is_group() method can
# return 1; this base implementation is deliberately a no-op.
sub close_group_output{
    my $self = shift (@_);
}
|
---|
1173 |
|
---|
# is_group() -- whether this plugout groups multiple documents into a single
# output file.  The base class does not group, so this always returns 0;
# grouping subclasses override it (and then also implement close_group_output).
sub is_group {
    my $self = shift (@_);

    return 0;
}
|
---|
1178 |
|
---|
# Lookup table of the fifteen elements of the (unqualified) Dublin Core
# metadata set, used when deciding which ex/ex.dc metadata may stand in
# for missing dc.* values.
my $dc_set = { map { $_ => 1 } qw(
    Title Creator Subject Description Publisher
    Contributor Date Type Format Identifier
    Source Language Relation Coverage Rights
) };
|
---|
1194 |
|
---|
1195 |
|
---|
1196 | # returns an XML representation of the dublin core metadata
|
---|
1197 | # if dc meta is not found, try ex meta
|
---|
1198 | # This method is not used by the DSpacePlugout, which has its
|
---|
1199 | # own method to save its dc metadata
|
---|
# returns an XML representation of the dublin core metadata
# if dc meta is not found, try ex meta
# This method is not used by the DSpacePlugout, which has its
# own method to save its dc metadata
#
# Priority: explicitly assigned dc.* values are emitted first; for any name
# in the official dc set still unassigned after that, embedded ex.dc.*
# values are used, and failing that plain ex.* (or unprefixed) metadata.
sub get_dc_metadata {
    my $self = shift(@_);
    my ($doc_obj, $section, $version) = @_;

    # build up string of dublin core metadata
    $section="" unless defined $section;

    my $section_ptr = $doc_obj->_lookup_section($section);
    return "" unless defined $section_ptr;


    my $explicit_dc = {};      # values assigned directly as dc.*
    my $explicit_ex_dc = {};   # values from embedded ex.dc.* metadata
    my $explicit_ex = {};      # values from ex.* (or unprefixed) metadata

    my $all_text="";

    # We want high quality dc metadata to go in first, so we store all the
    # assigned dc.* values first. Then, for all those dc metadata names in
    # the official dc set that are as yet unassigned, we look to see whether
    # embedded ex.dc.* metadata has defined some values for them. If not,
    # then for the same missing dc metadata names, we look in ex metadata.

    foreach my $data (@{$section_ptr->{'metadata'}}){
        my $escaped_value = &docprint::escape_text($data->[1]);
        if ($data->[0]=~ m/^dc\./) {
            # note: lowercases the stored metadata name in place
            $data->[0] =~ tr/[A-Z]/[a-z]/;

            $data->[0] =~ m/^dc\.(.*)/;
            my $dc_element = $1;

            if (!defined $explicit_dc->{$dc_element}) {
                $explicit_dc->{$dc_element} = [];
            }
            push(@{$explicit_dc->{$dc_element}},$escaped_value);

            # dc.* metadata goes straight into the output
            $all_text .= _format_dc_value($dc_element,$escaped_value,$version);

        } elsif ($data->[0]=~ m/^ex\.dc\./) { # now look through ex.dc.* to fill in as yet unassigned fields in dc metaset
            $data->[0] =~ m/^ex\.dc\.(.*)/;
            my $ex_dc_element = $1;
            my $lc_ex_dc_element = lc($ex_dc_element);

            # only store the ex.dc value for this dc metaname if no dc.* was assigned for it
            if (defined $dc_set->{$ex_dc_element}) {
                if (!defined $explicit_ex_dc->{$lc_ex_dc_element}) {
                    $explicit_ex_dc->{$lc_ex_dc_element} = [];
                }
                push(@{$explicit_ex_dc->{$lc_ex_dc_element}},$escaped_value);
            }
        }
        elsif (($data->[0] =~ m/^ex\./) || ($data->[0] !~ m/\./)) { # look through ex. meta (incl. meta without prefix)
            $data->[0] =~ m/^(ex\.)?(.*)/;
            my $ex_element = $2;
            my $lc_ex_element = lc($ex_element);

            if (defined $dc_set->{$ex_element}) {
                if (!defined $explicit_ex->{$lc_ex_element}) {
                    $explicit_ex->{$lc_ex_element} = [];
                }
                push(@{$explicit_ex->{$lc_ex_element}},$escaped_value);
            }
        }
    }

    # go through dc_set and for any element *not* defined in explicit_dc
    # that does exist in explicit_ex_dc (or, failing that, explicit_ex),
    # add it in as metadata
    foreach my $k ( keys %$dc_set ) {
        my $lc_k = lc($k);

        if (!defined $explicit_dc->{$lc_k}) {
            # prefer ex.dc.* fallback values; otherwise try plain ex.*
            my $fallback_values = defined $explicit_ex_dc->{$lc_k} ? $explicit_ex_dc->{$lc_k}
                                : defined $explicit_ex->{$lc_k}    ? $explicit_ex->{$lc_k}
                                : undef;

            if (defined $fallback_values) {
                foreach my $escaped_value (@$fallback_values) {
                    $all_text .= _format_dc_value($lc_k,$escaped_value,$version);
                }
            }
        }
    }

    if ($all_text eq "") {
        # (fixed typo: was "metatdata")
        $all_text .= " There is no Dublin Core metadata in this document\n";
    }
    # strip control characters that are not legal in XML
    $all_text =~ s/[\x00-\x09\x0B\x0C\x0E-\x1F]//g;

    return $all_text;
}

# _format_dc_value() -- formats one (element, value) pair either as an OAI
# <dc:...> element (when $version is "oai_dc") or as a DSpace-style
# <dcvalue> element.  Factored out of the three duplicated emission sites
# in get_dc_metadata().  $escaped_value must already be XML-escaped.
sub _format_dc_value {
    my ($dc_element, $escaped_value, $version) = @_;

    if (defined $version && ($version eq "oai_dc")) {
        return " <dc:$dc_element>$escaped_value</dc:$dc_element>\n";
    }
    # qualifier???
    return ' <dcvalue element="'. $dc_element.'">'. $escaped_value. "</dcvalue>\n";
}
|
---|
1315 |
|
---|
1316 | # Build up dublin_core metadata. Priority given to dc.* over ex.*
|
---|
1317 | # This method was apparently added by Jeffrey and committed by Shaoqun.
|
---|
1318 | # But we don't know why it was added, so not using it anymore.
|
---|
# Build up dublin_core metadata. Priority given to dc.* over ex.*
# This method was apparently added by Jeffrey and committed by Shaoqun.
# But we don't know why it was added, so not using it anymore.
sub new_get_dc_metadata {

    my $self = shift(@_);
    my ($doc_obj, $section, $version) = @_;

    # build up string of dublin core metadata
    $section = "" unless defined $section;

    my $section_ptr = $doc_obj->_lookup_section($section);
    return "" unless defined $section_ptr;

    my $xml_text = "";
    foreach my $meta_rec (@{$section_ptr->{'metadata'}}) {
        my $safe_value = &docprint::escape_text($meta_rec->[1]);

        # A name of the form "set.field" splits into metadata set and field
        # name; a bare name is treated as extracted ("ex") metadata.
        my @name_parts = split('\.', $meta_rec->[0]);
        my ($set, $field) = defined $name_parts[1]
            ? ($name_parts[0], $name_parts[1])
            : ("ex", $name_parts[0]);

        $xml_text .= ' <Metadata Type="' . $set . '" Name="' . $field . '">' . $safe_value . "</Metadata>\n";
    }
    return $xml_text;
}
|
---|
1353 |
|
---|
1354 |
|
---|
1355 | 1;
|
---|