1 | ###########################################################################
|
---|
2 | #
|
---|
3 | # BasePlugout.pm -- base class for all the plugout modules
|
---|
4 | # A component of the Greenstone digital library software
|
---|
5 | # from the New Zealand Digital Library Project at the
|
---|
6 | # University of Waikato, New Zealand.
|
---|
7 | #
|
---|
8 | # Copyright (C) 2006 New Zealand Digital Library Project
|
---|
9 | #
|
---|
10 | # This program is free software; you can redistribute it and/or modify
|
---|
11 | # it under the terms of the GNU General Public License as published by
|
---|
12 | # the Free Software Foundation; either version 2 of the License, or
|
---|
13 | # (at your option) any later version.
|
---|
14 | #
|
---|
15 | # This program is distributed in the hope that it will be useful,
|
---|
16 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
|
---|
17 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
---|
18 | # GNU General Public License for more details.
|
---|
19 | #
|
---|
20 | # You should have received a copy of the GNU General Public License
|
---|
21 | # along with this program; if not, write to the Free Software
|
---|
22 | # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
|
---|
23 | #
|
---|
24 | ###########################################################################
|
---|
25 |
|
---|
package BasePlugout;

# 'bytes' may not be available on very old perls, so load it defensively
# rather than failing outright.
eval {require bytes};

use strict;
# The plugout loading machinery relies on barewords and symbolic
# references, so these two strict categories are relaxed file-wide.
no strict 'subs';
no strict 'refs';

use dbutil;
use gsprintf 'gsprintf';
use printusage;
use parse2;
use util;
use FileUtils;
use sorttools;

# suppress the annoying "subroutine redefined" warning that various
# gets cause under perl 5.6
$SIG{__WARN__} = sub {warn($_[0]) unless ($_[0] =~ /Subroutine\s+\S+\sredefined/)};
|
---|
# Command-line arguments understood by every plugout; subclasses push
# their own entries on top of these in their constructors.
# 'desc' values are resource-bundle keys resolved by gsprintf at
# display time; 'hiddengli' => "yes" hides the option from GLI.
my $arguments = [
      { 'name' => "site",
	'desc' => "{BasPlugout.site}",
	'type' => "string",
	'reqd' => "no",
	'hiddengli' => "yes" },
      { 'name' => "xslt_file",
	'desc' => "{BasPlugout.xslt_file}",
	'type' => "string",
	'reqd' => "no",
	'deft' => "",
	'hiddengli' => "no"},
      { 'name' => "subdir_split_length",
	'desc' => "{BasPlugout.subdir_split_length}",
	'type' => "int",
	'reqd' => "no",
	'deft' => "8",
	'hiddengli' => "no"},
      { 'name' => "subdir_hash_prefix",
	'desc' => "{BasPlugout.subdir_hash_prefix}",
	'type' => "flag",
	'reqd' => "no",
	'deft' => "0",
	'hiddengli' => "no"},
      { 'name' => "gzip_output",
	'desc' => "{BasPlugout.gzip_output}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "no"},
      { 'name' => "verbosity",
	'desc' => "{BasPlugout.verbosity}",
	'type' => "int",
	'deft' => "0",
	'reqd' => "no",
	'hiddengli' => "no"},
      { 'name' => "output_info",
	'desc' => "{BasPlugout.output_info}",
	'type' => "string",
	'reqd' => "yes",
	'hiddengli' => "yes"},
      { 'name' => "output_handle",
	'desc' => "{BasPlugout.output_handle}",
	'type' => "string",
	'deft' => 'STDERR',
	'reqd' => "no",
	'hiddengli' => "yes"},
      { 'name' => "debug",
	'desc' => "{BasPlugout.debug}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes"},
      { 'name' => 'no_rss',
	'desc' => "{BasPlugout.no_rss}",
	'type' => 'flag',
	'reqd' => 'no',
	'hiddengli' => 'yes'},
      { 'name' => 'rss_title',
	'desc' => "{BasPlugout.rss_title}",
	'type' => 'string',
	'deft' => 'dc.Title',
	'reqd' => 'no',
	'hiddengli' => 'yes'},
      { 'name' => "no_auxiliary_databases",
	'desc' => "{BasPlugout.no_auxiliary_databases}",
	'type' => "flag",
	'reqd' => "no",
	'hiddengli' => "yes"}

];
|
---|
115 |
|
---|
# Option block describing this (abstract) plugout class; merged into the
# OptList in new() so usage/GLI tooling can introspect the hierarchy.
my $options = { 'name'     => "BasePlugout",
		'desc'     => "{BasPlugout.desc}",
		'abstract' => "yes",
		'inherits' => "no",
		'args'     => $arguments};
|
---|
121 |
|
---|
# Constructor.  Registers this class on $plugoutlist, merges this class's
# argument/option declarations into $hashArgOptLists, then parses $args
# via parse2.  When "-gsdlinfo" appears in the arguments, parsing is
# skipped and a minimal object is returned (only option metadata is
# wanted in that mode).  Dies with a usage message on bad options or a
# missing -xslt_file.
sub new
{
    my $class = shift (@_);

    my ($plugoutlist,$args,$hashArgOptLists) = @_;
    push(@$plugoutlist, $class);

    # name used in error reporting: the head of the plugout list,
    # falling back to this class's own name
    my $plugout_name = (defined $plugoutlist->[0]) ? $plugoutlist->[0] : $class;

    push(@{$hashArgOptLists->{"ArgList"}},@{$arguments});
    push(@{$hashArgOptLists->{"OptList"}},$options);

    my $self = {};
    $self->{'plugout_type'} = $class;
    $self->{'option_list'} = $hashArgOptLists->{"OptList"};
    $self->{"info_only"} = 0;

    # Check if gsdlinfo is in the argument list or not - if it is, don't parse
    # the args, just return the object.
    #print STDERR "#### " . join(",", @${args}) . "\n\n";
    my $v=0;
    foreach my $strArg (@{$args})
    {
	if(defined $strArg) {
	    if($strArg eq "-gsdlinfo")
	    {
		$self->{"info_only"} = 1;
		return bless $self, $class;
	    }
	    elsif ($strArg eq "-site") {
		# remember we just saw -site; the next argument is its value
		$v = $strArg;
	    }
	    elsif($v eq "-site") {
		$self->{'site'} = $strArg;
	    }
	}
    }

    delete $self->{"info_only"};

    if(parse2::parse($args,$hashArgOptLists->{"ArgList"},$self) == -1)
    {
	my $classTempClass = bless $self, $class;
	print STDERR "<BadPlugout d=$plugout_name>\n";
	&gsprintf(STDERR, "\n{BasPlugout.bad_general_option}\n", $plugout_name);
	$classTempClass->print_txt_usage(""); # Use default resource bundle
	die "\n";
    }


    # Resolve a user-supplied XSLT file against the collection's /
    # greenstone's etc directories; a missing file is fatal.
    if(defined $self->{'xslt_file'} && $self->{'xslt_file'} ne "")
    {
	my $full_file_path = &util::locate_config_file($self->{'xslt_file'});
	if (!defined $full_file_path) {
	    print STDERR "Can not find $self->{'xslt_file'}, please make sure you have supplied the correct file path or put the file into the collection's etc or greenstone's etc folder\n";
	    die "\n";
	}
	$self->{'xslt_file'} = $full_file_path;
    }

    # for group processing
    $self->{'gs_count'} = 0;
    $self->{'group_position'} = 1;

    $self->{'keep_import_structure'} = 0;

    # fold the negative -no_auxiliary_databases flag into a positive
    # 'generate_databases' setting, then discard the flag itself
    $self->{'generate_databases'} = 1;
    if ($self->{'no_auxiliary_databases'}) {
	$self->{'generate_databases'} = 0;
    }
    undef $self->{'no_auxiliary_databases'};

    &sorttools::setup_custom_sort();
    return bless $self, $class;

}
|
---|
198 |
|
---|
199 | # implement this in subclass if you want to do some initialization after
|
---|
200 | # loading and setting parameters, and before processing the documents.
|
---|
# implement this in subclass if you want to do some initialization after
# loading and setting parameters, and before processing the documents.
# Deliberately a no-op here.
sub begin {

    my $self= shift (@_);

}
|
---|
206 | # implement in subclasses if it needs some non-group related cleanup (post-group cleanup
|
---|
207 | # Like begin(), end() is also called by inexport.pm
|
---|
# implement in subclasses if it needs some non-group related cleanup (post-group cleanup)
# Like begin(), end() is also called by inexport.pm.  Deliberately a no-op here.
sub end {
    my $self= shift (@_);

}
|
---|
# Print this plugout's option information as XML (consumed by GLI and
# other tooling).  $header requests the surrounding XML header;
# $high_level_information_only suppresses per-argument detail.
sub print_xml_usage
{
    my $self = shift(@_);
    my $header = shift(@_);
    my $high_level_information_only = shift(@_);

    # XML output is always in UTF-8
    gsprintf::output_strings_in_UTF8;

    if ($header) {
	&PrintUsage::print_xml_header("plugout");
    }
    $self->print_xml($high_level_information_only);
}
|
---|
226 |
|
---|
227 |
|
---|
# Print one level of the plugout inheritance hierarchy as XML, then
# recurse.  NOTE(review): each call shift()s an entry off 'option_list'
# (the recursion ends when the list is exhausted); unlike
# determine_description_offset() the list is not restored afterwards.
sub print_xml
{
    my $self = shift(@_);
    my $high_level_information_only = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    my $plugoutoptions = shift(@$optionlistref);
    return if (!defined($plugoutoptions));

    gsprintf(STDERR, "<PlugoutInfo>\n");
    gsprintf(STDERR, " <Name>$plugoutoptions->{'name'}</Name>\n");
    my $desc = gsprintf::lookup_string($plugoutoptions->{'desc'});
    $desc =~ s/</&lt;/g; # doubly escaped
    $desc =~ s/>/&gt;/g;
    gsprintf(STDERR, " <Desc>$desc</Desc>\n");
    gsprintf(STDERR, " <Abstract>$plugoutoptions->{'abstract'}</Abstract>\n");
    gsprintf(STDERR, " <Inherits>$plugoutoptions->{'inherits'}</Inherits>\n");
    unless (defined($high_level_information_only)) {
	gsprintf(STDERR, " <Arguments>\n");
	if (defined($plugoutoptions->{'args'})) {
	    &PrintUsage::print_options_xml($plugoutoptions->{'args'});
	}
	gsprintf(STDERR, " </Arguments>\n");

	# Recurse up the plugout hierarchy
	$self->print_xml();
    }
    gsprintf(STDERR, "</PlugoutInfo>\n");
}
|
---|
258 |
|
---|
259 |
|
---|
# Print the plain-text usage message for this plugout, walking up the
# inheritance hierarchy via print_plugout_usage().
sub print_txt_usage
{
    my $self = shift(@_);

    # Print the usage message for a plugout (recursively)
    my $descoffset = $self->determine_description_offset(0);
    $self->print_plugout_usage($descoffset, 1);
}
|
---|
268 |
|
---|
# Compute the length of the longest option string anywhere in the
# plugout hierarchy so usage descriptions can be column-aligned.
# Entries are pop()ed off 'option_list' during the recursion and the
# saved copy is restored on the way back out.
sub determine_description_offset
{
    my $self = shift(@_);
    my $maxoffset = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    my $plugoutoptions = pop(@$optionlistref);
    return $maxoffset if (!defined($plugoutoptions));

    # Find the length of the longest option string of this download
    my $plugoutargs = $plugoutoptions->{'args'};
    if (defined($plugoutargs)) {
	my $longest = &PrintUsage::find_longest_option_string($plugoutargs);
	if ($longest > $maxoffset) {
	    $maxoffset = $longest;
	}
    }

    # Recurse up the download hierarchy
    $maxoffset = $self->determine_description_offset($maxoffset);
    # restore the option list consumed by the recursion
    $self->{'option_list'} = \@optionlist;
    return $maxoffset;
}
|
---|
293 |
|
---|
294 |
|
---|
# Print the usage text for one level of the plugout hierarchy, then
# recurse upwards.  $descoffset is the option-description column computed
# by determine_description_offset(); $isleafclass is true only for the
# most-derived class (which also prints the synopsis line).
# 'option_list' is consumed by shift() during recursion and restored
# before returning.
sub print_plugout_usage
{
    my $self = shift(@_);
    my $descoffset = shift(@_);
    my $isleafclass = shift(@_);

    my $optionlistref = $self->{'option_list'};
    my @optionlist = @$optionlistref;
    my $plugoutoptions = shift(@$optionlistref);
    return if (!defined($plugoutoptions));

    my $plugoutname = $plugoutoptions->{'name'};
    my $plugoutargs = $plugoutoptions->{'args'};
    my $plugoutdesc = $plugoutoptions->{'desc'};

    # Produce the usage information using the data structure above
    if ($isleafclass) {
	if (defined($plugoutdesc)) {
	    gsprintf(STDERR, "$plugoutdesc\n\n");
	}
	gsprintf(STDERR, " {common.usage}: plugout $plugoutname [{common.options}]\n\n");
    }

    # Display the download options, if there are some
    if (defined($plugoutargs)) {
	# Calculate the column offset of the option descriptions
	my $optiondescoffset = $descoffset + 2; # 2 spaces between options & descriptions

	if ($isleafclass) {
	    gsprintf(STDERR, " {common.specific_options}:\n");
	}
	else {
	    gsprintf(STDERR, " {common.general_options}:\n", $plugoutname);
	}

	# Display the download options
	&PrintUsage::print_options_txt($plugoutargs, $optiondescoffset);
    }

    # Recurse up the download hierarchy
    $self->print_plugout_usage($descoffset, 0);
    # restore the option list consumed by the recursion
    $self->{'option_list'} = \@optionlist;
}
|
---|
338 |
|
---|
339 |
|
---|
# Report a fatal internal error and terminate the whole process.
# Called as a plain function (not a method):
#   &error($function_name, $message)
# Fixes: typo in the user-visible message ("occoured" -> "occurred") and
# removal of a stray redundant bare block around the body.
sub error
{
    my ($strFunctionName, $strError) = @_;

    print "Error occurred in BasePlugout.pm\n"
        . "In Function: " . $strFunctionName . "\n"
        . "Error Message: " . $strError . "\n";
    exit(-1);
}
|
---|
350 |
|
---|
351 |
|
---|
# OIDtype may be "hash" or "hash_on_full_filename" or "incremental" or
# "filename" or "dirname" or "full_filename" or "assigned".
# Unrecognised schemes fall back to "hash".  For "assigned", the OID
# comes from a metadata field (default: dc.Identifier).
sub set_OIDtype {
    my $self = shift (@_);
    my ($type, $metadata) = @_;

    # keep the regex form (rather than eq) so edge-case matching
    # semantics are identical to the historical behaviour
    $self->{'OIDtype'} =
	($type =~ /^(hash|hash_on_full_filename|incremental|filename|dirname|full_filename|assigned)$/)
	    ? $type
	    : "hash";

    if ($type =~ /^assigned$/) {
	$self->{'OIDmetadata'} = (defined $metadata) ? $metadata : "dc.Identifier";
    }
}
|
---|
370 |
|
---|
# Record the directory that documents will be written to.
sub set_output_dir
{
    my ($self, $output_dir) = @_;
    $self->{'output_dir'} = $output_dir;
}

# Legacy spelling of set_output_dir(), kept for older callers.
sub setoutputdir
{
    my ($self, $output_dir) = @_;
    $self->{'output_dir'} = $output_dir;
}
|
---|
386 |
|
---|
# Accessor for the output (archives/export) directory.
sub get_output_dir
{
    my ($self) = @_;
    return $self->{'output_dir'};
}

# Legacy spelling of get_output_dir(), kept for older callers.
sub getoutputdir
{
    my ($self) = @_;
    return $self->{'output_dir'};
}

# Accessor for the output_info (archive info) structure.
sub getoutputinfo
{
    my ($self) = @_;
    return $self->{'output_info'};
}
|
---|
407 |
|
---|
408 |
|
---|
# Open $output_file_name for writing (via FileUtils) and return the
# filehandle; dies if the file cannot be opened.  Pair with
# release_output_handler() to close it.
sub get_output_handler
{
    my $self = shift (@_);

    my ($output_file_name) = @_;

    my $fh;
    &FileUtils::openFileHandle($output_file_name, '>', \$fh) or die('Can not open a file handler for: ' . $output_file_name . "\n");

    return $fh;
}
|
---|
420 |
|
---|
# Close a filehandle previously obtained from get_output_handler().
sub release_output_handler
{
    my ($self, $outhandler) = @_;

    close($outhandler);
}
|
---|
429 |
|
---|
# Write the standard XML prologue for an archive file to $handle:
# the XML declaration, an optional DOCTYPE (suppressed when $nondoctype
# is defined), and the opening root element when $docroot is given.
sub output_xml_header {
    my $self = shift (@_);
    my ($handle, $docroot, $nondoctype) = @_;

    #print $handle '<?xml version="1.0" encoding="UTF-8" standalone="no"?>' . "\n";

    #For Dspace must be UTF in lower case
    print {$handle} '<?xml version="1.0" encoding="utf-8" standalone="no"?>' . "\n";

    unless (defined $nondoctype) {
        # Used to be '<!DOCTYPE Archive SYSTEM ...'
        my $doctype = defined $docroot ? $docroot : "Section";
        print {$handle} "<!DOCTYPE $doctype SYSTEM \"http://greenstone.org/dtd/Archive/1.0/Archive.dtd\">\n";
    }

    if (defined $docroot) {
        print {$handle} "<$docroot>\n";
    }
}
|
---|
450 |
|
---|
# Write the closing root element matching output_xml_header().
sub output_xml_footer {
    my ($self, $handle, $docroot) = @_;

    if (defined $docroot) {
        print {$handle} "</$docroot>\n";
    }
}
|
---|
456 |
|
---|
457 |
|
---|
# General-purpose XML prologue writer: the declaration, an optional
# DOCTYPE (only when $opt_dtd is given; the doctype name defaults to the
# root element name unless $opt_doctype overrides it), and an optional
# root element that may carry extra attributes ($opt_attributes).
sub output_general_xml_header
{
    my $self = shift (@_);
    my ($handle, $docroot, $opt_attributes, $opt_dtd, $opt_doctype) = @_;

    print {$handle} '<?xml version="1.0" encoding="utf-8" standalone="no"?>' . "\n";

    if (defined $opt_dtd) {
        my $doctype = defined $opt_doctype ? $opt_doctype : $docroot;
        print {$handle} "<!DOCTYPE $doctype SYSTEM \"$opt_dtd\">\n";
    }

    if (defined $docroot) {
        my $full_docroot = defined $opt_attributes
            ? "$docroot $opt_attributes"
            : $docroot;

        print {$handle} "<$full_docroot>\n";
    }
}
|
---|
479 |
|
---|
# Counterpart to output_general_xml_header().  Simply delegates to
# output_xml_footer() as a plain function call: @_ is passed through
# unchanged, so the effective arguments are ($self, $handle, $docroot).
sub output_general_xml_footer
{
    output_xml_footer(@_);
}
|
---|
484 |
|
---|
# This is called by the plugins after read_into_doc_obj generates the doc_obj.
# Stamps OAI modification times on the document, picks (and if needed
# creates) its archives directory, hands off to the subclass saveas(),
# and records the document in archiveinf-doc.db unless databases are
# disabled.  Group counters are advanced only in group mode.
sub process {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $output_info = $self->{'output_info'};
    return if (!defined $output_info);

    # for OAI purposes
    $doc_obj->set_lastmodified();
    $doc_obj->set_oailastmodified();

    # find out which directory to save to
    my $doc_dir = "";
    if ($self->is_group()) {
	$doc_dir = $self->get_group_doc_dir($doc_obj);
    } else {
	$doc_dir = $self->get_doc_dir($doc_obj);
    }

    ##############################
    # call subclass' saveas method
    ##############################
    $self->saveas($doc_obj,$doc_dir);

    # write out data to archiveinf-doc.db
    if ($self->{'generate_databases'}) {
	$self->archiveinf_db($doc_obj);
    }
    if ($self->is_group()) {
	$self->{'gs_count'}++; # do we want this for all cases?
	$self->{'group_position'}++;
    }
}
|
---|
519 |
|
---|
# Record this document in the output_info structure so later build
# phases can locate it.  The stored sort key depends on 'sortmeta':
#   - unset/blank  -> empty key (group position recorded in group mode)
#   - "OID"        -> the document's OID itself
#   - otherwise    -> concatenation of the comma-separated metadata
#                     values named by sortmeta, after optional
#                     removeprefix/removesuffix stripping and
#                     sort-formatting.
sub store_output_info_reference {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $output_info = $self->{'output_info'};
    my $metaname = $self->{'sortmeta'};

    my $group_position;
    if ($self->is_group()) {
	$group_position = $self->{'group_position'};
    }
    # no sort metadata requested: store with an empty sort key
    if (!defined $metaname || $metaname !~ /\S/) {
	my $OID = $doc_obj->get_OID();
	$output_info->add_info($OID,$self->{'short_doc_file'}, undef, "", $group_position);
	return;
    }

    if ($metaname eq "OID") { # sort by OID
	my $OID = $doc_obj->get_OID();
	$output_info->add_info($OID,$self->{'short_doc_file'}, undef, $OID, undef);
	return;
    }

    my $metadata = "";
    my $top_section = $doc_obj->get_top_section();

    # build the sort key from each comma-separated metadata name in turn
    my @commameta_list = split(/,/, $metaname);
    foreach my $cmn (@commameta_list) {
	my $meta = $doc_obj->get_metadata_element($top_section, $cmn);
	if ($meta) {
	    # do remove prefix/suffix - this will apply to all values
	    $meta =~ s/^$self->{'removeprefix'}// if defined $self->{'removeprefix'};
	    $meta =~ s/$self->{'removesuffix'}$// if defined $self->{'removesuffix'};
	    $meta = &sorttools::format_metadata_for_sorting($cmn, $meta, $doc_obj);
	    $metadata .= $meta if ($meta);
	}
    }

    # store reference in the output_info
    $output_info->add_info($doc_obj->get_OID(),$self->{'short_doc_file'}, undef, $metadata,undef);

}
|
---|
562 |
|
---|
563 |
|
---|
564 |
|
---|
# Abstract method: write $doc_obj out under archives subdirectory
# $doc_dir.  Every concrete plugout subclass must override this.
# Fix: the die message previously named the wrong class ("Basplug").
sub saveas {
    my $self = shift (@_);
    my ($doc_obj, $doc_dir) = @_;

    die "BasePlugout::saveas function must be implemented in sub classes\n";
}
|
---|
571 |
|
---|
# Work out which directory a document belongs to when documents are
# grouped (several docs share one archives folder).  A fresh folder is
# started every 'group_size' documents, or early when the incoming
# document carries associated files (those need their own folder).
# Side effects: may reset 'group_position' and updates 'gs_doc_dir' /
# 'new_doc_dir'.  Returns undef if closing the previous group fails.
sub get_group_doc_dir {
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $outhandle = $self->{'output_handle'};
    my $OID = $doc_obj->get_OID();
    $OID = "NULL" unless defined $OID;

    my $groupsize = $self->{'group_size'};
    my $gs_count = $self->{'gs_count'};
    my $open_new_file = (($gs_count % $groupsize)==0);

    my $doc_dir;

    if (!$open_new_file && scalar(@{$doc_obj->get_assoc_files()})>0) {
	# if we have some assoc files, then we will need to start a new file
	if ($self->{'verbosity'} > 2) {
	    print $outhandle " Starting a archives folder for $OID as it has associated files\n";
	}
	$open_new_file = 1;
    }

    # opening a new file
    if (($open_new_file) || !defined($self->{'gs_doc_dir'})) {
	# first we close off the old output
	if ($gs_count>0)
	{
	    return if (!$self->close_group_output());
	}

	# this will create the directory
	$doc_dir = $self->get_doc_dir ($doc_obj);
	$self->{'new_doc_dir'} = 1;
	$self->{'gs_doc_dir'} = $doc_dir;
	$self->{'group_position'} = 1;
    }
    else {
	# keep filling the current group folder
	$doc_dir = $self->{'gs_doc_dir'};
	$self->{'new_doc_dir'} = 0;
    }
    return $doc_dir;

}
|
---|
# Determine (and create on disk) the archives subdirectory for a
# document.  Preference order: reuse the directory already recorded for
# this OID in output_info; else mirror the import-directory layout when
# 'keep_import_structure' is on; else mint a fresh directory from the
# OID via get_new_doc_dir().
sub get_doc_dir {

    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $OID = $doc_obj->get_OID();
    $OID = "NULL" unless defined $OID;

    my $working_dir = $self->get_output_dir();
    my $working_info = $self->{'output_info'};
    return if (!defined $working_info);

    my $doc_info = $working_info->get_info($OID);
    my $doc_dir = '';

    if (defined $doc_info && scalar(@$doc_info) >= 1)
    {
	# This OID already has an archives directory, so use it again
	$doc_dir = $doc_info->[0];
	# strip the trailing doc/docmets/docsql/dublin_core .xml(.gz) filename
	$doc_dir =~ s/\/?((doc(mets|sql)?)|(dublin_core))\.xml(\.gz)?$//;
    }
    elsif ($self->{'keep_import_structure'})
    {
	my $source_filename = $doc_obj->get_source_filename();
	$source_filename = &File::Basename::dirname($source_filename);
	$source_filename =~ s/[\\\/]+/\//g;
	$source_filename =~ s/\/$//;

	# the path relative to the import directory becomes the archives path
	$doc_dir = substr($source_filename, length($ENV{'GSDLIMPORTDIR'}) + 1);
    }

    # We have to use a new archives directory for this document
    if ($doc_dir eq "")
    {
	$doc_dir = $self->get_new_doc_dir ($working_info, $working_dir, $OID);
    }

    &FileUtils::makeAllDirectories(&FileUtils::filenameConcatenate($working_dir, $doc_dir));

    return $doc_dir;
}
|
---|
656 |
|
---|
657 |
|
---|
## @function get_new_doc_dir()
#
# Once a doc object is ready to write to disk (and hence has a nice OID),
# generate a unique subdirectory to write the information to.
#
# - create the directory as part of this call, to try and avoid race conditions
#   found in parallel processing [jmt12]
#
# The OID is split into consecutive fragments (subdir_split_length chars
# each) forming nested subdirectories; a document's own directory gets a
# ".dir" suffix.  If every fragment-derived path already exists, a
# numeric "-N" suffix is appended until creation succeeds.
#
# @todo figure out what the rule regarding $work_info->size() is meant to do
#
# @todo determine what $self->{'group'} is, and whether it should affect
# directory creation
#
sub get_new_doc_dir
{
    my $self = shift (@_);
    my($working_info,$working_dir,$OID) = @_;

    my $doc_dir = "";
    my $doc_dir_rest = $OID;

    # remove any \ and / from the OID
    $doc_dir_rest =~ s/[\\\/]//g;

    # Remove ":" if we are on Windows OS, as otherwise they get confused with the drive letters
    if ($ENV{'GSDLOS'} =~ /^windows$/i)
    {
	$doc_dir_rest =~ s/\://g;
    }

    # we generally create a unique directory by adding consequtive fragments of
    # the document identifier (split by some predefined length - defaulting to
    # 8) until we find a directory that doesn't yet exist. Note that directories
    # that contain a document have a suffix ".dir" (whereas those that contain
    # only subdirectories have no suffix).
    my $doc_dir_num = 0;       # how many directories deep we are
    my $created_directory = 0; # have we successfully created a new directory
    do
    {
	# (does this work on windows? - jmt12)
	if ($doc_dir_num > 0)
	{
	    $doc_dir .= '/';
	}

	# the default matching pattern grabs the next 'subdir_split_length'
	# characters of the OID to act as the next subdirectory
	my $pattern = '^(.{1,' . $self->{'subdir_split_length'} . '})';

	# Do we count any "HASH" prefix against the split length limit?
	if ($self->{'subdir_hash_prefix'} && $doc_dir_num == 0)
	{
	    $pattern = '^((HASH)?.{1,' . $self->{'subdir_split_length'} . '})';
	}

	# Note the use of 's' to both capture the next chuck of OID and to remove
	# it from OID at the same time
	if ($doc_dir_rest =~ s/$pattern//i)
	{
	    $doc_dir .= $1;
	    $doc_dir_num++;

	    my $full_doc_dir = &FileUtils::filenameConcatenate($working_dir, $doc_dir . '.dir');
	    if(!FileUtils::directoryExists($full_doc_dir))
	    {
		&FileUtils::makeAllDirectories($full_doc_dir);
		$created_directory = 1;
	    }

	    ###rint STDERR "[DEBUG] BasePlugout::get_new_doc_dir(<working_info>, $working_dir, $oid)\n";
	    ###rint STDERR " - create directory: $full_doc_dir => $created_directory\n";
	    ###rint STDERR " - rest: $doc_dir_rest\n";
	    ###rint STDERR " - working_info->size(): " . $working_info->size() . " [ < 1024 ?]\n";
	    ###rint STDERR " - doc_dir_num: " . $doc_dir_num . "\n";
	}
    }
    while ($doc_dir_rest ne '' && ($created_directory == 0 || ($working_info->size() >= 1024 && $doc_dir_num < 2)));

    # not unique yet? Add on an incremental suffix until we are unique
    my $i = 1;
    my $doc_dir_base = $doc_dir;
    while ($created_directory == 0)
    {
	$doc_dir = $doc_dir_base . '-' . $i;
	$created_directory = &FileUtils::makeAllDirectories(&FileUtils::filenameConcatenate($working_dir, $doc_dir . '.dir'));
	$i++;
    }

    # in theory this should never happen
    if (!$created_directory)
    {
	die("Error! Failed to create directory for document: " . $doc_dir_base . "\n");
    }

    return $doc_dir . '.dir';
}
## get_new_doc_dir()
|
---|
755 |
|
---|
756 |
|
---|
# Hard-link each of the document's associated files into its archives
# directory ($doc_dir under the output dir) and record a "gsdlassocfile"
# metadata entry for each.  Always sets "assocfilepath" on the top
# section, even with no associated files (needed for lucene).
sub process_assoc_files {
    my $self = shift (@_);
    my ($doc_obj, $doc_dir, $handle) = @_;

    my $outhandle = $self->{'output_handle'};

    my $output_dir = $self->get_output_dir();
    return if (!defined $output_dir);

    &FileUtils::makeAllDirectories($output_dir) unless &FileUtils::directoryExists($output_dir);

    my $working_dir = &FileUtils::filenameConcatenate($output_dir, $doc_dir);
    &FileUtils::makeAllDirectories($working_dir) unless &FileUtils::directoryExists($working_dir);

    my @assoc_files = ();
    my $filename;;

    my $source_filename = $doc_obj->get_source_filename();

    my $collect_dir = $ENV{'GSDLCOLLECTDIR'};

    if (defined $collect_dir) {
	my $dirsep_regexp = &util::get_os_dirsep();

	if ($collect_dir !~ /$dirsep_regexp$/) {
	    $collect_dir .= &util::get_dirsep(); # ensure there is a slash at the end
	}

	# This test is never going to fail on Windows -- is this a problem?

	# relative source paths are resolved against the collection directory
	if ($source_filename !~ /^$dirsep_regexp/) {
	    $source_filename = &FileUtils::filenameConcatenate($collect_dir, $source_filename);
	}
    }


    # set the assocfile path (even if we have no assoc files - need this for lucene)
    $doc_obj->set_utf8_metadata_element ($doc_obj->get_top_section(),
					 "assocfilepath",
					 "$doc_dir");
    foreach my $assoc_file_rec (@{$doc_obj->get_assoc_files()}) {
	# split the stored path into directory part and bare filename
	my ($dir, $afile) = $assoc_file_rec->[1] =~ /^(.*?)([^\/\\]+)$/;
	$dir = "" unless defined $dir;

	my $utf8_real_filename = $assoc_file_rec->[0];

	# for some reasons the image associate file has / before the full path
	$utf8_real_filename =~ s/^\\(.*)/$1/i;

	## my $real_filename = &util::utf8_to_real_filename($utf8_real_filename);
	my $real_filename = $utf8_real_filename;
	$real_filename = &util::downgrade_if_dos_filename($real_filename);

	if (&FileUtils::fileExists($real_filename)) {

	    $filename = &FileUtils::filenameConcatenate($working_dir, $afile);

	    &FileUtils::hardLink($real_filename, $filename, $self->{'verbosity'});

	    $doc_obj->add_utf8_metadata ($doc_obj->get_top_section(),
					 "gsdlassocfile",
					 "$afile:$assoc_file_rec->[2]:$dir");
	} elsif ($self->{'verbosity'} > 1) {
	    print $outhandle "BasePlugout::process couldn't copy the associated file " .
		"$real_filename to $afile\n";
	}
    }
}
|
---|
825 |
|
---|
826 |
|
---|
# Replay any buffered metadata-file assignments ("gsdlmetafile" entries,
# each of the form "<full path> : <short name>") into the document
# object, then delete the bookkeeping metadata itself.
sub process_metafiles_metadata
{
    my $self = shift (@_);
    my ($doc_obj) = @_;

    my $top_section = $doc_obj->get_top_section();
    my $metafiles   = $doc_obj->get_metadata($top_section, "gsdlmetafile");

    foreach my $metafile_pair (@$metafiles) {
        my ($full_metafile, $metafile) = split(/ : /, $metafile_pair);
        $doc_obj->metadata_file($full_metafile, $metafile);
    }

    $doc_obj->delete_metadata($top_section, "gsdlmetafile");
}
|
---|
843 |
|
---|
sub archiveinf_files_to_field
{
    # Push each existing file from @$files onto $oid_files->{$field},
    # converting absolute collection paths to placeholders for the path-like
    # fields, and (optionally) recording each file in %$reverse_lookups.
    # A file record is either a plain filename or an [actual, canonical] pair.
    my $self = shift(@_);
    my ($files, $field, $collect_dir, $oid_files, $reverse_lookups) = @_;

    foreach my $file_rec (@$files) {
        my $is_pair = (ref $file_rec eq "ARRAY");
        my $real_filename = $is_pair ? $file_rec->[0] : $file_rec;
        my $full_file     = $is_pair ? $file_rec->[1] : $file_rec;  # kept for parity; not currently used

        # for some reasons the image associate file has / before the full path
        $real_filename =~ s/^\\(.*)/$1/i;

        my $raw_filename = &util::downgrade_if_dos_filename($real_filename);

        if (!&FileUtils::fileExists($raw_filename)) {
            print STDERR "Warning: archiveinf_files_to_field()\n $real_filename does not appear to be on the file system\n";
            next;
        }

        if (defined $reverse_lookups) {
            $reverse_lookups->{$real_filename} = 1;
        }

        # path-valued fields are stored with @PLACEHOLDER@ prefixes so the
        # archive is relocatable
        if ($field =~ m@assoc-file|src-file|meta-file@) {
            $raw_filename = &util::abspath_to_placeholders($raw_filename);
        }

        push(@{$oid_files->{$field}}, $raw_filename);
    }
}
|
---|
883 |
|
---|
# Record a processed document in the archive-info databases:
#  * writes this OID's entry (doc.xml path, index status, source file,
#    sort metadata, associated files, metadata files, optional group
#    position) into the archiveinf-doc database; and
#  * registers reverse lookups (source/assoc filename -> OID) plus
#    meta-file flags in the in-memory output_info structure, so later
#    incremental imports can tell which source files produced which docs.
# Unless the -no_rss option is set, also appends an RSS <item> for
# documents whose index status is "I" (indexed) or "R" (reindexed).
sub archiveinf_db
{
    my $self = shift (@_);
    my ($doc_obj) = @_;

    # NOTE(review): $verbosity is assigned but not used below.
    my $verbosity = $self->{'verbosity'};

    my $collect_dir = $ENV{'GSDLCOLLECTDIR'};
    if (defined $collect_dir) {
        my $dirsep_regexp = &util::get_os_dirsep();

        if ($collect_dir !~ /$dirsep_regexp$/) {
            # ensure there is a slash at the end
            $collect_dir .= &util::get_dirsep();
        }
    }

    my $oid = $doc_obj->get_OID();
    my $source_filename = $doc_obj->get_unmodified_source_filename();
    my $working_info = $self->{'output_info'};
    my $doc_info = $working_info->get_info($oid);

    my ($doc_file,$index_status,$sortmeta, $group_position) = @$doc_info;
    # doc_file is the path to the archive doc.xml. Make sure it has unix
    # slashes, then if the collection is copied to linux, it can be built without reimport
    $doc_file =~ s/\\/\//g;
    # Scalar-valued fields here; they are boxed into one-element arrays
    # further down, just before the db write.
    my $oid_files = { 'doc-file' => $doc_file,
                      'index-status' => $index_status,
                      'src-file' => $source_filename,
                      'sort-meta' => $sortmeta,
                      'assoc-file' => [],
                      'meta-file' => [] };
    if (defined $group_position) {
        $oid_files->{'group-position'} = $group_position;
    }
    # Files that should map back to this OID for incremental rebuilds;
    # archiveinf_files_to_field() adds the assoc files to this set too.
    my $reverse_lookups = { $source_filename => "1" };


    $self->archiveinf_files_to_field($doc_obj->get_source_assoc_files(),"assoc-file",
                                     $collect_dir,$oid_files,$reverse_lookups);


    # meta files deliberately get no reverse lookups (see flag loop at end)
    $self->archiveinf_files_to_field($doc_obj->get_meta_files(),"meta-file",
                                     $collect_dir,$oid_files);

    # Get the infodbtype value for this collection from the arcinfo object
    my $infodbtype = $self->{'output_info'}->{'infodbtype'};
    my $output_dir = $self->{'output_dir'};

    my $doc_db = &dbutil::get_infodb_file_path($infodbtype, "archiveinf-doc", $output_dir);

    ##print STDERR "*** To set in db: \n\t$doc_db\n\t$oid\n\t$doc_db_text\n";

    if (!$self->{'no_rss'})
    {
        # Only newly indexed ("I") or reindexed ("R") docs get an RSS item.
        if (($oid_files->{'index-status'} eq "I") || ($oid_files->{'index-status'} eq "R")) {
            my $top_section = $doc_obj->get_top_section();

            # rss_title can be set in collect.cfg as follows:
            # plugout GreenstoneXMLPlugout -rss_title "dc.Title; ex.Title"
            # rss_title is a semi-colon or comma-separated list of the metadata field names that should
            # be consulted in order to obtain a Title (anchor text) for the RSS document link.
            # If not specified, rss_title will default to dc.Title, and fall back on Untitled
            my $metafieldnames = $self->{'rss_title'};
            my @metafieldarray = split(/[,;] ?/,$metafieldnames); # , or ; separator can be followed by an optional space
            my $titles;
            #@$titles=(); # at worst @$titles will be (), as get_metadata(dc.Titles) may return ()
            foreach my $metafieldname (@metafieldarray) {
                $metafieldname =~ s@^ex\.@@; # if ex.Title, need to get_metadata() on metafieldname=Title
                $titles = $doc_obj->get_metadata($top_section,$metafieldname);

                if(scalar(@$titles) != 0) { # found at least one title for one metafieldname
                    last; # break out of the loop
                }
            }

            # if ex.Title was listed in the metafieldnames, then we'll surely have a value for title for this doc
            # otherwise, if we have no titles at this point, add in a default of Untitled as this doc's title
            if(scalar(@$titles) == 0) { #&& $metafieldnames !~ m@ex\.Title@) {
                push(@$titles, "Untitled");
            }

            # encode basic html entities like <>"& in the title(s), since the & char can break RSS links
            for (my $i = 0; $i < scalar(@$titles); $i++) {
                &ghtml::htmlsafe(@$titles[$i]);
            }

            my $dc_title = join("; ", @$titles);

            if ($oid_files->{'index-status'} eq "R") {
                $dc_title .= " (Updated)";
            }

            my $rss_entry = "<item>\n";
            $rss_entry .= " <title>$dc_title</title>\n";
            if(&util::is_gs3()) {
                $rss_entry .= " <link>_httpdomain__httpcollection_/document/$oid</link>\n";
            } else {
                $rss_entry .= " <link>_httpdomainHtmlsafe__httpcollection_/document/$oid</link>\n";
            }
            $rss_entry .= "</item>";

            # Prefer an rss-items database when the backend supports it;
            # otherwise append to a flat rss-items.rdf file.
            if (defined(&dbutil::supportsRSS) && &dbutil::supportsRSS($infodbtype))
            {
                my $rss_db = &dbutil::get_infodb_file_path($infodbtype, 'rss-items', $output_dir);
                my $rss_db_fh = &dbutil::open_infodb_write_handle($infodbtype, $rss_db, 'append');
                &dbutil::write_infodb_rawentry($infodbtype, $rss_db_fh, $oid, $rss_entry);
                &dbutil::close_infodb_write_handle($infodbtype, $rss_db_fh);
            }
            else
            {
                my $rss_filename = &FileUtils::filenameConcatenate($output_dir,"rss-items.rdf");
                my $rss_fh;
                if (&FileUtils::openFileHandle($rss_filename, '>>', \$rss_fh, "utf8"))
                {
                    print $rss_fh $rss_entry . "\n";
                    &FileUtils::closeFileHandle($rss_filename, \$rss_fh);
                }
                else
                {
                    print STDERR "**** Failed to open $rss_filename\n$!\n";
                }
            }
        }
    }

    # Box the scalar fields into one-element arrays: write_infodb_entry()
    # expects every value to be an array ref.
    $oid_files->{'doc-file'} = [ $oid_files->{'doc-file'} ];
    $oid_files->{'index-status'} = [ $oid_files->{'index-status'} ];
    $oid_files->{'src-file'} = &util::abspath_to_placeholders($oid_files->{'src-file'});
    $oid_files->{'src-file'} = [ $oid_files->{'src-file'} ];
    $oid_files->{'sort-meta'} = [ $oid_files->{'sort-meta'} ];
    if (defined $oid_files->{'group-position'}) {
        $oid_files->{'group-position'} = [ $oid_files->{'group-position'} ];
    }

    my $infodb_file_handle = &dbutil::open_infodb_write_handle($infodbtype, $doc_db, "append");
    &dbutil::write_infodb_entry($infodbtype, $infodb_file_handle, $oid, $oid_files);
    &dbutil::close_infodb_write_handle($infodbtype, $infodb_file_handle);

    foreach my $rl (keys %$reverse_lookups) {
        $working_info->add_reverseinfo($rl,$oid);
    }

    # meta files not set in reverse entry, but need to set the metadata flag
    if (defined $doc_obj->get_meta_files()) {
        foreach my $meta_file_rec(@{$doc_obj->get_meta_files()}) {
            my $full_file = (ref $meta_file_rec eq "ARRAY") ? $meta_file_rec->[0] : $meta_file_rec;
            $working_info->set_meta_file_flag($full_file);
        }
    }
}
|
---|
1035 |
|
---|
# This sub is called for every metadata.xml accepted for processing by MetadataXMLPlugin
|
---|
1037 | # and adds an entry into archiveinf-src.db for that file in the form:
|
---|
1038 | # [@THISCOLLECTPATH@/import/metadata.xml]
|
---|
1039 | # <meta-file>1
|
---|
1040 | # This prevents blind reprocessing of the same old docs upon *incremental* building whenever
|
---|
1041 | # we encounter a default empty metadata.xml that has no actual <FileSet> content defined.
|
---|
sub add_metaxml_file_entry_to_archiveinfsrc {
    # Flag a single metadata.xml file in the output_info structure so it
    # is recorded in archiveinf-src.db and not blindly reprocessed on the
    # next incremental build.
    my ($self, $full_file) = @_;

    print STDERR "**** Adding metaxml file entry for full_file: $full_file\n";
    $self->{'output_info'}->set_meta_file_flag($full_file);
}
|
---|
1050 |
|
---|
1051 |
|
---|
# Record the metadata field used for sorting, together with optional
# prefix/suffix patterns to strip from its values. Leading "^" and
# trailing "$" anchors are removed from the patterns, since they are
# implied by how the patterns are applied.
sub set_sortmeta {
    my ($self, $sortmeta, $removeprefix, $removesuffix) = @_;

    $self->{'sortmeta'} = $sortmeta;

    if ($removeprefix) {
        $removeprefix =~ s/^\^//;               # don't need a leading ^
        $self->{'removeprefix'} = $removeprefix;
    }
    if ($removesuffix) {
        $removesuffix =~ s/\$$//;               # don't need a trailing $
        $self->{'removesuffix'} = $removesuffix;
    }
}
|
---|
1066 |
|
---|
1067 |
|
---|
1068 |
|
---|
# Open a pipe to an external Java XSLT process (org.nzdl.gsdl.ApplyXSLT)
# that transforms the document XML we print to it, writing its result to
# $output_file_name. The write end of the pipe is stored in
# $self->{'xslt_writer'}; close_xslt_pipe() ends the exchange.
# Does nothing unless $xslt_file names an existing stylesheet.
sub open_xslt_pipe
{
    my $self = shift @_;
    my ($output_file_name, $xslt_file) = @_;

    return unless defined $xslt_file and $xslt_file ne "" and &FileUtils::fileExists($xslt_file);

    my $java_class_path = &FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","ApplyXSLT.jar");

    if ($ENV{'GSDLOS'} eq "windows"){
        # Windows uses ';' as the classpath separator
        $java_class_path .= ";".&FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","xalan.jar");
        # this file:/// bit didn't work for me on windows XP
        #$xslt_file = "\"file:///".$xslt_file."\"";
    }
    else{
        $java_class_path .= ":".&FileUtils::filenameConcatenate($ENV{'GSDLHOME'},"bin","java","xalan.jar");
    }

    $java_class_path = "\"".$java_class_path."\"";

    my $cmd = "java -cp $java_class_path org.nzdl.gsdl.ApplyXSLT -t \"$xslt_file\" ";

    # BUGFIX: the original declared $mapping_file_path twice -- an outer
    # "my $mapping_file_path = ''" that was never used, shadowed by a second
    # "my" inside this block. One declaration, in the only scope using it.
    if (defined $self->{'mapping_file'} and $self->{'mapping_file'} ne ""){
        my $mapping_file_path = "\"".$self->{'mapping_file'}."\"";
        $cmd .= "-m $mapping_file_path";
    }

    # Lexical filehandle and explicit 3-arg '|-' pipe-open, replacing the
    # bareword *XMLWRITER glob and 2-arg "| cmd" open.
    open(my $xslt_writer, '|-', $cmd)
        or die "can't open pipe to xslt: $!";

    $self->{'xslt_writer'} = $xslt_writer;

    # ApplyXSLT's protocol: announce a document, then the output filename.
    print $xslt_writer "<?DocStart?>\n";
    print $xslt_writer "$output_file_name\n";
}
|
---|
1111 |
|
---|
1112 |
|
---|
sub close_xslt_pipe
{
    # Finish the exchange with the external XSLT process: send the
    # end-of-document marker, close the pipe, and clear the stored handle.
    # Safe to call when no pipe was ever opened.
    my $self = shift @_;

    my $writer = $self->{'xslt_writer'};
    return if !defined $writer;

    print $writer "<?DocEnd?>\n";
    close($writer);

    undef $self->{'xslt_writer'};
}
|
---|
1128 |
|
---|
1129 |
|
---|
1130 |
|
---|
# No-op in the base class. A subclass whose is_group() method can return 1
# (i.e. it batches several documents into one output file) should override
# this to flush and close its group output at the end of processing.
sub close_group_output{
    my $self = shift (@_);
}
|
---|
1135 |
|
---|
sub is_group {
    # Base plugouts write one output file per document, so grouping is off.
    # Subclasses that batch documents together override this to return 1.
    my ($self) = @_;
    return 0;
}
|
---|
1140 |
|
---|
# The fifteen elements of the simple (unqualified) Dublin Core element
# set. get_dc_metadata() consults this to decide which ex.dc.* / ex.*
# fields are allowed to stand in for missing dc.* values.
my $dc_set = { Title => 1,
               Creator => 1,
               Subject => 1,
               Description => 1,
               Publisher => 1,
               Contributor => 1,
               Date => 1,
               Type => 1,
               Format => 1,
               Identifier => 1,
               Source => 1,
               Language => 1,
               Relation => 1,
               Coverage => 1,
               Rights => 1};
|
---|
1156 |
|
---|
1157 |
|
---|
1158 | # returns an XML representation of the dublin core metadata
|
---|
1159 | # if dc meta is not found, try ex meta
|
---|
1160 | # This method is not used by the DSpacePlugout, which has its
|
---|
1161 | # own method to save its dc metadata
|
---|
# returns an XML representation of the dublin core metadata
# if dc meta is not found, try embedded ex.dc meta, then ex meta
# This method is not used by the DSpacePlugout, which has its
# own method to save its dc metadata
sub get_dc_metadata {
    my $self = shift(@_);
    my ($doc_obj, $section, $version) = @_;

    # build up string of dublin core metadata
    $section = "" unless defined $section;

    my $section_ptr = $doc_obj->_lookup_section($section);
    return "" unless defined $section_ptr;

    my $explicit_dc    = {};  # dc.* values, keyed by lowercased element name
    my $explicit_ex_dc = {};  # ex.dc.* fallback values
    my $explicit_ex    = {};  # ex.* (or unprefixed) fallback values

    my $all_text = "";

    # We want high quality dc metadata to go in first, so we store all the
    # assigned dc.* values first. Then, for all those dc metadata names in
    # the official dc set that are as yet unassigned, we look to see whether
    # embedded ex.dc.* metadata has defined some values for them. If not,
    # then for the same missing dc metadata names, we look in ex metadata.

    foreach my $data (@{$section_ptr->{'metadata'}}){
        my $escaped_value = &docprint::escape_text($data->[1]);

        if ($data->[0] =~ m/^dc\./) {
            # BUGFIX: lowercase a local copy of the metadata name rather than
            # tr///-ing $data->[0] in place, which permanently mutated the
            # element name stored inside the document object.
            my ($dc_element) = lc($data->[0]) =~ m/^dc\.(.*)/;

            push(@{$explicit_dc->{$dc_element}}, $escaped_value);  # autovivifies

            $all_text .= $self->_format_dc_element($dc_element, $escaped_value, $version);
        }
        elsif ($data->[0] =~ m/^ex\.dc\./) { # now look through ex.dc.* to fill in as yet unassigned fields in dc metaset
            $data->[0] =~ m/^ex\.dc\.(.*)/;
            my $ex_dc_element = $1;

            # only store the ex.dc value for this dc metaname if it belongs
            # to the official dc set
            if (defined $dc_set->{$ex_dc_element}) {
                push(@{$explicit_ex_dc->{lc($ex_dc_element)}}, $escaped_value);
            }
        }
        elsif (($data->[0] =~ m/^ex\./) || ($data->[0] !~ m/\./)) { # look through ex. meta (incl. meta without prefix)
            $data->[0] =~ m/^(?:ex\.)?(.*)/;
            my $ex_element = $1;

            if (defined $dc_set->{$ex_element}) {
                push(@{$explicit_ex->{lc($ex_element)}}, $escaped_value);
            }
        }
    }

    # go through dc_set and for any element *not* defined in explicit_dc,
    # fill it in from ex.dc.* values if present, otherwise from ex.* values
    foreach my $k ( keys %$dc_set ) {
        my $lc_k = lc($k);

        next if defined $explicit_dc->{$lc_k};

        my $fallback_values = defined $explicit_ex_dc->{$lc_k}
            ? $explicit_ex_dc->{$lc_k}
            : $explicit_ex->{$lc_k};
        next unless defined $fallback_values;

        foreach my $v (@$fallback_values) {
            $all_text .= $self->_format_dc_element($lc_k, $v, $version);
        }
    }

    if ($all_text eq "") {
        # (sic: "metatdata" misspelling retained so existing output and any
        # tools matching this string are unaffected)
        $all_text .= " There is no Dublin Core metatdata in this document\n";
    }
    # strip control characters that are not legal in XML
    $all_text =~ s/[\x00-\x09\x0B\x0C\x0E-\x1F]//g;

    return $all_text;
}

# Format a single dublin core element/value pair for get_dc_metadata().
# The "oai_dc" version uses <dc:element> tags; any other version gets the
# DSpace-style <dcvalue element="..."> form. (Extracted from three
# identical inline copies in the original.)
sub _format_dc_element {
    my ($self, $dc_element, $escaped_value, $version) = @_;

    if (defined $version && ($version eq "oai_dc")) {
        return " <dc:$dc_element>$escaped_value</dc:$dc_element>\n";
    }
    # qualifier???
    return ' <dcvalue element="'. $dc_element.'">'. $escaped_value. "</dcvalue>\n";
}
|
---|
1277 |
|
---|
1278 | # Build up dublin_core metadata. Priority given to dc.* over ex.*
|
---|
1279 | # This method was apparently added by Jeffrey and committed by Shaoqun.
|
---|
1280 | # But we don't know why it was added, so not using it anymore.
|
---|
# Build up dublin_core metadata. Priority given to dc.* over ex.*
# This method was apparently added by Jeffrey and committed by Shaoqun.
# But we don't know why it was added, so not using it anymore.
sub new_get_dc_metadata {
    my ($self, $doc_obj, $section, $version) = @_;

    # build up string of dublin core metadata
    $section = "" unless defined $section;

    my $section_ptr = $doc_obj->_lookup_section($section);
    return "" unless defined $section_ptr;

    my $all_text = "";
    foreach my $data (@{$section_ptr->{'metadata'}}) {
        my $escaped_value = &docprint::escape_text($data->[1]);

        # Split a qualified name like "dc.Title" into its set ("dc") and
        # field ("Title"). Unqualified names belong to the "ex" set. Note:
        # only the first two dot-separated parts are used, so "dc.date.issued"
        # yields Name="date" (original behaviour, preserved).
        my @parts = split('\.', $data->[0]);
        my ($type, $name) = defined $parts[1]
            ? @parts[0, 1]
            : ('ex', $parts[0]);

        $all_text .= ' <Metadata Type="'. $type.'" Name="'.$name.'">'. $escaped_value. "</Metadata>\n";
    }
    return $all_text;
}
|
---|
1315 |
|
---|
1316 |
|
---|
1317 | 1;
|
---|