###########################################################################
#
# inexport.pm -- useful utilities to support import.pl and export.pl
# A component of the Greenstone digital library software
# from the New Zealand Digital Library Project at the
# University of Waikato, New Zealand.
#
# Copyright (C) 1999 New Zealand Digital Library Project
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
###########################################################################

package inexport;

use strict;
use warnings;

use File::Basename;

use util;
use GDBMUtils;

# Read the "OIDcount" file in the archives directory (if present) and
# prime $doc::OIDcount with the recorded value, so that document OIDs
# generated by this import continue from where the previous one left off.
#
#   $archivedir - path to the collection's archives directory
sub prime_doc_oid_count
{
    my ($archivedir) = @_;
    my $oid_count_filename = &util::filename_cat($archivedir, "OIDcount");

    if (-e $oid_count_filename) {
        # Three-argument open with a lexical filehandle (safer than the
        # bareword two-argument form).
        if (open(my $oid_in, '<', $oid_count_filename)) {
            my $OIDcount = <$oid_in>;
            chomp $OIDcount;
            close($oid_in);

            $doc::OIDcount = $OIDcount;
        }
        else {
            # Non-fatal: doc.pm's counter simply starts from its default.
            print STDERR "Warning: unable to read document OID count from $oid_count_filename\n";
            print STDERR "Setting value to 0\n";
        }
    }
}

# Use the file "OIDcount" in the archives directory to record what value
# doc.pm's OID counter got up to, for the next (incremental) import.
#
#   $archivedir - path to the collection's archives directory
sub store_doc_oid_count
{
    my ($archivedir) = @_;
    my $oid_count_filename = &util::filename_cat($archivedir, "OIDcount");

    if (open(my $oid_out, '>', $oid_count_filename)) {
        print {$oid_out} $doc::OIDcount, "\n";
        close($oid_out);
    }
    else {
        print STDERR "Warning: unable to store document OID count\n";
    }
}

# Compare the current import area against the file list recorded by the
# previous import, and partition $block_hash->{'all_files'} into:
#   'new_files', 'existing_files', 'new_or_modified_metadata_files',
#   'reindex_files' and 'deleted_files'.
# Metadata files are detected as modified by comparing their mtime against
# the age of archiveinf-doc.gdb; anything below a new/modified metadata
# file in the hierarchy is forced to be reindexed.
#
#   $archive_info     - archive information object (prev_import_filelist)
#   $block_hash       - hash of file classifications (updated in place)
#   $importdir        - import directory (used to absolutise paths)
#   $archivedir       - archives directory (location of the GDBM files)
#   $verbosity        - verbosity level
#   $incremental_mode - "all" or "onlyadd"
sub new_vs_old_import_diff
{
    my ($archive_info,$block_hash,$importdir,$archivedir,$verbosity,$incremental_mode) = @_;

    # in this method, we want to know if metadata files are modified or not
    my $doc_db = "archiveinf-doc.gdb";
    my $arcinfo_doc_filename = &util::filename_cat($archivedir, $doc_db);

    # -M gives the age of the file relative to script start time; smaller
    # means more recently modified
    my $archiveinf_timestamp = -M $arcinfo_doc_filename;

    # First convert all files to absolute form
    # This is to support the situation where the import folder is not
    # the default
    my $prev_all_files = $archive_info->{'prev_import_filelist'};
    my $full_prev_all_files = {};

    foreach my $prev_file (keys %$prev_all_files) {
        if (!&util::filename_is_absolute($prev_file)) {
            my $full_prev_file = &util::filename_cat($ENV{'GSDLCOLLECTDIR'},$prev_file);
            $full_prev_all_files->{$full_prev_file} = $prev_file;
        }
        else {
            $full_prev_all_files->{$prev_file} = $prev_file;
        }
    }

    # Figure out which are the new files, existing files and so
    # by implication the files from the previous import that are not
    # there any more => mark them for deletion
    foreach my $curr_file (keys %{$block_hash->{'all_files'}}) {
        my $full_curr_file = $curr_file;

        # entry in 'all_files' is moved to either 'existing_files',
        # 'deleted_files', 'new_files', or 'new_or_modified_metadata_files'

        if (!&util::filename_is_absolute($curr_file)) {
            # add in import dir to make absolute
            $full_curr_file = &util::filename_cat($importdir,$curr_file);
        }

        # figure out if new file or not
        if (defined $full_prev_all_files->{$full_curr_file}) {
            # delete it so that only files that need deleting are left
            delete $full_prev_all_files->{$full_curr_file};

            # had it before. is it a metadata file?
            if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
                # is it modified??
                if (-M $full_curr_file < $archiveinf_timestamp) {
                    print STDERR "*** Detected a modified metadata file: $full_curr_file\n" if $verbosity > 2;
                    # its newer than last build
                    $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
                }
            }
            else {
                if ($incremental_mode eq "all") {
                    # had it before
                    $block_hash->{'existing_files'}->{$full_curr_file} = 1;
                }
                else {
                    # Warning in "onlyadd" mode, but had it before!
                    print STDERR "Warning: File $full_curr_file previously imported.\n";
                    print STDERR "         Treating as new file\n";
                    $block_hash->{'new_files'}->{$full_curr_file} = 1;
                }
            }
        }
        else {
            if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
                # the new file is the special sort of file greenstone uses
                # to attach metadata to src documents
                # i.e metadata.xml
                # (but note, the filename used is not constrained in
                # Greenstone to always be this)

                print STDERR "***** Detected new metadata file: $full_curr_file\n" if $verbosity > 2;
                $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
            }
            else {
                $block_hash->{'new_files'}->{$full_curr_file} = 1;
            }
        }

        delete $block_hash->{'all_files'}->{$curr_file};
    }

    # Deal with complication of new or modified metadata files by forcing
    # everything from this point down in the file hierarchy to
    # be freshly imported.
    #
    # This may mean files that have not changed are reindexed, but does
    # guarantee by the end of processing all new metadata is correctly
    # associated with the relevant document(s).

    foreach my $new_mdf (keys %{$block_hash->{'new_or_modified_metadata_files'}}) {
        my ($fileroot,$situated_dir,$ext) = fileparse($new_mdf, "\\.[^\\.]+\$");
        $situated_dir =~ s/[\\\/]+$//;  # remove trailing slashes
        $situated_dir =~ s/\\/\\\\/g;   # need to protect windows slash \ in regular expression

        # Go through existing_files, and mark anything that is contained
        # within 'situated_dir' to be reindexed (in case some of the metadata
        # attaches to one of these files)
        my $reindex_files = [];

        foreach my $existing_f (keys %{$block_hash->{'existing_files'}}) {
            if ($existing_f =~ m/^$situated_dir/) {
                push(@$reindex_files,$existing_f);
                $block_hash->{'reindex_files'}->{$existing_f} = 1;
            }
        }

        # Reindexing is accomplished by putting them in the list for reindexing
        # (line above) and then tagging the arcinfo version as to be deleted
        _mark_docs_for_deletion($archive_info,$block_hash,$reindex_files,$archivedir,$verbosity, "reindex");

        # metadata file needs to be in new_files list so parsed by MetadataXMLPlug
        # (or equivalent)
        $block_hash->{'new_files'}->{$new_mdf} = 1;
    }

    # By this point full_prev_all_files contains the files
    # mentioned in archiveinf-src.db but are not in the 'import'
    # folder (or whatever was specified through -importdir ...)

    # This list can contain files that were created in the 'tmp' or
    # 'cache' areas (such as screen-size and thumbnail images).
    #
    # In building the final list of files to delete, we test to see if
    # it exists on the filesystem and if it does (unusual for a "normal"
    # file in import, but possible in the case of 'tmp' files),
    # supress it from going into the final list

    my $collectdir = $ENV{'GSDLCOLLECTDIR'};

    # foreach (not void-context map): we only want the side effect on
    # $block_hash, not a transformed list
    foreach my $del_file (values %$full_prev_all_files) {
        my $full_del_file = $del_file;

        if (!&util::filename_is_absolute($del_file)) {
            # add in collect dir to make absolute
            $full_del_file = &util::filename_cat($collectdir,$del_file);
        }

        if (!-e $full_del_file) {
            $block_hash->{'deleted_files'}->{$del_file} = 1;
        }
    }
}

# not used anymore
#
# Return 1 if $file matches one of the 'assoc-file' entries of $doc_rec
# (each entry is taken relative to $file's directory), otherwise 0.
sub is_assoc_file
{
    my ($file,$doc_rec) = @_;

    my ($file_root,$dirname,$suffix) = fileparse($file, "\\.[^\\.]+\$");

    foreach my $af (@{$doc_rec->{'assoc-file'}}) {
        my $full_af = &util::filename_cat($dirname,$af);
        return 1 if ($full_af eq $file);
    }

    return 0;
}

# this is used to delete "deleted" docs, and to remove old versions of
# "changed" docs
#
#   $archive_info  - archive information object (status get/set)
#   $block_hash    - 'reindex_files' updated for affected source docs
#   $deleted_files - array ref of files whose docs are to be marked
#   $archivedir    - archives directory (location of the GDBM files)
#   $verbosity     - verbosity level
#   $mode_text     - human-readable action ("reindex"/"deleted from index")
sub _mark_docs_for_deletion
{
    my ($archive_info,$block_hash,$deleted_files,$archivedir,$verbosity,$mode_text) = @_;

    my $doc_db = "archiveinf-doc.gdb";
    my $src_db = "archiveinf-src.gdb";

    my $arcinfo_doc_filename = &util::filename_cat($archivedir, $doc_db);
    my $arcinfo_src_filename = &util::filename_cat($archivedir, $src_db);

    # record files marked for deletion in arcinfo
    foreach my $file (@$deleted_files) {
        # use 'archiveinf-src' GDBM file to look up all the OIDs
        # that this file is used in (note in most cases, it's just one OID)
        my $src_rec = &GDBMUtils::gdbmRecordToHash($arcinfo_src_filename,$file);
        my $oids = $src_rec->{'oid'};

        # delete the src record
        &GDBMUtils::gdbmDatabaseRemove($arcinfo_src_filename,$file);

        foreach my $oid (@$oids) {
            # find the source doc (the primary file that becomes this oid)
            my $doc_rec = &GDBMUtils::gdbmRecordToHash($arcinfo_doc_filename,$oid);
            my $doc_source_file = $doc_rec->{'src-file'}->[0];

            if (!&util::filename_is_absolute($doc_source_file)) {
                $doc_source_file = &util::filename_cat($ENV{'GSDLCOLLECTDIR'},$doc_source_file);
            }

            if ($doc_source_file ne $file) {
                # its an associated or metadata file
                # mark source doc for reimport as one of its assoc files
                # has changed or deleted
                $block_hash->{'reindex_files'}->{$doc_source_file} = 1;
            }

            my $curr_status = $archive_info->get_status_info($oid);
            if (defined($curr_status) && ($curr_status ne "D")) {
                if ($verbosity > 1) {
                    print STDERR "$oid ($doc_source_file) marked to be $mode_text on next buildcol.pl\n";
                }

                # mark oid for deletion (it will be deleted or reimported)
                $archive_info->set_status_info($oid,"D");

                # rewrite the <index-status> line of the raw GDBM record to "D"
                # NOTE(review): angle brackets restored here (stripped in the
                # damaged source, as with <OIDIN> above) — matches upstream
                my $val = &GDBMUtils::gdbmDatabaseGet($arcinfo_doc_filename,$oid);
                $val =~ s/^<index-status>(.*)$/<index-status>D/m;
                &GDBMUtils::gdbmDatabaseSet($arcinfo_doc_filename,$oid,$val);
            }
        }
    }
}

# Public wrapper: mark the given docs to be deleted from the index on the
# next buildcol.pl.  Same arguments as _mark_docs_for_deletion, minus the
# trailing mode text.
sub mark_docs_for_deletion
{
    _mark_docs_for_deletion(@_,"deleted from index");
}

# Mark any 'existing_files' that are newer than the last build for
# reindexing.
#
# Reindexing is accomplished by deleting the previously indexed
# version of the document, and then allowing the new version to
# be indexed (as would a new document be indexed).
#
# The first step (marking for deletion) is implemented by this routine.
#
# By default in Greenstone a new version of an index will hash to
# a new unique OID, and the above strategy of reindex=delete+add
# works fine.  A special case arises when a persistent OID is
# allocated to a document (for instance through a metadata field),
# and the second step to reindexing (see XXXX) detects this and
# deals with it appropriately.
sub mark_docs_for_reindex
{
    my ($archive_info,$block_hash,$archivedir,$verbosity) = @_;

    my @existing_files = sort keys %{$block_hash->{'existing_files'}};

    my $doc_db = "archiveinf-doc.gdb";
    my $arcinfo_doc_filename = &util::filename_cat($archivedir, $doc_db);
    my $archiveinf_timestamp = -M $arcinfo_doc_filename;

    my $reindex_files = [];

    foreach my $existing_filename (@existing_files) {
        if (-M $existing_filename < $archiveinf_timestamp) {
            # file is newer than last build
            my $existing_file = $existing_filename;
            #my $collectdir = &util::filename_cat($ENV{'GSDLCOLLECTDIR'});
            #my $collectdir_resafe = &util::filename_to_regex($collectdir);
            #$existing_file =~ s/^$collectdir_resafe(\\|\/)?//;

            print STDERR "**** Reindexing existing file: $existing_file\n";

            push(@$reindex_files,$existing_file);
            $block_hash->{'reindex_files'}->{$existing_filename} = 1;
        }
    }

    _mark_docs_for_deletion($archive_info,$block_hash,$reindex_files,$archivedir,$verbosity, "reindex");
}

1;