########################################################################### # # inexport.pm -- useful utilities to support import.pl and export.pl # A component of the Greenstone digital library software # from the New Zealand Digital Library Project at the # University of Waikato, New Zealand. # # Copyright (C) 1999 New Zealand Digital Library Project # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. 
# ###########################################################################

package inexport;

use strict;
use warnings;

use File::Basename;

use util;
use GDBMUtils;

# NOTE(review): mark_docs_for_deletion() below calls &dbutil::read_infodb_entry,
# but this module does not 'use dbutil' itself -- confirm the calling scripts
# (import.pl / export.pl) load it before this sub runs.

# Path to the 'archiveinf-src' GDBM database inside the given archives
# directory.  It maps each source filename to the OID(s) it contributes to.
sub src_db_file
{
    my ($archivedir) = @_;

    return &util::filename_cat($archivedir, "archiveinf-src.gdb");
}

# Path to the 'archiveinf-doc' GDBM database inside the given archives
# directory.  It maps each OID to its document information record.
sub doc_db_file
{
    my ($archivedir) = @_;

    return &util::filename_cat($archivedir, "archiveinf-doc.gdb");
}

# Path to the plain-text file that records the highest document OID
# counter reached by doc.pm (see store_doc_oid_count).
sub oid_count_file
{
    my ($archivedir) = @_;

    return &util::filename_cat($archivedir, "OIDcount");
}

# Restore $doc::OIDcount from the "OIDcount" file written by a previous
# run, so OID allocation continues from where the last import stopped.
# Does nothing when the file does not exist; warns (leaving the value
# at its default of 0) when it exists but cannot be opened.
sub prime_doc_oid_count
{
    my ($archivedir) = @_;

    my $oid_count_filename = &oid_count_file($archivedir);

    if (-e $oid_count_filename) {
        if (open(my $oid_in, '<', $oid_count_filename)) {
            # BUGFIX: the readline from the handle was missing here
            # (the source read "my $OIDcount = ;"), which is a syntax
            # error -- restored as a read of the first line.
            my $OIDcount = <$oid_in>;
            close($oid_in);

            # Guard against an empty file, which would leave $OIDcount
            # undefined and make chomp warn.
            if (defined $OIDcount) {
                chomp $OIDcount;
                $doc::OIDcount = $OIDcount;
            }
        }
        else {
            print STDERR "Warning: unable to read document OID count from $oid_count_filename\n";
            print STDERR "Setting value to 0\n";
        }
    }
}

# Use the file "OIDcount" in the archives directory to record what
# value doc.pm got up to, so the next incremental import can resume
# OID allocation from there (see prime_doc_oid_count).
sub store_doc_oid_count
{
    my ($archivedir) = @_;

    my $oid_count_filename = &oid_count_file($archivedir);

    if (open(my $oid_out, '>', $oid_count_filename)) {
        print {$oid_out} $doc::OIDcount, "\n";
        close($oid_out);
    }
    else {
        print STDERR "Warning: unable to store document OID count\n";
    }
}

# Compare the current import area against the previous import recorded
# in the archive databases, moving every entry of
# $block_hash->{'all_files'} into one of:
#
#   'new_files'                      -- not present in the previous import
#   'existing_files'                 -- seen before and unchanged
#   'reindex_files'                  -- seen before but needs reprocessing
#   'new_or_modified_metadata_files' -- metadata.xml-style files that are
#                                       new, or newer than the last build
#   'deleted_files'                  -- in the previous import, gone now
#
# $incremental_mode is "all" (full incremental) or "onlyadd" (only new
# files processed; a previously seen file triggers a warning and is
# treated as new).  In this method we also want to know whether
# metadata files are modified or not.
sub new_vs_old_import_diff
{
    my ($archive_info,$block_hash,$importdir,$archivedir,$verbosity,$incremental_mode) = @_;

    # The modification age of the doc database acts as the "last build"
    # timestamp that file ages are compared against (-M = days since
    # modification, so *smaller* means *newer*).
    my $arcinfo_doc_filename = &doc_db_file($archivedir);
    my $archiveinf_timestamp = -M $arcinfo_doc_filename;

    # First convert all files from the previous import to absolute form.
    # This is to support the situation where the import folder is not
    # the default.
    my $prev_all_files = $archive_info->{'prev_import_filelist'};
    my $full_prev_all_files = {};

    foreach my $prev_file (keys %$prev_all_files) {
        if (!&util::filename_is_absolute($prev_file)) {
            my $full_prev_file = &util::filename_cat($ENV{'GSDLCOLLECTDIR'},$prev_file);
            $full_prev_all_files->{$full_prev_file} = $prev_file;
        }
        else {
            $full_prev_all_files->{$prev_file} = $prev_file;
        }
    }

    # Figure out which are the new files, existing files and -- by
    # implication -- the files from the previous import that are not
    # there any more (those get marked for deletion below).
    foreach my $curr_file (keys %{$block_hash->{'all_files'}}) {
        # entry in 'all_files' is moved to either 'existing_files',
        # 'new_files', or 'new_or_modified_metadata_files'
        my $full_curr_file = $curr_file;
        if (!&util::filename_is_absolute($curr_file)) {
            # add in import dir to make absolute
            $full_curr_file = &util::filename_cat($importdir,$curr_file);
        }

        # figure out if new file or not
        if (defined $full_prev_all_files->{$full_curr_file}) {
            # Had it before.  Delete it so that only files needing
            # deletion are left in $full_prev_all_files at the end.
            delete $full_prev_all_files->{$full_curr_file};

            if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
                # a metadata file we had before -- has it been modified
                # since the last build?
                if (-M $full_curr_file < $archiveinf_timestamp) {
                    print STDERR "*** Detected a modified metadata file: $full_curr_file\n" if $verbosity > 2;
                    $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
                }
            }
            else {
                if ($incremental_mode eq "all") {
                    $block_hash->{'existing_files'}->{$full_curr_file} = 1;
                }
                else {
                    # Warning in "onlyadd" mode, but had it before!
                    print STDERR "Warning: File $full_curr_file previously imported.\n";
                    print STDERR " Treating as new file\n";
                    $block_hash->{'new_files'}->{$full_curr_file} = 1;
                }
            }
        }
        else {
            if ($block_hash->{'metadata_files'}->{$full_curr_file}) {
                # the new file is the special sort of file greenstone
                # uses to attach metadata to src documents,
                # i.e. metadata.xml (but note, the filename used is not
                # constrained in Greenstone to always be this)
                print STDERR "***** Detected new metadata file: $full_curr_file\n" if $verbosity > 2;
                $block_hash->{'new_or_modified_metadata_files'}->{$full_curr_file} = 1;
            }
            else {
                $block_hash->{'new_files'}->{$full_curr_file} = 1;
            }
        }
        delete $block_hash->{'all_files'}->{$curr_file};
    }

    # Deal with the complication of new or modified metadata files by
    # forcing everything from that point down in the file hierarchy to
    # be freshly imported.
    #
    # This may mean files that have not changed are reindexed, but does
    # guarantee that by the end of processing all new metadata is
    # correctly associated with the relevant document(s).
    foreach my $new_mdf (keys %{$block_hash->{'new_or_modified_metadata_files'}}) {
        my ($fileroot,$situated_dir,$ext) = fileparse($new_mdf, "\\.[^\\.]+\$");

        $situated_dir =~ s/[\\\/]+$//;   # remove trailing slashes

        # quotemeta protects the Windows '\' separator -- and any other
        # regex metacharacter in the path (e.g. '(', '.', '+') -- when
        # the directory is interpolated into the match below.  The
        # original escaped only backslashes, so paths containing other
        # metacharacters would mis-match or die.
        my $situated_dir_re = quotemeta($situated_dir);

        # Go through existing_files, and mark anything that is contained
        # within 'situated_dir' to be reindexed (in case some of the
        # metadata attaches to one of these files)
        foreach my $existing_f (keys %{$block_hash->{'existing_files'}}) {
            if ($existing_f =~ m/^$situated_dir_re/) {
                $block_hash->{'reindex_files'}->{$existing_f} = 1;
                delete $block_hash->{'existing_files'}->{$existing_f};
            }
        }

        # metadata file needs to be in new_files list so it gets parsed
        # by MetadataXMLPlug (or equivalent)
        $block_hash->{'new_files'}->{$new_mdf} = 1;
    }

    # Go through the remaining existing files and work out what has
    # changed (is newer than the last build) and needs to be reindexed.
    foreach my $existing_filename (sort keys %{$block_hash->{'existing_files'}}) {
        if (-M $existing_filename < $archiveinf_timestamp) {
            # file is newer than last build
            print STDERR "**** Reindexing existing file: $existing_filename\n";
            $block_hash->{'reindex_files'}->{$existing_filename} = 1;
        }
    }

    # By this point $full_prev_all_files contains the files mentioned
    # in archiveinf-src.db that are not in the 'import' folder (or
    # whatever was specified through -importdir ...)
    #
    # This list can contain files that were created in the 'tmp' or
    # 'cache' areas (such as screen-size and thumbnail images).
    #
    # In building the final list of files to delete, we test to see if
    # each still exists on the filesystem; if it does (unusual for a
    # "normal" file in import, but possible in the case of 'tmp'
    # files), suppress it from going into the final list.
    my $collectdir = $ENV{'GSDLCOLLECTDIR'};

    foreach my $prev_file (values %$full_prev_all_files) {
        my $full_prev_file = $prev_file;
        if (!&util::filename_is_absolute($prev_file)) {
            # add in collect dir to make absolute
            $full_prev_file = &util::filename_cat($collectdir,$prev_file);
        }

        if (!-e $full_prev_file) {
            # note: keyed by the original (possibly relative) filename,
            # as recorded in prev_import_filelist
            $block_hash->{'deleted_files'}->{$prev_file} = 1;
        }
    }
}

# This is used to delete "deleted" docs, and to remove old versions of
# "changed" docs.  For every file in @$deleted_files, look up the
# OID(s) it contributed to via the archiveinf-src database, remove the
# src record, and mark each OID with status "D" so the next
# buildcol.pl deletes (or reimports) it.  If the file is not itself
# the primary source of the OID, the primary source is queued for
# reindexing instead.
#
# $mode is 'delete' or 'reindex' (only affects the message printed).
sub mark_docs_for_deletion
{
    my ($archive_info,$block_hash,$deleted_files,$archivedir,$verbosity,$mode) = @_;

    my $mode_text = "deleted from index";
    if ($mode eq "reindex") {
        $mode_text = "reindexed";
    }

    my $arcinfo_doc_filename = &doc_db_file($archivedir);
    my $arcinfo_src_filename = &src_db_file($archivedir);

    # record files marked for deletion in arcinfo
    foreach my $file (@$deleted_files) {
        # use 'archiveinf-src' GDBM file to look up all the OIDs
        # that this file is used in (note in most cases, it's just one OID)
        my $src_rec = GDBMUtils::gdbmRecordToHash($arcinfo_src_filename,$file);
        my $oids = $src_rec->{'oid'};

        # delete the src record
        GDBMUtils::gdbmDatabaseRemove($arcinfo_src_filename,$file);

        foreach my $oid (@$oids) {
            # find the source doc (the primary file that becomes this oid)
            my $doc_rec = GDBMUtils::gdbmRecordToHash($arcinfo_doc_filename,$oid);
            my $doc_source_file = $doc_rec->{'src-file'}->[0];

            if (!&util::filename_is_absolute($doc_source_file)) {
                $doc_source_file = &util::filename_cat($ENV{'GSDLCOLLECTDIR'},$doc_source_file);
            }

            if ($doc_source_file ne $file) {
                # it's an associated or metadata file -- mark the source
                # doc for reimport as one of its assoc files has changed
                # or been deleted
                $block_hash->{'reindex_files'}->{$doc_source_file} = 1;
            }

            my $curr_status = $archive_info->get_status_info($oid);
            if (defined($curr_status) && ($curr_status ne "D")) {
                if ($verbosity > 1) {
                    print STDERR "$oid ($doc_source_file) marked to be $mode_text on next buildcol.pl\n";
                }

                # mark oid for deletion (it will be deleted or reimported)
                $archive_info->set_status_info($oid,"D");

                # rewrite the first line of the stored doc record to "D"
                # (the /m modifier makes ^.*$ match only the first line)
                my $val = &dbutil::read_infodb_entry("gdbm", $arcinfo_doc_filename, $oid);
                $val =~ s/^.*$/D/m;
                &GDBMUtils::gdbmDatabaseSet($arcinfo_doc_filename,$oid,$val);
            }
        }
    }
}

1;