########################################################################### # # GreenstoneSQLPlugin.pm -- reads into doc_obj from SQL db and docsql.xml # Metadata and/or fulltext are stored in SQL db, the rest may be stored in # the docsql .xml files. # A component of the Greenstone digital library software # from the New Zealand Digital Library Project at the # University of Waikato, New Zealand. # # Copyright (C) 2001 New Zealand Digital Library Project # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. # ########################################################################### package GreenstoneSQLPlugin; use strict; no strict 'refs'; # allow filehandles to be variables and viceversa use DBI; use docprint; # for new unescape_text() subroutine use GreenstoneXMLPlugin; use gssql; # TODO: # - Run TODOs here, in Plugout and in gssql.pm by Dr Bainbridge. # - "Courier" demo documents in lucene-sql collection: character (degree symbol) not preserved in title. Is this because we encode in utf8 when putting into db and reading back in? # Test doc with meta and text like macron in Maori text. # - Have not yet tested writing out just meta or just fulltxt to sql db and reading just that # back in from the sql db while the remainder is to be read back in from the docsql .xml files. # TODO: deal with incremental vs removeold. 
If docs removed from import folder, then import step # won't delete it from archives but buildcol step will. Need to implement this with this database plugin or wherever the actual flow is # TODO: Add public instructions on using this plugin and its plugout: start with installing mysql binary, changing pwd, running the server (and the client against it for checking, basic cmds like create and drop). Then discuss db name, table names (per coll), db cols and col types, and how the plugout and plugin work. # Discuss the plugin/plugout parameters. # + NOTTODO: when db is not running GLI is paralyzed -> can we set timeout on DBI connection attempt? # NOT A PROBLEM: Tested to find DBI connection attempt fails immediately when MySQL server not # running. The GLI "paralyzing" incident last time was not because of the gs sql connection code, # but because my computer was freezing on-and-off. # TODO Q: is "reindex" = del from db + add to db? # - is this okay for reindexing, or will it need to modify existing values (update table) # - if it's okay, what does reindex need to accomplish (and how) if the OID changes because hash id produced is different? # - delete is accomplished in GS SQL Plugin, during buildcol.pl. When should reindexing take place? # during SQL plugout/import.pl or during plugin? If adding is done by GSSQLPlugout, does it need to # be reimplemented in GSSQLPlugin to support the adding portion of reindexing. # TODO Q: During import, the GS SQL Plugin is called before the GS SQL Plugout with undesirable side # effect that if the db doesn't exist, gssql::use_db() fails, as it won't create db. # + TODO: Incremental delete can't work until GSSQLPlugout has implemented build_mode = incremental # (instead of tossing away db on every build) # + Ask about docsql naming convention adopted to identify OID. Better way? # collection names -> table names: it seems hyphens not allowed. Changed to underscores. 
# + Startup parameters (except removeold/build_mode)
# + how do we detect we're to do removeold during plugout in import.pl phase
# + incremental building: where do we need to add code to delete rows from our sql table after
#   incrementally importing a coll with fewer docs (for instance)? What about deleted/modified meta?
# + Ask if I can assume that all SQL dbs (not just MySQL) will preserve the order of inserted nodes
#   (sections), which in this case had made it easy to reconstruct the doc_obj in memory in the
#   correct order.
#   YES: Otherwise for later db types (drivers), can set order by primary key column and then
#   order by did column.

########################################################################################
# GreenstoneSQLPlugin inherits from GreenstoneXMLPlugin so that if meta or fulltext
# is still written out to doc.xml (docsql .xml), that will be processed as usual,
# whereas GreenstoneSQLPlugin will process all the rest (full text and/or meta, whichever
# is written out by GreenstoneSQLPlugout into the SQL db).

# Establish the inheritance relationship at compile time by populating @ISA directly.
BEGIN {
    @GreenstoneSQLPlugin::ISA = ('GreenstoneXMLPlugin');
}

# This plugin must be in the document plugins pipeline IN PLACE OF GreenstoneXMLPlugin,
# so we won't have a process exp conflict here.
# The structure of docsql.xml files is identical to doc.xml and the contents are similar except:
# - since metadata and/or fulltxt are stored in the SQL db instead, just XML comments indicating
#   this are left inside docsql.xml (in place of the meta and/or txt)
# - the root element Archive now has a docoid attribute

# Returns the default regex matching the docsql .xml filenames this plugin should process.
# Regex based on this method in GreenstoneXMLPlugin.
sub get_default_process_exp {
    my $self = shift (@_);

    return q^(?i)docsql(-\d+)?\.xml$^;
    #return q^(?i)docsql(-.+)?\.xml$^; # no longer storing the OID embedded in docsql .xml filename
}

# Choices for the process_mode plugin option: which parts of each document this
# plugin reads back from the SQL database (the rest comes from the docsql .xml file).
my $process_mode_list =
    [ { 'name' => "meta_only",
        'desc' => "{GreenstoneSQLPlug.process_mode.meta_only}" },
      { 'name' => "text_only",
        'desc' => "{GreenstoneSQLPlug.process_mode.text_only}" },
      { 'name' => "all",
        'desc' => "{GreenstoneSQLPlug.process_mode.all}" } ];

# Plugin arguments: process regex, process mode, and the db connection settings.
my $arguments =
    [ { 'name' => "process_exp",
        'desc' => "{BaseImporter.process_exp}",
        'type' => "regexp",
        'deft' => &get_default_process_exp(),
        'reqd' => "no" },
      { 'name' => "process_mode",
        'desc' => "{GreenstoneSQLPlug.process_mode}",
        'type' => "enum",
        'list' => $process_mode_list,
        'deft' => "all",
        'reqd' => "no"},
      { 'name' => "db_driver",
        'desc' => "{GreenstoneSQLPlug.db_driver}",
        'type' => "string",
        'deft' => "mysql",
        'reqd' => "yes"},
      { 'name' => "db_client_user",
        'desc' => "{GreenstoneSQLPlug.db_client_user}",
        'type' => "string",
        'deft' => "root",
        'reqd' => "yes"},
      { 'name' => "db_client_pwd",
        'desc' => "{GreenstoneSQLPlug.db_client_pwd}",
        'type' => "string",
        'deft' => "",
        'reqd' => "yes"}, # TODO: is pwd really required?
      { 'name' => "db_host",
        'desc' => "{GreenstoneSQLPlug.db_host}",
        'type' => "string",
        'deft' => "127.0.0.1",
        'reqd' => "yes"},
    ];

my $options = { 'name' => "GreenstoneSQLPlugin",
                'desc' => "{GreenstoneSQLPlugin.desc}",
                'abstract' => "no",
                'inherits' => "yes",
                'args' => $arguments };

# TODO: For on cancel, add a SIGTERM handler or so to call end()
# or to explicitly call gs_sql->close_connection if $gs_sql def

# Standard Greenstone plugin constructor: registers this plugin's arguments and
# options with the plugin framework, then hands off to the superclass constructor.
sub new {
    my ($class) = shift (@_);
    my ($pluginlist, $inputargs, $hashArgOptLists) = @_;
    push(@$pluginlist, $class);

    push(@{$hashArgOptLists->{"ArgList"}}, @{$arguments});
    push(@{$hashArgOptLists->{"OptList"}}, $options);

    # arrow form avoids Perl's indirect-object-syntax ambiguity of "new Class(...)"
    my $self = GreenstoneXMLPlugin->new($pluginlist, $inputargs, $hashArgOptLists);

    $self = bless $self, $class;

    if ($self->{'info_only'}) {
        # If running pluginfo, we don't need to go further.
        return $self;
    }

    # do anything else that needs to be done here when not pluginfo
    #$self->{'delete_docids'} = (); # list of doc oids to delete during deinit()

    return $self;
}

# SAX-style start-tag handler. On the root Archive element, remembers the document
# OID stored in its "docoid" attribute (needed later by close_document() to look up
# the doc's meta/fulltext in the SQL db). All other elements are delegated to the
# superclass GreenstoneXMLPlugin's handler.
sub xml_start_tag {
    my $self = shift(@_);
    my ($expat, $element) = @_;

    my $outhandle = $self->{'outhandle'};

    $self->{'element'} = $element;
    if ($element eq "Archive") { # docsql.xml files contain a OID attribute on Archive element
        # The element's attributes are in %_ as per ReadXMLFile::xml_start_tag() (while $_
        # contains the tag). Don't access %_{'docoid'} directly: keep getting a warning
        # message to use $_{'docoid'} for scalar contexts, but %_ is the element's attr
        # hashmap whereas $_ has the tag info. So we don't want to do $_{'docoid'}.
        my %attr_hash = %_; # right way, see OAIPlugin.pm
        $self->{'doc_oid'} = $attr_hash{'docoid'};
        print $outhandle "Extracted OID from docsql.xml: ".$self->{'doc_oid'}."\n"
            if $self->{'verbosity'} > 2;
    }
    else { # let superclass GreenstoneXMLPlugin continue to process Section and Metadata elements
        $self->SUPER::xml_start_tag(@_);
    }
}

# TODO Q: Why are there 4 passes when we're only indexing at doc and section level (2 passes)?
# What's the dummy pass, why is there a pass for infodb?

# Called when the parser has finished reading one docsql .xml file. Depending on the
# build processor's mode, either deletes the current doc's records from the SQL db,
# or reads its metadata and/or fulltext back from the SQL db into the doc_obj
# (which parts depends on this plugin's process_mode option).
# At the end of superclass GreenstoneXMLPlugin.pm's close_document() method, the
# doc_obj in memory is processed (indexed) and then made undef. So we have to work
# with doc_obj BEFORE calling the superclass' close_document() at the end.
sub close_document {
    my $self = shift(@_);

    my $gs_sql = $self->get_gssql_instance();

    my $outhandle = $self->{'outhandle'};
    my $doc_obj = $self->{'doc_obj'};

    my $build_proc_mode = $self->{'processor'}->get_mode(); # can be "text" as per basebuildproc or infodb
    my $oid = $self->{'doc_oid'}; # we stored current doc's OID during sub xml_start_tag()
    my $proc_mode = $self->{'process_mode'};

    print $outhandle "++++ OID of document (meta|text) to be del or read in from DB: ".$self->{'doc_oid'}."\n"
        if $self->{'verbosity'} > 2;

    # For now, we have access to doc_obj (until just before super::close_document() terminates).
    # No need to call $self->{'doc_obj'}->set_OID($oid), because either the OID is stored in
    # the SQL db as meta 'Identifier' alongside other metadata, or it's stored in the doc.xml
    # as metadata 'Identifier' alongside other metadata. Either way, Identifier meta will be
    # read into the docobj automatically with other meta.

    if ($self->{'verbosity'} > 2) {
        print STDERR "+++++++++++ buildproc_mode: $build_proc_mode\n";
        print STDERR "+++++++++++ SQLPlug proc_mode: $proc_mode\n";
    }

    # TODO: where does reindexing take place, GreenstoneSQL -Plugout or -Plugin?
    #if($build_proc_mode =~ m/(delete|reindex)$/) { # marked for deletion or reindexing (=delete + add)
    if($build_proc_mode =~ m/(delete)$/) { # doc denoted by current OID has been marked for deletion
        # build_proc_mode could be "(infodb|text)(delete|reindex)"
        # "...delete" or "...reindex" as per ArchivesInfPlugin

        print STDERR "@@@@ DELETING DOC FROM SQL DB\n" if $self->{'verbosity'} > 2;

        if($proc_mode eq "all" || $proc_mode eq "meta_only") {
            print STDERR "@@@@@@@@ Deleting $oid from meta table\n" if $self->{'verbosity'} > 2;
            $gs_sql->delete_recs_from_metatable_with_docid($oid);
        }
        if($proc_mode eq "all" || $proc_mode eq "text_only") {
            print STDERR "@@@@@@@@ Deleting $oid from fulltxt table\n" if $self->{'verbosity'} > 2;
            $gs_sql->delete_recs_from_texttable_with_docid($oid);
        }

        # If we're reindexing the current doc, we will want to continue: which will add this
        # doc ID back into the db with the new meta/full txt values. But if we're deleting,
        # then we're done processing the document, so set doc_oid to undef to prevent adding
        # it back into db:
        #undef $self->{'doc_oid'} if($build_proc_mode =~ m/delete$/);

    } # done deleting doc from SQL db
    else { # loading the doc's meta and/or fulltext from the SQL db into doc_obj

        print STDERR "@@@@ LOADING DOC FROM SQL DB\n" if $self->{'verbosity'} > 2;

        if($proc_mode eq "all" || $proc_mode eq "meta_only") {
            # read in meta for the doc, i.e. select from the collection's metadata table
            my $sth = $gs_sql->select_from_metatable_matching_docid($oid);

            print $outhandle "### SQL select stmt: ".$sth->{'Statement'}."\n" if $self->{'verbosity'} > 2;
            print $outhandle "----------SQL DB contains meta-----------\n" if $self->{'verbosity'} > 2;

            # https://www.effectiveperlprogramming.com/2010/07/set-custom-dbi-error-handlers/
            while( my @row = $sth->fetchrow_array() ) {
                #print $outhandle "row: @row\n";
                my ($primary_key, $did, $sid, $metaname, $metaval) = @row;

                # get rid of the artificial "root" introduced in section id when saving to sql db
                $sid =~ s@^root@@;
                $sid = $doc_obj->get_top_section() unless $sid;

                print $outhandle "### did: $did, sid: |$sid|, meta: $metaname, val: $metaval\n"
                    if $self->{'verbosity'} > 2;

                # we accessed the db in utf8 mode, so we can call doc_obj->add_utf8_metadata directly
                $doc_obj->add_utf8_metadata($sid, $metaname, &docprint::unescape_text($metaval));
            }
            print $outhandle "----------FIN READING DOC's META FROM SQL DB------------\n"
                if $self->{'verbosity'} > 2;
        }

        if($proc_mode eq "all" || $proc_mode eq "text_only") {
            # read in fulltxt for the doc, i.e. select from the collection's fulltxt table
            my $sth = $gs_sql->select_from_texttable_matching_docid($oid);

            print $outhandle "### stmt: ".$sth->{'Statement'}."\n" if $self->{'verbosity'} > 2;
            print $outhandle "----------\nSQL DB contains txt entries for-----------\n" if $self->{'verbosity'} > 2;

            while( my ($primary_key, $did, $sid, $text) = $sth->fetchrow_array() ) {
                # get rid of the artificial "root" introduced in section id when saving to sql db
                #$sid =~ s@^root@@;
                $sid = $doc_obj->get_top_section() if ($sid eq "root");

                print $outhandle "### did: $did, sid: |$sid|, fulltext: \n" if $self->{'verbosity'} > 2;

                # TODO - pass by ref?
                # we accessed the db in utf8 mode, so we can call doc_obj->add_utf8_text directly
                $doc_obj->add_utf8_text($sid, &docprint::unescape_text($text));
            }
            print $outhandle "----------FIN READING DOC's TXT FROM SQL DB------------\n"
                if $self->{'verbosity'} > 2;
        }
    } # done reading into docobj from SQL db

    # Don't forget to clean up on close() in superclass:
    # it will get the doc_obj indexed then make it undef.
    $self->SUPER::close_document(@_);
}

# We want SQLPlugin to connect to the db only during the buildcol.pl phase, not during
# import.pl. This works out okay, as close_document() (called by read()) is only invoked
# during buildcol.pl.
#
# Further, we want a single db connection for the GS SQL Plugin to be used for the multiple
# plugin passes: for the "dummy" pass, and for doc level and for section level indexing.
# By calling the lazy loading get_gssql_instance() from close_document(), we connect to the
# SQL database once per GSSQLPlugin and only during the buildcol phase.
#
# get_gssql_instance() is a lazy loading method that returns the singleton db connection for
# a GreenstoneSQLPlugin object: one db connection instance that can be used for all the many
# doc_objects processed by this plugin.
#
# Except in methods get_gssql_instance() and deinit(), don't access self->{'_gs_sql'}
# directly. Instead, call get_gssql_instance() and store the return value in a local
# variable, my $gs_sql.
#
# Dies (fatal for the build) if the connection cannot be established or the db cannot
# be used, as the plugin can do nothing without the database.
sub get_gssql_instance {
    my $self = shift(@_);

    # If a previous connection attempt failed, the process would already have terminated
    # with die(), so no separate 'failed' check is needed before reattempting:
    #return undef if(defined $self->{'failed'});

    # return the cached singleton connection if we already have one
    return $self->{'_gs_sql'} if($self->{'_gs_sql'});

    # assume we'll fail to connect
    # (NOTE(review): this flag is never reset — the undef further down is commented out —
    # but it is also never consulted, since the check above is commented out too)
    $self->{'failed'} = 1;

    print STDERR "@@@@@@@@@@ LAZY CONNECT CALLED\n" if $self->{'verbosity'} > 2;

    ####################
    # print "@@@ SITE NAME: ". $self->{'site_name'} . "\n" if defined $self->{'site_name'};
    # print "@@@ COLL NAME: ". $ENV{'GSDLCOLLECTION'} . "\n";
    # print STDERR "@@@@ db_pwd: " . $self->{'db_client_pwd'} . "\n";
    # print STDERR "@@@@ user: " . $self->{'db_client_user'} . "\n";
    # print STDERR "@@@@ db_host: " . $self->{'db_host'} . "\n";
    # print STDERR "@@@@ db_driver: " . $self->{'db_driver'} . "\n";
    ####################

    # Create the gssql object. Collection name will be used for naming tables
    # (site name will be used for naming the database).
    # Arrow form avoids Perl's indirect-object-syntax ambiguity of "new Class(...)".
    my $gs_sql = gssql->new({ 'collection_name' => $ENV{'GSDLCOLLECTION'},
                              'verbosity' => $self->{'verbosity'} });

    # Try connecting to the mysql db; a failure is fatal for the plugin, so terminate here.
    # PrintError would already have displayed the warning message on connection fail.
    if(!$gs_sql->connect_to_db({ 'db_driver' => $self->{'db_driver'},
                                 'db_client_user' => $self->{'db_client_user'},
                                 'db_client_pwd' => $self->{'db_client_pwd'},
                                 'db_host' => $self->{'db_host'} })
       )
    {
        die("Could not connect to db. Can't proceed.\n");
    }

    # one database per GS3 site; for GS2 the db is called greenstone2
    my $db_name = $self->{'site_name'} || "greenstone2";
    #my $build_mode = $self->{'build_mode'} || "removeold";

    # The db and its tables should exist by now. Attempt to use the db; on failure this is
    # fatal, so terminate here after disconnecting again.
    # PrintError would already have displayed the warning message on load fail.
    if(!$gs_sql->use_db($db_name)) {
        $gs_sql->disconnect_from_db() || warn("Unable to disconnect from database.\n");
        die("Could not use db $db_name. Can't proceed.\n");
    }

    #undef $self->{'failed'};

    # cache the db handle now that we're connected
    $self->{'_gs_sql'} = $gs_sql;

    return $gs_sql;
}

# This method also runs on import.pl if gs_sql has a value, but we just want to run it on
# buildcol (gs_sql only gets a value during buildcol, since close_document() is only
# invoked then). Use deinit() not end() because there can be multiple plugin passes —
# one for doc level and another for section level indexing — and deinit() runs once for
# all passes. This way, we close the SQL database connection once per buildcol run.
sub deinit {
    my ($self) = shift (@_);

    print STDERR "@@@@@@@@@@ GreenstoneSQLPlugin::DEINIT CALLED\n" if $self->{'verbosity'} > 2;

    if($self->{'_gs_sql'}) {
        # Only want to work with the sql db if buildcol.pl: gs_sql won't have a value
        # except during buildcol, so when processor =~ m/buildproc$/.
        # Fall back to the GS2 db name so we don't concatenate undef when site_name is unset.
        $self->{'_gs_sql'}->disconnect_from_db()
            || warn("Unable to disconnect from database " . ($self->{'site_name'} || "greenstone2") . "\n");

        # explicitly set to undef so all future use has to make the connection again
        undef $self->{'_gs_sql'};
    }

    $self->SUPER::deinit(@_);
}