###########################################################################
#
# solrbuilder.pm -- perl wrapper for building index with Solr
# A component of the Greenstone digital library software
# from the New Zealand Digital Library Project at the
# University of Waikato, New Zealand.
#
# Copyright (C) 1999 New Zealand Digital Library Project
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
###########################################################################

|
---|
| 27 | package solrbuilder;
|
---|
| 28 |
|
---|
| 29 | use strict;
|
---|
| 30 | no strict 'refs';
|
---|
| 31 |
|
---|
| 32 | use lucenebuilder;
|
---|
[24453] | 33 | use solrserver;
|
---|
[24446] | 34 |
|
---|
# Compile-time setup: make solrbuilder a subclass of lucenebuilder, so every
# build phase not overridden in this file falls through to the lucene
# implementation.
sub BEGIN {
    @solrbuilder::ISA = ('lucenebuilder');
}
|
---|
| 38 |
|
---|
| 39 |
|
---|
# Constructor.  Delegates to lucenebuilder's constructor, re-blesses the
# result into this class, and records the solr-specific settings: the
# build type tag and the solr_passes.pl command line used later to pipe
# documents through to the Solr server.
sub new {
    my ($class, @args) = @_;

    my $self = lucenebuilder->new(@args);
    bless $self, $class;

    $self->{'buildtype'} = "solr";

    my $solr_passes_script = "solr_passes.pl";
    $self->{'solr_passes'} = $solr_passes_script;

    # Prefix the perl interpreter (with -S to search PATH) so the script is
    # always runnable; quote both parts in case of spaces in the paths.
    $self->{'solr_passes_exe'} =
        '"' . util::get_perl_exec() . '" -S "' . $solr_passes_script . '"';

    return $self;
}
|
---|
| 54 |
|
---|
| 55 |
|
---|
# Name of the document-processor class used for solr collections.
sub default_buildproc {
    my ($self) = @_;

    return "solrbuildproc";
}
|
---|
| 61 |
|
---|
# This writes a nice version of the text docs
#
# Pipes the stored (Doc/Sec level) text of every document through
# solr_passes.pl in 'text' mode, so Solr ends up holding the compressed
# text.  Does nothing when the collection was built with -no_text.
#
# Essentially the same as the lucenebuilder.pm version, only using solr_passes
# => refactor and make better use of inheritence
sub compress_text
{
    my $self = shift (@_);
    # do nothing if we don't want compressed text
    return if $self->{'no_text'};

    my ($textindex) = @_;

    # workaround to avoid hard-coding "solr" check into buildcol.pl
    $textindex =~ s/^section://;

    my $outhandle = $self->{'outhandle'};

    # the text directory
    my $text_dir = &FileUtils::filenameConcatenate($self->{'build_dir'}, "text");
    my $build_dir = &FileUtils::filenameConcatenate($self->{'build_dir'}, "");
    &FileUtils::makeAllDirectories($text_dir);

    my $osextra = "";
    if ($ENV{'GSDLOS'} =~ /^windows$/i)
    {
        $text_dir =~ s@/@\\@g;
    }
    else
    {
        if ($outhandle ne "STDERR")
        {
            # so solr_passes doesn't print to stderr if we redirect output
            $osextra .= " 2>/dev/null";
        }
    }

    # Find the perl script to call to run solr
    my $solr_passes = $self->{'solr_passes'};
    my $solr_passes_exe = $self->{'solr_passes_exe'};

    my $solr_passes_sections = "Doc";

    my ($handle);

    if ($self->{'debug'})
    {
        # in debug mode, dump what would be sent to solr_passes to stdout
        $handle = *STDOUT;
    }
    else
    {
        my $site = $self->{'site'};
        my $collect = $self->{'collection'};
        my $core_prefix = (defined $site) ? "$site-$collect" : $collect;
        my $core = $core_prefix; # unused in this call to solr_passes

        $core = "building-".$core unless $self->{'incremental'}; # core points to building only for force_removeold

        print STDERR "Executable: $solr_passes_exe\n";
        print STDERR "Sections: $solr_passes_sections\n";
        print STDERR "Build Dir: $build_dir\n";
        print STDERR "Cmd: $solr_passes_exe $core text \"$build_dir\" \"dummy\" $osextra\n";
        if (!open($handle, "| $solr_passes_exe $core text \"$build_dir\" \"dummy\" $osextra"))
        {
            print STDERR "<FatalError name='NoRunSolrPasses'/>\n</Stage>\n" if $self->{'gli'};
            # bug fix: the die message previously named build_index, which
            # made failures here look like they came from the wrong routine
            die "solrbuilder::compress_text - couldn't run $solr_passes_exe\n$!\n";
        }
    }

    # stored text is always Doc and Sec levels
    my $levels = { 'document' => 1, 'section' => 1 };
    # always do database at section level
    my $db_level = "section";

    # set up the document processor
    $self->{'buildproc'}->set_output_handle ($handle);
    $self->{'buildproc'}->set_mode ('text');
    $self->{'buildproc'}->set_index ($textindex);
    $self->{'buildproc'}->set_indexing_text (0);
    $self->{'buildproc'}->set_levels ($levels);
    $self->{'buildproc'}->set_db_level ($db_level);
    $self->{'buildproc'}->reset();

    # re-read every document and stream its text down the pipe
    &plugin::begin($self->{'pluginfo'}, $self->{'source_dir'},
                   $self->{'buildproc'}, $self->{'maxdocs'});
    &plugin::read ($self->{'pluginfo'}, $self->{'source_dir'},
                   "", {}, {}, $self->{'buildproc'}, $self->{'maxdocs'}, 0, $self->{'gli'});
    &plugin::end($self->{'pluginfo'});

    close ($handle) unless $self->{'debug'};
    $self->print_stats();

    print STDERR "</Stage>\n" if $self->{'gli'};
}
|
---|
| 157 |
|
---|
| 158 | #----
|
---|
| 159 |
|
---|
| 160 |
|
---|
| 161 |
|
---|
# Copy $in_filename to $out_filename line by line, applying $replace_rules:
# each rule is a hashref with a 'regexp' and an 'insert' string.  The first
# rule whose regexp matches a line causes that line to be replaced by the
# rule's insert text (printed verbatim, so the insert must carry its own
# newlines); lines matching no rule are copied through unchanged.
#
# Failure to open either file is reported on STDERR but is not fatal,
# preserving the routine's best-effort contract for its callers.
sub filter_in_out_file
{
    my ($in_filename,$out_filename,$replace_rules) = @_;

    # 3-arg open with lexical filehandles (previously 2-arg opens with
    # bareword handles, which risk mode injection via the filename)
    if (open(my $sin, '<', $in_filename)) {

        if (open(my $sout, '>', $out_filename)) {

            while (defined (my $line = <$sin>)) {
                chomp $line;

                my $done_insert = 0;
                foreach my $rule (@$replace_rules) {
                    my $line_re = $rule->{'regexp'};
                    my $insert  = $rule->{'insert'};

                    if ($line =~ m/$line_re/) {
                        print {$sout} $insert;
                        $done_insert = 1;
                        last;   # only the first matching rule applies
                    }
                }
                if (!$done_insert) {
                    print {$sout} "$line\n";
                }
            }

            close($sout);
        }
        else {
            print STDERR "Error: Failed to open $out_filename\n";
            print STDERR "  $!\n";
        }

        close($sin);
    }
    else {
        print STDERR "Error: Failed to open $in_filename\n";
        print STDERR "  $!\n";
    }
}
|
---|
| 205 |
|
---|
# We need to push the list of indexfield to shortname mappings through to the
# build_cfg as, unlike in MGPP, we need these mappings in advance to configure
# Lucene/Solr.  Unfortunately the original function found in mgbuilder.pm makes
# a mess of this - it only outputs fields that have been processed (none have)
# and it has a hardcoded renaming for 'text' so it becomes 'TX' according to
# the schema but 'TE' according to the XML sent to lucene_passes.pl/solr_passes.pl
# This version is dumber - it just copies them all across verbatim - but works.
# We do still need to support the special case of 'allfields'
|
---|
# Populate $self->{'build_cfg'} with the final list of index fields and
# their field->shortname mappings, taken straight from the buildproc's
# 'indexfieldmap' (with 'allfields' special-cased to 'ZZ').  Fields with no
# known mapping are reported on STDERR and recorded with shortname 'ERROR'.
sub make_final_field_list
{
    my $self = shift (@_);

    $self->{'build_cfg'} = {};

    my @indexfieldmap = ();
    my @indexfields = ();

    # @todo support: $self->{'buildproc'}->{'extraindexfields'}
    foreach my $index_spec (@{$self->{'collect_cfg'}->{'indexes'}})
    {
        # strip any subcollection/language suffix; NOTE: the loop variable
        # aliases the collect_cfg entry, so this edits 'indexes' in place
        # (matching the long-standing behaviour of this routine)
        $index_spec =~ s/:.*$//;

        foreach my $fieldname (split(';', $index_spec))
        {
            my $shortname;
            if ($fieldname eq 'allfields')
            {
                $shortname = 'ZZ';
            }
            else
            {
                $shortname = $self->{'buildproc'}->{'indexfieldmap'}->{$fieldname};
            }

            unless (defined $shortname)
            {
                print STDERR 'Error! Couldn\'t find indexfieldmap for field: ' . $fieldname . "\n";
                $shortname = 'ERROR';
            }

            push(@indexfieldmap, $fieldname . '->' . $shortname);
            push(@indexfields, $fieldname);
        }
    }

    # only record the lists when non-empty, as before
    $self->{'build_cfg'}->{'indexfieldmap'} = \@indexfieldmap if scalar @indexfieldmap;
    $self->{'build_cfg'}->{'indexfields'}   = \@indexfields   if scalar @indexfields;
}
|
---|
| 256 |
|
---|
# Generate the solr schema.xml file based on indexfieldmap and other
# associated config files
#
# Unlike make_auxiliary_files(), this needs to be done up-front (rather
# than at the end) so the data-types in schema.xml are correctly set up
# prior to document content being pumped through solr_passes.pl
|
---|
| 264 |
|
---|
# Generate the collection's solr config area ($GSDLCOLLECTDIR/etc/conf) from
# the templates shipped with the solr extension ($GEXT_SOLR/conf): expand
# the field-list marker in schema.xml.in, copy the remaining *.in config
# files verbatim, and copy the "lang" resource directory.  Must run before
# solr_passes.pl is first invoked so field types are already declared.
sub premake_solr_auxiliary_files
{
    my $self = shift (@_);

    # Replace the following marker:
    #
    #   <!-- ##GREENSTONE-FIELDS## -->
    #
    # with lines of the form:
    #
    #   <field name="<field>" type="string" ... />
    #
    # for each <field> in 'indexfieldmap'

    my $schema_insert_xml = "";

    foreach my $ifm (@{$self->{'build_cfg'}->{'indexfieldmap'}}) {

        # entries look like "fullname->shortcode"; the shortcode is what
        # goes into the schema
        my ($fullfieldname, $field) = ($ifm =~ m/^(.*)->(.*)$/);

        $schema_insert_xml .= " "; # indent
        $schema_insert_xml .= "<field name=\"$field\" ";

        if($field eq "LA" || $field eq "LO")
        {
            # latitude/longitude fields get the geospatial type
            $schema_insert_xml .= "type=\"location\" ";
        }
#       elsif ($field ne "ZZ" && $field ne "TX")
#       {
#           $schema_insert_xml .= "type=\"string\" ";
#       }
        else
        {
            #$schema_insert_xml .= "type=\"text_en_splitting\" ";

            # original default solr field type for all fields is text_en_splitting
            my $solrfieldtype = "text_en_splitting";
            # a per-field override may be configured in the collection config
            if(defined $self->{'collect_cfg'}->{'indexfieldoptions'}->{$fullfieldname}->{'solrfieldtype'}) {
                $solrfieldtype = $self->{'collect_cfg'}->{'indexfieldoptions'}->{$fullfieldname}->{'solrfieldtype'};
                #print STDERR "@@@@#### found TYPE: $solrfieldtype\n";
            }
            $schema_insert_xml .= "type=\"$solrfieldtype\" ";

        }
        # set termVectors=\"true\" when term vectors info is required,
        # see TermsResponse termResponse = solrResponse.getTermsResponse();
        $schema_insert_xml .= "indexed=\"true\" stored=\"true\" multiValued=\"true\" termVectors=\"true\" termPositions=\"true\" termOffsets=\"true\" />\n";
    }

    # just the one rule to date
    my $insert_rules
        = [ { 'regexp' => "^\\s*<!--\\s*##GREENSTONE-FIELDS##\\s*-->\\s*\$",
              'insert' => $schema_insert_xml } ];

    my $solr_home = $ENV{'GEXT_SOLR'};
##  my $in_dirname = &FileUtils::filenameConcatenate($solr_home,"etc","conf");
    my $in_dirname = &FileUtils::filenameConcatenate($solr_home,"conf");
    my $schema_in_filename = &FileUtils::filenameConcatenate($in_dirname,"schema.xml.in");

    my $collect_home = $ENV{'GSDLCOLLECTDIR'};
    my $out_dirname = &FileUtils::filenameConcatenate($collect_home,"etc","conf");
    my $schema_out_filename = &FileUtils::filenameConcatenate($out_dirname,"schema.xml");

    # make sure output conf directory exists
    if (!&FileUtils::directoryExists($out_dirname)) {
        &FileUtils::makeDirectory($out_dirname);
    }

    filter_in_out_file($schema_in_filename,$schema_out_filename,$insert_rules);

    # now do the same for solrconfig.xml, stopwords, ...
    # these are simpler, as they currently do not need any filtering

    my @in_file_list = ( "solrconfig.xml", "stopwords.txt", "stopwords_en.txt",
                         "synonyms.txt", "protwords.txt", "currency.xml", "elevate.xml" );

    foreach my $file ( @in_file_list ) {
        my $in_filename = &FileUtils::filenameConcatenate($in_dirname,$file.".in");
        my $out_filename = &FileUtils::filenameConcatenate($out_dirname,$file);

        # templates are optional: only copy the ones that exist
        if(&FileUtils::fileExists($in_filename)) {
            filter_in_out_file($in_filename,$out_filename,[]);
        }
    }

    # finally, copy across whole resource directories (no filtering needed)
    my @in_dir_list = ( "lang" );
    foreach my $dir ( @in_dir_list ) {

        my $full_subdir_name = &FileUtils::filenameConcatenate($in_dirname,$dir);

        if(&FileUtils::directoryExists($full_subdir_name)) {
            &FileUtils::copyFilesRecursiveNoSVN($full_subdir_name, $out_dirname);
        }
    }
}
|
---|
| 360 |
|
---|
| 361 |
|
---|
# Pre-build phase for solr: start the Solr server if needed, work out the
# full field list (either from the 'indexes' config entries or, when a
# blanket 'metadata' index is requested, by scanning every document),
# write out schema.xml and friends, and (re)create the Solr cores that the
# subsequent build_index() calls will write into.
sub pre_build_indexes
{
    my $self = shift (@_);
    my ($indexname) = @_;
    my $outhandle = $self->{'outhandle'};

    # If the Solr/Jetty server is not already running, the following starts
    # it up, and only returns when the server is "reading and listening"

    my $solr_server = new solrserver($self->{'build_dir'});
    $solr_server->start();
    # remembered so post_build_indexes() can stop it again if we started it
    $self->{'solr_server'} = $solr_server;

    # either build the single named index, or every configured index
    my $indexes = [];
    if (defined $indexname && $indexname =~ /\w/) {
        push @$indexes, $indexname;
    } else {
        $indexes = $self->{'collect_cfg'}->{'indexes'};
    }

    # skip para-level check, as this is done in the main 'build_indexes'
    # routine

    my $all_metadata_specified = 0; # has the user added a 'metadata' index?
    my $allfields_index = 0; # do we have an allfields index? (currently unused below)

    # Using a hashmap here would avoid duplications, but while more space
    # efficient, it's not entirely clear it would be more computationally
    # efficient
    my @all_fields = ();

    foreach my $index (@$indexes) {
        if ($self->want_built($index)) {

            # get the parameters for the output
            # split on : just in case there is subcoll and lang stuff
            my ($fields) = split (/:/, $index);

            foreach my $field (split (/;/, $fields)) {
                if ($field eq "metadata") {
                    $all_metadata_specified = 1;
                }
                else {
                    push(@all_fields,$field);
                }
            }
        }
    }

    if ($all_metadata_specified) {

        # (Unforunately) we need to process all the documents in the collection
        # to figure out what the metadata_field_mapping is

        # set up the document processor
        $self->{'buildproc'}->set_output_handle (undef);
        $self->{'buildproc'}->set_mode ('index_field_mapping');
        $self->{'buildproc'}->reset();

        &plugin::begin($self->{'pluginfo'}, $self->{'source_dir'},
                       $self->{'buildproc'}, $self->{'maxdocs'});
        &plugin::read ($self->{'pluginfo'}, $self->{'source_dir'},
                       "", {}, {}, $self->{'buildproc'}, $self->{'maxdocs'}, 0, $self->{'gli'});
        &plugin::end($self->{'pluginfo'});

    }

    else {
        # Field mapping solely dependent of entries in 'indexes'

        # No need to explicitly handle "allfields" as create_shortname()
        # will get a fix on it through it's static_indexfield_map

        my $buildproc = $self->{'buildproc'};

        foreach my $field (@all_fields)
        {
            if (!defined $buildproc->{'indexfieldmap'}->{$field})
            {
                my $shortname = '';
                if (defined $buildproc->{'fieldnamemap'}->{$field})
                {
                    $shortname = $buildproc->{'fieldnamemap'}->{$field};
                }
                else
                {
                    $shortname = $buildproc->create_shortname($field);
                }
                # record the mapping both ways: field->shortname, and
                # shortname as a taken key
                $buildproc->{'indexfieldmap'}->{$field} = $shortname;
                $buildproc->{'indexfieldmap'}->{$shortname} = 1;
            }
        }
    }

    # Write out solr 'schema.xml' (and related) file
    #
    $self->make_final_field_list();
    $self->premake_solr_auxiliary_files();

    # Now update the solr-core information in solr.xml
    # => at most two cores <colname>-Doc and <colname>-Sec

    my $site = $self->{'site'};
    my $collect = $self->{'collection'};
    my $core_prefix = (defined $site) ? "$site-$collect" : $collect;

    # my $idx = $self->{'index_mapping'}->{$index};
    my $idx = "idx";

    my $build_dir = $self->{'build_dir'};

    foreach my $level (keys %{$self->{'levels'}}) {

        # first letter of the level name, e.g. 'd'/'s', prefixes the dir name
        my ($pindex) = $level =~ /^(.)/;

        my $index_dir = $pindex.$idx;
        my $core = "$core_prefix-$index_dir";

        # force_removeold == opposite of being run in 'incremental' mode
        my $force_removeold = ($self->{'incremental'}) ? 0 : 1;

        if ($force_removeold) {
            print $outhandle "\n-removeold set (new index will be created)\n";

            # create cores under temporary core names, corresponding to building directory
            $core = "building-".$core;

            # wipe and recreate the on-disk index area for this core
            my $full_index_dir = &FileUtils::filenameConcatenate($build_dir,$index_dir);
            &FileUtils::removeFilesRecursive($full_index_dir);
            &FileUtils::makeDirectory($full_index_dir);

            my $full_tlog_dir = &FileUtils::filenameConcatenate($full_index_dir, "tlog");
            &FileUtils::makeDirectory($full_tlog_dir);

            # Solr then wants an "index" folder within this general index area!
#           my $full_index_index_dir = &FileUtils::filenameConcatenate($full_index_dir,"index");
#           &FileUtils::makeDirectory($full_index_index_dir);

            # now go on and create new index
            print $outhandle "Creating Solr core: $core\n";
            $solr_server->admin_create_core($core);

        }
        else {
            # if collect==core already in solr.xml (check with STATUS)
            # => use RELOAD call to refresh fields now expressed in schema.xml
            #
            # else
            # => use CREATE API to add to solr.xml

            my $check_core_exists = $solr_server->admin_ping_core($core);

            if ($check_core_exists) {
                # unload first so the re-create picks up the fresh schema.xml
                print $outhandle "Unloading Solr core: $core\n";
                $solr_server->admin_unload_core($core);
            }

            print $outhandle "Creating Solr core: $core\n";
            $solr_server->admin_create_core($core);

        }
    }

}
|
---|
| 527 |
|
---|
# Build one index at one level ($llevel, e.g. Doc/Sec) by re-reading the
# collection's documents and piping them through solr_passes.pl in 'index'
# mode to the appropriate Solr core.
#
# Essentially the same as the lucenebuilder.pm version, only using solr_passes
# => refactor and make better use of inheritence
sub build_index {
    my $self = shift (@_);
    my ($index,$llevel) = @_;
    my $outhandle = $self->{'outhandle'};
    my $build_dir = $self->{'build_dir'};

    # get the full index directory path and make sure it exists
    my $indexdir = $self->{'index_mapping'}->{$index};
    &FileUtils::makeAllDirectories(&FileUtils::filenameConcatenate($build_dir, $indexdir));

    # Find the perl script to call to run solr
    my $solr_passes = $self->{'solr_passes'};
    my $solr_passes_exe = $self->{'solr_passes_exe'};

    # define the section names (and possibly the doc name) for solr_passes
    my $solr_passes_sections = $llevel;

    my $osextra = "";
    if ($ENV{'GSDLOS'} =~ /^windows$/i) {
        $build_dir =~ s@/@\\@g;
    } else {
        if ($outhandle ne "STDERR") {
            # so solr_passes doesn't print to stderr if we redirect output
            $osextra .= " 2>/dev/null";
        }
    }

    # get the index expression if this index belongs
    # to a subcollection
    my $indexexparr = [];
    my $langarr = [];

    # there may be subcollection info, and language info.
    my ($fields, $subcollection, $language) = split (":", $index);
    my @subcollections = ();
    @subcollections = split /,/, $subcollection if (defined $subcollection);

    foreach $subcollection (@subcollections) {
        if (defined ($self->{'collect_cfg'}->{'subcollection'}->{$subcollection})) {
            push (@$indexexparr, $self->{'collect_cfg'}->{'subcollection'}->{$subcollection});
        }
    }

    # add expressions for languages if this index belongs to
    # a language subcollection - only put languages expressions for the
    # ones we want in the index
    my @languages = ();
    my $languagemetadata = "Language";
    if (defined ($self->{'collect_cfg'}->{'languagemetadata'})) {
        $languagemetadata = $self->{'collect_cfg'}->{'languagemetadata'};
    }
    @languages = split /,/, $language if (defined $language);
    foreach my $language (@languages) {
        # a leading '!' means "exclude this language"
        my $not=0;
        if ($language =~ s/^\!//) {
            $not = 1;
        }
        if($not) {
            push (@$langarr, "!$language");
        } else {
            push (@$langarr, "$language");
        }
    }

    # Build index dictionary. Uses verbatim stem method
    print $outhandle "\n    creating index dictionary (solr_passes -I1)\n" if ($self->{'verbosity'} >= 1);
    print STDERR "<Phase name='CreatingIndexDic'/>\n" if $self->{'gli'};
    my ($handle);

    if ($self->{'debug'}) {
        $handle = *STDOUT;
    } else {
        my $site = $self->{'site'};
        my $collect = $self->{'collection'};
        my $core_prefix = (defined $site) ? "$site-$collect" : $collect;
        my $ds_idx = $self->{'index_mapping'}->{$index};
        my $core = "$core_prefix-$ds_idx";

        $core = "building-".$core unless $self->{'incremental'}; # core points to building only for force_removeold

        print STDERR "Cmd: $solr_passes_exe $core index \"$build_dir\" \"$indexdir\" $osextra\n";
        if (!open($handle, "| $solr_passes_exe $core index \"$build_dir\" \"$indexdir\" $osextra")) {
            print STDERR "<FatalError name='NoRunSolrPasses'/>\n</Stage>\n" if $self->{'gli'};
            # bug fix: was "\n!$\n" which printed a literal '!$' instead of
            # the OS error in $!
            die "solrbuilder::build_index - couldn't run $solr_passes_exe\n$!\n";
        }
    }

    my $store_levels = $self->{'levels'};
    my $db_level = "section"; #always
    my $dom_level = "";
    # map the solr/mgpp level tag (e.g. "Sec") back to its config name
    foreach my $key (keys %$store_levels) {
        if ($mgppbuilder::level_map{$key} eq $llevel) {
            $dom_level = $key;
        }
    }
    if ($dom_level eq "") {
        print STDERR "Warning: unrecognized tag level $llevel\n";
        $dom_level = "document";
    }

    my $local_levels = { $dom_level => 1 }; # work on one level at a time

    # set up the document processor
    $self->{'buildproc'}->set_output_handle ($handle);
    $self->{'buildproc'}->set_mode ('text');
    $self->{'buildproc'}->set_index ($index, $indexexparr);
    $self->{'buildproc'}->set_index_languages ($languagemetadata, $langarr) if (defined $language);
    $self->{'buildproc'}->set_indexing_text (1);
    $self->{'buildproc'}->set_levels ($local_levels);
    if (defined $self->{'collect_cfg'}->{'sortfields'}) {
        $self->{'buildproc'}->set_sortfields ($self->{'collect_cfg'}->{'sortfields'});
    }
    if (defined $self->{'collect_cfg'}->{'facetfields'}) {
        $self->{'buildproc'}->set_facetfields ($self->{'collect_cfg'}->{'facetfields'});
    }
    $self->{'buildproc'}->set_db_level($db_level);
    $self->{'buildproc'}->reset();

    # wrap the document stream in a solr <update> request
    print $handle "<update>\n";

    &plugin::read ($self->{'pluginfo'}, $self->{'source_dir'},
                   "", {}, {}, $self->{'buildproc'}, $self->{'maxdocs'}, 0, $self->{'gli'});

    print $handle "</update>\n";

    close ($handle) unless $self->{'debug'};

    $self->print_stats();

    # restore the full level set for subsequent build_index() calls
    $self->{'buildproc'}->set_levels ($store_levels);
    print STDERR "</Stage>\n" if $self->{'gli'};

}
|
---|
| 666 |
|
---|
| 667 |
|
---|
# Post-build phase for solr.
#
# Deliberately overrides the mgpp version so make_final_field_list() is NOT
# called here - for solr that work already happened in pre_build_indexes().
#
# Also shuts down the Solr server (tomcat or jetty) if pre_build_indexes()
# had to start it explicitly.
sub post_build_indexes {
    my ($self) = @_;

    my $server = $self->{'solr_server'};

    # only stop the server if we were the ones who started it
    $server->stop() if $server->explicitly_started();

    $self->{'solr_server'} = undef;
}
|
---|
| 688 |
|
---|
# Extend the build.cfg data with solr-specific entries: after letting
# lucenebuilder add its own entries, record which facet fields were indexed
# ('rank' passes through as-is; any other facet must also be a sort field,
# and is recorded under its sort-field shortname plus a name mapping).
sub build_cfg_extra {
    my ($self, $build_cfg) = @_;

    # parent class entries first
    $self->lucenebuilder::build_cfg_extra($build_cfg);

    my @facetfields = ();
    my @facetfieldmap = ();
    my $buildproc = $self->{'buildproc'};

    foreach my $facet (@{$buildproc->{'facetfields'}}) {
        if ($facet eq "rank") {
            push(@facetfields, $facet);
            next;
        }

        # facets other than 'rank' only count if they were actually
        # registered as sort fields during the build
        next unless $buildproc->{'actualsortfields'}->{$facet};

        my $short = $buildproc->{'sortfieldnamemap'}->{$facet};
        push(@facetfields, $short);
        push(@facetfieldmap, "$facet\-\>$short");
    }

    $build_cfg->{'indexfacetfields'} = \@facetfields;
    $build_cfg->{'indexfacetfieldmap'} = \@facetfieldmap;
}
|
---|
[24446] | 712 | 1;
|
---|
| 713 |
|
---|
| 714 |
|
---|