source: gs3-extensions/solr/trunk/src/src/java/org/greenstone/gsdl3/util/SolrQueryWrapper.java@29218

Last change on this file since 29218 was 29218, checked in by ak19, 10 years ago

Fix to null pointer exception in recent commit

  • Property svn:executable set to *
File size: 16.6 KB
/**********************************************************************
 *
 * SolrQueryWrapper.java
 *
 * Copyright 2004 The New Zealand Digital Library Project
 *
 * A component of the Greenstone digital library software
 * from the New Zealand Digital Library Project at the
 * University of Waikato, New Zealand.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 *********************************************************************/
package org.greenstone.gsdl3.util;

import java.lang.reflect.Type;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.HashSet;

import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery; // subclass of ModifiableSolrParams
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.TermsResponse;

import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;

import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.greenstone.LuceneWrapper4.SharedSoleneQuery;
import org.greenstone.LuceneWrapper4.SharedSoleneQueryResult;

import org.apache.lucene.search.Query; // Query, TermQuery, BooleanQuery, BooleanClause and more
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.request.LocalSolrQueryRequest;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

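/**
 * Wraps a Greenstone collection's (embedded) Solr core: builds up a SolrQuery with sorting,
 * paging, facet fields and facet queries, extracts the individual query terms so that per-term
 * frequencies can be requested, runs the query and repackages the hits as a SolrQueryResult.
 *
 * Illustrative usage sketch only (not part of the original file); "myColl" is a hypothetical
 * core name prefix, and the paging end point (end_results) is assumed to be set up through the
 * inherited SharedSoleneQuery machinery:
 *
 * <pre>
 * SolrQueryWrapper wrapper = new SolrQueryWrapper();
 * wrapper.setSolrCore(embeddedSolrServer);       // an EmbeddedSolrServer for this collection
 * wrapper.setCollectionCoreNamePrefix("myColl"); // hypothetical collection/core name prefix
 * wrapper.setStartResults(1);                    // Greenstone is 1-based; mapped to Solr's 0-based start
 * wrapper.addFacet("ZZ");                        // facet on the ZZ index
 * SharedSoleneQueryResult result = wrapper.runQuery("TI:farming");
 * </pre>
 */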
public class SolrQueryWrapper extends SharedSoleneQuery
{
    public static String SORT_ASCENDING = "asc";
    public static String SORT_DESCENDING = "desc";
    public static String SORT_BY_RANK = "score";
    public static String SORT_BY_INDEX_ORDER = "_docid_";

    static Logger logger = Logger.getLogger(org.greenstone.gsdl3.util.SolrQueryWrapper.class.getName());
    protected int max_docs = 100;
    protected String sort_order = SORT_DESCENDING;
    protected String sort_field = SORT_BY_RANK; // don't want null default for solr
    protected ArrayList<String> _facets = new ArrayList<String>();
    protected ArrayList<String> _facetQueries = new ArrayList<String>();
    SolrServer solr_core = null;

    String collection_core_name_prefix = null;

    public SolrQueryWrapper()
    {
        super();
        start_results = 0;
    }

    public void setMaxDocs(int max_docs)
    {
        this.max_docs = max_docs;
    }

    public void setSolrCore(SolrServer solr_core)
    {
        this.solr_core = solr_core;
    }

    public void setCollectionCoreNamePrefix(String colCoreNamePrefix) {
        this.collection_core_name_prefix = colCoreNamePrefix;
    }

    // make sure it's not null.
    public void setSortField(String sort_field) {
        if (sort_field != null) {
            this.sort_field = sort_field;
        }
    }

    public void setSortOrder(String order)
    {
        this.sort_order = order;
    }

    public void addFacet(String facet)
    {
        if (!_facets.contains(facet))
        {
            _facets.add(facet);
        }
    }

    public void clearFacets()
    {
        _facets.clear();
    }

    public void addFacetQuery(String facetQuery)
    {
        if (!_facetQueries.contains(facetQuery))
        {
            _facetQueries.add(facetQuery);
        }
    }

    public void clearFacetQueries()
    {
        _facetQueries.clear();
    }

    public boolean initialise()
    {
        if (solr_core == null)
        {
            utf8out.println("Solr Core not loaded in ");
            utf8out.flush();
            return false;
        }
        return true;
    }

    /** Extracts the query terms from the query string. The query string can be a boolean
     * combination of the various search fields with their search terms or phrases.
     */
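    // Illustrative example only (not from the original source): for a query_string such as
    // "TI:farming TX:economy", the parser below yields the Lucene Terms TI:farming and
    // TX:economy, which runQuery() then uses to ask Solr for termfreq()/totaltermfreq() values.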
    public Term[] getTerms(SolrQuery solrQuery, String query_string)
    {
        Term terms[] = null;

        if(solr_core instanceof EmbeddedSolrServer) {
            EmbeddedSolrServer solrServer = (EmbeddedSolrServer)solr_core;

            CoreContainer coreContainer = solrServer.getCoreContainer();

            Collection<SolrCore> solrCores = coreContainer.getCores();
            if(!solrCores.isEmpty()) {
                Iterator<SolrCore> coreIterator = solrCores.iterator();

                // Just use the first core that matches the collection name, since the term
                // frequency of any term is the same regardless of whether it's the didx or sidx core
                boolean foundCore = false;
                while(coreIterator.hasNext() && !foundCore) {
                    SolrCore solrCore = coreIterator.next();
                    if(this.collection_core_name_prefix != null) {
                        if(!solrCore.getName().startsWith(this.collection_core_name_prefix)) {
                            //logger.error("### Skipping core not of this collection: " + solrCore.getName());
                            continue;
                        }
                    } else {
                        logger.error("### Collection_core_name_prefix not set. Won't try to find terms");
                        break;
                    }

                    //logger.error("### Found core " + solrCore.getName() + " of this collection " + this.collection_core_name_prefix);
                    foundCore = true;

                    LocalSolrQueryRequest solrQueryRequest = new LocalSolrQueryRequest(solrCore, solrQuery);
                    Query parsedQuery = null;

                    try {

                        // get the qparser; the default is LuceneQParserPlugin, which is called "lucene", see http://wiki.apache.org/solr/QueryParser
                        QParser qParser = QParser.getParser(query_string, "lucene", solrQueryRequest);
                        parsedQuery = qParser.getQuery();

                        // For PrefixQuery or WildCardQuery (a subclass of AutomatonQuery, incl. RegexpQuery),
                        // like ZZ:econom* and ZZ:*date/regex queries, Query.extractTerms() throws an Exception
                        // because it has not done the Query.rewrite() step yet. So do that manually for them.
                        // This still doesn't provide us with the terms that econom* or *date break down into.

                        //if(parsedQuery instanceof PrefixQuery || parsedQuery instanceof AutomatonQuery) {
                        // Should we just check superclass MultiTermQuery?
                        // Can be a BooleanQuery containing PrefixQuery/WildCardQuery among its clauses, so
                        // just test for * in the query_string to determine if we need to do a rewrite() or not
                        if(query_string.contains("*")) {
                            SolrIndexSearcher searcher = solrQueryRequest.getSearcher();
                            IndexReader indexReader = searcher.getIndexReader(); // returns a DirectoryReader
                            parsedQuery = parsedQuery.rewrite(indexReader); // gets rewritten to ConstantScoreQuery
                        }

                        //System.err.println("#### Query type was: " + parsedQuery.getClass());
                        //logger.error("#### Query type was: " + parsedQuery.getClass());

                        // extract the terms
                        Set<Term> extractedQueryTerms = new HashSet<Term>();
                        parsedQuery.extractTerms(extractedQueryTerms);

                        terms = new Term[extractedQueryTerms.size()];

                        Iterator<Term> termsIterator = extractedQueryTerms.iterator();
                        for(int i = 0; termsIterator.hasNext(); i++) {
                            Term term = termsIterator.next();
                            ///System.err.println("#### Found query term: " + term);
                            ///logger.error("#### Found query term: " + term);

                            terms[i] = term; //(term.field(), term.text());
                        }

                    } catch(Exception queryParseException) {
                        queryParseException.printStackTrace();
                        System.err.println("Exception when parsing query: " + queryParseException.getMessage());
                        // parsedQuery is still null if getParser()/getQuery() itself threw, so guard against an NPE here
                        String queryType = (parsedQuery == null) ? "unknown (query was not parsed)" : parsedQuery.getClass().toString();
                        System.err.println("#### Query type was: " + queryType);
                        logger.error("#### Query type was: " + queryType);
                    }
                }

            } else {
                System.err.println("#### CoreContainer is empty");
                logger.error("#### CoreContainer is empty");
            }
        } else {
            System.err.println("#### Not an EmbeddedSolrServer. This shouldn't happen. " + solr_core.getClass());
            logger.error("#### Not an EmbeddedSolrServer. This shouldn't happen. " + solr_core.getClass());
        }


        return terms;
    }

    public SharedSoleneQueryResult runQuery(String query_string)
    {
        if (query_string == null || query_string.equals(""))
        {
            utf8out.println("The query word is not indicated ");
            utf8out.flush();
            return null;
        }

        SolrQueryResult solr_query_result = new SolrQueryResult();
        solr_query_result.clear();

        if (_facetQueries.size() > 0)
        {
            HashMap<String, ArrayList<String>> grouping = new HashMap<String, ArrayList<String>>();
            for (String currentQuery : _facetQueries)
            {
                // Facet queries are stored as JSON, so we have to decode them
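                // Illustrative example only (not from the original source): a currentQuery value
                // might be the JSON list ["ZZ:poetry", "ZZ:new+zealand"], i.e. URL-encoded
                // "index:value" strings for the facet values chosen by the user.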
                Gson gson = new Gson();
                Type type = new TypeToken<List<String>>()
                {
                }.getType();
                List<String> queryElems = gson.fromJson(currentQuery, type);

                // Group each query segment by the index it uses
                for (String currentQueryElement : queryElems)
                {
                    String decodedQueryElement = null;
                    try
                    {
                        decodedQueryElement = URLDecoder.decode(currentQueryElement, "UTF-8");
                    }
                    catch (Exception ex)
                    {
                        continue;
                    }

                    int colonIndex = currentQueryElement.indexOf(":");
                    String indexShortName = currentQueryElement.substring(0, colonIndex);

                    if (grouping.get(indexShortName) == null)
                    {
                        grouping.put(indexShortName, new ArrayList<String>());
                    }
                    grouping.get(indexShortName).add(decodedQueryElement);
                }
            }

            // Construct the facet query string to add to the regular query string
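            // Illustrative example only (not from the original source): the loop below turns the
            // grouping into something like (ZZ:poetry OR ZZ:new zealand) AND (TI:farming), i.e.
            // facet values on the same index are ORed together and different indexes are ANDed.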
            StringBuilder facetQueryString = new StringBuilder();
            int keysetCounter = 0;
            for (String key : grouping.keySet())
            {
                StringBuilder currentFacetString = new StringBuilder("(");
                int groupCounter = 0;
                for (String queryElem : grouping.get(key))
                {
                    currentFacetString.append(queryElem);

                    groupCounter++;
                    if (groupCounter < grouping.get(key).size())
                    {
                        currentFacetString.append(" OR ");
                    }
                }
                currentFacetString.append(")");

                facetQueryString.append(currentFacetString);

                keysetCounter++;
                if (keysetCounter < grouping.keySet().size())
                {
                    facetQueryString.append(" AND ");
                }
            }

            if (facetQueryString.length() > 0)
            {
                query_string += " AND " + facetQueryString;
            }
        }


        SolrQuery solrQuery = new SolrQuery(query_string);
        solrQuery.addSort(this.sort_field, SolrQuery.ORDER.valueOf(this.sort_order)); // sort param, like "score desc" or "byORG asc"
        solrQuery.setStart(start_results); // which result to start from
        solrQuery.setRows((end_results - start_results) + 1); // how many results per "page"
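        // Illustrative example only (not from the original source): with start_results=0 and
        // end_results=19 (paging values inherited from SharedSoleneQuery), this requests
        // setStart(0) and setRows(20), i.e. the first twenty matching documents.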

        // http://lucene.472066.n3.nabble.com/get-term-frequency-just-only-keywords-search-td4084510.html
        // WORKS (search didx core):
        //TI:farming
        //docOID,score,termfreq(TI,'farming'),totaltermfreq(TI,'farming')


        // which fields to return for each document, we'll add the request for totaltermfreq later
        // fl=docOID score termfreq(TI,'farming') totaltermfreq(TI,'farming')
        solrQuery.setFields("docOID", "score"); //solrParams.set("fl", "docOID score totaltermfreq(field,'queryterm')");

        //solrQuery.setTerms(true); // turn on the termsComponent
        //solrQuery.set("terms.fl", "ZZ"); // which field to get the terms from. ModifiableSolrParams method

        // http://wiki.apache.org/solr/TermVectorComponent and https://cwiki.apache.org/confluence/display/solr/The+Term+Vector+Component
        // http://lucene.472066.n3.nabble.com/get-term-frequency-just-only-keywords-search-td4084510.html
        // http://stackoverflow.com/questions/13031534/word-frequency-in-solr
        // http://wiki.apache.org/solr/FunctionQuery#tf and #termfreq and #totaltermfreq
        // https://wiki.apache.org/solr/TermsComponent

        //solrParams.set("tv.tf", true); // turn on the terms vector Component
        //solrParams.set("tv.fl", "ZZ"); // which field to get the terms from /// ZZ


        if (_facets.size() > 0)
        {
            // enable facet counts in the query response
            solrQuery.setFacet(true); //solrParams.set("facet", "true");
            for (int i = 0; i < _facets.size(); i++)
            {
                // add this field as a facet
                solrQuery.addFacetField(_facets.get(i)); // solrParams.add("facet.field", _facets.get(i));
            }
        }

        // get the individual terms that make up the query, then request solr to return the totaltermfreq for each term
        Term[] terms = getTerms(solrQuery, query_string);
        if(terms != null) {
            for(int i = 0; i < terms.length; i++) {
                Term term = terms[i];
                String field = term.field();
                String queryTerm = term.text();
                // totaltermfreq(TI, 'farming') termfreq(TI, 'farming')

                solrQuery.addField("totaltermfreq(" + field + ",'" + queryTerm + "')");
                solrQuery.addField("termfreq(" + field + ",'" + queryTerm + "')");
            }
        }
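        // Illustrative example only (not from the original source): for a parsed query whose only
        // term is TI:farming, the requested field list is now effectively
        // fl=docOID score totaltermfreq(TI,'farming') termfreq(TI,'farming')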

        // do the query
        try
        {
            QueryResponse solrResponse = solr_core.query(solrQuery); //solr_core.query(solrParams);
            SolrDocumentList hits = solrResponse.getResults();
            //TermsResponse termResponse = solrResponse.getTermsResponse(); // null unless termvectors=true in schema.xml

            if (hits != null)
            {
                logger.info("*** hits size = " + hits.size());
                logger.info("*** num docs found = " + hits.getNumFound());

                logger.info("*** start results = " + start_results);
                logger.info("*** end results = " + end_results);
                logger.info("*** max docs = " + max_docs);

                // numDocsFound is the total number of matching docs in the collection
                // as opposed to the number of documents returned in the hits list

                solr_query_result.setTotalDocs((int) hits.getNumFound());

                solr_query_result.setStartResults(start_results);
                solr_query_result.setEndResults(start_results + hits.size());


                // get the first field we're searching in, this will be the fallback field
                int sepIndex = query_string.indexOf(":");
                String defaultField = query_string.substring(0, sepIndex);
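                // Illustrative example only (not from the original source): for query_string
                // "TI:farming", sepIndex is 2 and defaultField is "TI".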
                //String query = query_string.substring(sepIndex + 2, query_string.length() - 1); // Replaced by call to getTerms()

                //solr_query_result.addTerm(query, field, (int) hits.getNumFound(), -1);

                // Output the matching documents
                for (int i = 0; i < hits.size(); i++)
                {
                    SolrDocument doc = hits.get(i);

                    // Need to think about how to support document term frequency. Make zero for now
                    int doc_term_freq = 0;
                    String docOID = (String) doc.get("docOID");
                    Float score = (Float) doc.get("score");

                    logger.info("**** docOID = " + docOID);
                    logger.info("**** score = " + score);


                    // solr returns each term's totaltermfreq, ttf, at the document level, even though
                    // the ttf is the same for each document. So extract this information just for the first document
                    if(i == 0) { // first document

                        if(terms != null) {
                            for(int j = 0; j < terms.length; j++) {
                                Term term = terms[j];
                                String field = term.field();
                                String queryTerm = term.text();

                                // totaltermfreq(TI, 'farming') termfreq(TI, 'farming')
                                Long totaltermfreq = (Long)doc.get("totaltermfreq("+field+",'"+queryTerm+"')");
                                Integer termfreq = (Integer)doc.get("termfreq("+field+",'"+queryTerm+"')");

                                //System.err.println("**** ttf = " + totaltermfreq);
                                //System.err.println("**** tf = " + termfreq);
                                //logger.info("**** ttf = " + totaltermfreq);
                                //logger.info("**** tf = " + termfreq);

                                solr_query_result.addTerm(queryTerm, field, (int) hits.getNumFound(), totaltermfreq.intValue()); // long totaltermfreq to int
                            }
                        } else { // no terms extracted from query_string
                            solr_query_result.addTerm(query_string, defaultField, (int) hits.getNumFound(), -1); // no terms
                        }
                    }

                    solr_query_result.addDoc(docOID, score.floatValue(), doc_term_freq); // doc_termfreq for which term????
                }
            }
            else
            {
                solr_query_result.setTotalDocs(0);

                solr_query_result.setStartResults(0);
                solr_query_result.setEndResults(0);
            }

            solr_query_result.setFacetResults(solrResponse.getFacetFields());
        }
        catch (SolrServerException server_exception)
        {
            server_exception.printStackTrace();
            solr_query_result.setError(SolrQueryResult.SERVER_ERROR);
        }

        return solr_query_result;
    }

    // Greenstone universe operates with a base of 1 for "start_results",
    // but Solr operates from 0
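    // Illustrative example only (not from the original source): a caller asking for hits 21-40
    // passes start_results=21, which becomes Solr's zero-based start of 20.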
    public void setStartResults(int start_results)
    {
        // clamp to 1 rather than 0, so the zero-based Solr start below can never go negative
        if (start_results < 1)
        {
            start_results = 1;
        }
        this.start_results = start_results - 1;
    }

    public void cleanUp()
    {
        super.cleanUp();
    }
}