source: other-projects/hathitrust/wcsa/extracted-features-solr/trunk/solr-ingest/src/main/java/org/hathitrust/extractedfeatures/PerVolumeJSON.java@ 31450

Last change on this file since 31450 was 31450, checked in by davidb, 7 years ago

Some debugging output to help see what is happening with langmap_directory under YARN

  • Property svn:executable set to *
File size: 7.9 KB
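The debugging output mentioned in the commit message is printed from the constructor, which runs on the Spark driver, while call() runs inside the YARN executors, so langmap_directory messages from the two places surface in different logs. Purely for orientation, the sketch below shows one way this class might be wired into a driver program; the driver class name, paths, Solr URL and argument values are illustrative assumptions, not taken from this repository.

// Hypothetical driver-side wiring (not part of this file); names, paths and
// argument values below are illustrative assumptions only.
package org.hathitrust.extractedfeatures;

import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class PerVolumeJSONDriverSketch
{
    public static void main(String[] args)
    {
        SparkConf conf = new SparkConf().setAppName("HTRC EF Solr ingest (sketch)");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        // Assumed input: a sequence file whose values are per-volume EF JSON records
        JavaRDD<Text> json_volumes
            = jsc.sequenceFile("hdfs:///user/htrc/ef-json.seq", Text.class, Text.class).values();

        // The constructor runs here, on the driver: its println appears in the driver log.
        // call() runs later, inside the YARN executors: its output ends up in the
        // executor container logs instead.
        PerVolumeJSON per_vol_json
            = new PerVolumeJSON("hdfs:///user/htrc/ef-json",                 // input_dir
                                "whitelist.txt",                             // whitelist_filename
                                "hdfs:///user/htrc/univ-pos-langmap",        // langmap_directory
                                "http://solr-host:8983/solr/htrc-ef/update", // solr_url
                                null,                                        // output_dir
                                1,                                           // verbosity
                                false,                                       // icu_tokenize
                                false);                                      // strict_file_io

        // Sum the per-volume page counts returned by call()
        Integer total_pages = json_volumes.map(per_vol_json).reduce((a, b) -> a + b);
        System.out.println("Total pages processed: " + total_pages);

        jsc.close();
    }
}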
package org.hathitrust.extractedfeatures;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

import org.apache.hadoop.io.Text;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.util.DoubleAccumulator;
import org.json.JSONArray;
import org.json.JSONObject;

/*
class PagedJSON implements Function<String, Boolean> {

    private static final long serialVersionUID = 1L;

    public Boolean call(String s) { return s.contains("a"); }
}
*/


//public class PerVolumeJSON implements VoidFunction<String>
public class PerVolumeJSON implements Function<Text,Integer>
{
    private static final long serialVersionUID = 1L;

    protected String _input_dir;
    protected String _whitelist_filename;
    protected String _langmap_directory;

    protected String _solr_url;
    protected String _output_dir;

    protected int _verbosity;

    // Built lazily on first use in call(), so they start out null
    protected WhitelistBloomFilter _whitelist_bloomfilter;
    protected UniversalPOSLangMap _universal_langmap;

    boolean _icu_tokenize;
    boolean _strict_file_io;

    public PerVolumeJSON(String input_dir, String whitelist_filename, String langmap_directory,
                         String solr_url, String output_dir, int verbosity,
                         boolean icu_tokenize, boolean strict_file_io)
    {
        // Runs on the Spark driver, so this output appears in the driver log
        System.out.println("*** PerVolumeJSON Constructor, langmap_directory = " + langmap_directory);

        _input_dir = input_dir;
        _whitelist_filename = whitelist_filename;
        _langmap_directory = langmap_directory;

        _solr_url = solr_url;
        _output_dir = output_dir;
        _verbosity = verbosity;

        _icu_tokenize = icu_tokenize;
        _strict_file_io = strict_file_io;

        _whitelist_bloomfilter = null;
        _universal_langmap = null;
    }


    public Integer call(Text json_text) throws IOException
    {
        // Lazily construct the heavyweight lookup structures the first time this
        // worker processes a volume, rather than serializing them from the driver
        if ((_whitelist_filename != null) && (_whitelist_bloomfilter == null)) {
            _whitelist_bloomfilter = new WhitelistBloomFilter(_whitelist_filename, true);
        }

        if ((_langmap_directory != null) && (_universal_langmap == null)) {
            _universal_langmap = new UniversalPOSLangMap(_langmap_directory);
        }

        int ef_num_pages = 0;

        try {
            JSONObject extracted_feature_record = new JSONObject(json_text.toString());

            if (extracted_feature_record != null) {
                String volume_id = extracted_feature_record.getString("id");

                //JSONObject ef_metadata = extracted_feature_record.getJSONObject("metadata");
                //String title= ef_metadata.getString("title");

                JSONObject ef_features = extracted_feature_record.getJSONObject("features");

                int ef_page_count = ef_features.getInt("pageCount");

                if (_verbosity >= 1) {
                    System.out.println("Processing: " + volume_id);
                    System.out.println(" pageCount = " + ef_page_count);
                }

                JSONArray ef_pages = ef_features.getJSONArray("pages");
                ef_num_pages = ef_pages.length();

                for (int i = 0; i < ef_page_count; i++) {
                    String formatted_i = String.format("page-%06d", i);
                    String page_id = volume_id + "." + formatted_i;

                    if (_verbosity >= 2) {
                        System.out.println(" Page: " + page_id);
                    }

                    JSONObject ef_page = ef_pages.getJSONObject(i);

                    if (ef_page != null) {
                        // Convert to Solr add form
                        JSONObject solr_add_doc_json
                            = SolrDocJSON.generateSolrDocJSON(volume_id, page_id, ef_page,
                                                              _whitelist_bloomfilter, _universal_langmap, _icu_tokenize);

                        if ((_verbosity >= 2) && (i == 20)) {
                            System.out.println("==================");
                            System.out.println("Sample output Solr add JSON [page 20]: " + solr_add_doc_json.toString());
                            System.out.println("==================");
                        }

                        if (_solr_url != null) {
                            if ((_verbosity >= 2) && (i == 20)) {
                                System.out.println("==================");
                                System.out.println("Posting to: " + _solr_url);
                                System.out.println("==================");
                            }
                            SolrDocJSON.postSolrDoc(_solr_url, solr_add_doc_json);
                        }
                    }
                    else {
                        System.err.println("Skipping: " + page_id);
                    }
                }
            }
        }
        catch (Exception e) {
            if (_strict_file_io) {
                throw e;
            }
            else {
                e.printStackTrace();
            }
        }

        return ef_num_pages;
    }

    /*
    //public void call(String json_file_in) throws IOException
    public Integer call(String json_file_in) throws IOException
    {
        if ((_whitelist_filename != null) && (_whitelist_bloomfilter == null)) {
            _whitelist_bloomfilter = new WhitelistBloomFilter(_whitelist_filename, true);
        }

        int ef_num_pages = 0;

        ArrayList<String> ids = new ArrayList<String>(); // want it to be non-null so can return valid iterator

        String full_json_file_in = _input_dir + "/" + json_file_in;
        JSONObject extracted_feature_record = JSONClusterFileIO.readJSONFile(full_json_file_in);

        if (extracted_feature_record != null) {
            String volume_id = extracted_feature_record.getString("id");

            //JSONObject ef_metadata = extracted_feature_record.getJSONObject("metadata");
            //String title= ef_metadata.getString("title");

            JSONObject ef_features = extracted_feature_record.getJSONObject("features");

            int ef_page_count = ef_features.getInt("pageCount");

            if (_verbosity >= 1) {
                System.out.println("Processing: " + json_file_in);
                System.out.println(" pageCount = " + ef_page_count);
            }

            JSONArray ef_pages = ef_features.getJSONArray("pages");
            ef_num_pages = ef_pages.length();

            // Make directory for page-level JSON output
            String json_dir = ClusterFileIO.removeSuffix(json_file_in, ".json.bz2");
            String page_json_dir = json_dir + "/pages";

            if (_output_dir != null) {
                ClusterFileIO.createDirectoryAll(_output_dir + "/" + page_json_dir);
            }

            ids = new ArrayList<String>(ef_num_pages);
            for (int i = 0; i < ef_page_count; i++) {
                String formatted_i = String.format("page-%06d", i);
                String page_id = volume_id + "." + formatted_i;

                if (_verbosity >= 2) {
                    System.out.println(" Page: " + page_id);
                }

                String output_json_bz2 = page_json_dir + "/" + formatted_i + ".json.bz2";
                ids.add(page_id);

                if (_verbosity >= 2) {
                    if (i == 0) {
                        System.out.println("Sample output JSON page file [i=0]: " + output_json_bz2);
                    }
                }

                JSONObject ef_page = ef_pages.getJSONObject(i);

                if (ef_page != null) {
                    // Convert to Solr add form
                    JSONObject solr_add_doc_json
                        = SolrDocJSON.generateSolrDocJSON(volume_id, page_id, ef_page, _whitelist_bloomfilter, _icu_tokenize);

                    if ((_verbosity >= 2) && (i == 20)) {
                        System.out.println("==================");
                        System.out.println("Sample output Solr add JSON [page 20]: " + solr_add_doc_json.toString());
                        System.out.println("==================");
                    }

                    if (_solr_url != null) {
                        if ((_verbosity >= 2) && (i == 20)) {
                            System.out.println("==================");
                            System.out.println("Posting to: " + _solr_url);
                            System.out.println("==================");
                        }
                        SolrDocJSON.postSolrDoc(_solr_url, solr_add_doc_json);
                    }

                    if (_output_dir != null) {
                        if ((_verbosity >= 2) && (i == 20)) {
                            System.out.println("==================");
                            System.out.println("Saving to: " + _output_dir);
                            System.out.println("==================");
                        }
                        SolrDocJSON.saveSolrDoc(solr_add_doc_json, _output_dir + "/" + output_json_bz2);
                    }
                }
                else {
                    System.err.println("Skipping: " + page_id);
                }
            }
        }
        else {
            // File did not exist, or could not be parsed
            String mess = "Failed to read in bzipped JSON file '" + full_json_file_in + "'";
            if (_strict_file_io) {
                throw new IOException(mess);
            }
            else {
                System.err.println("Warning: " + mess);
                System.out.println("Warning: " + mess);
            }
        }

        return ef_num_pages;
    }
    */
}
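generateSolrDocJSON() and postSolrDoc() are provided by the SolrDocJSON class elsewhere in this package and are not shown in this file. Purely as a hedged sketch of the kind of HTTP call the posting step amounts to, assuming _solr_url points at a Solr core's JSON /update handler, it might look roughly as follows; this is not the project's actual SolrDocJSON implementation.

// Illustrative sketch only -- not the project's SolrDocJSON.postSolrDoc()
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import org.json.JSONObject;

class SolrPostSketch
{
    // POST one JSON add-document to a Solr update handler,
    // e.g. http://host:8983/solr/<core>/update
    static void postSolrDoc(String solr_update_url, JSONObject solr_add_doc_json) throws IOException
    {
        URL url = new URL(solr_update_url);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/json");
        conn.setDoOutput(true);

        byte[] body = solr_add_doc_json.toString().getBytes(StandardCharsets.UTF_8);
        try (OutputStream os = conn.getOutputStream()) {
            os.write(body);
        }

        int status = conn.getResponseCode();
        if (status != HttpURLConnection.HTTP_OK) {
            System.err.println("Solr update failed, HTTP status = " + status);
        }
        conn.disconnect();
    }
}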