package org.hathitrust.extractedfeatures;

import java.io.IOException;

import org.apache.spark.api.java.function.Function;
import org.apache.spark.util.DoubleAccumulator;

import org.bson.Document;
import org.json.JSONObject;

import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.MongoException;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;

/**
 * Spark map function that takes the (relative) filename of one HTRC Extracted
 * Features JSON volume, reads the file in, and inserts it as a document into
 * the 'volumes' collection of the 'htrc_ef' MongoDB database.
 */
class PerVolumeMongoDBDocumentsMap implements Function<String, Integer>
{
    private static final long serialVersionUID = 1L;

    protected String _input_dir;
    protected int _verbosity;

    protected DoubleAccumulator _progress_accum;
    protected double _progress_step;

    boolean _strict_file_io;

    public PerVolumeMongoDBDocumentsMap(String input_dir, int verbosity,
                                        DoubleAccumulator progress_accum, double progress_step,
                                        boolean strict_file_io)
    {
        _input_dir = input_dir;
        _verbosity = verbosity;

        _progress_accum = progress_accum;
        _progress_step = progress_step;

        _strict_file_io = strict_file_io;
    }

    public Integer call(String json_file_in) throws IOException
    {
        try {
            // Note: a new MongoClient is opened (and closed) for every volume processed
            MongoClientURI mongo_url = new MongoClientURI("mongodb://gc3:27017,gc4:27017,gc5:27017");
            MongoClient mongoClient = new MongoClient(mongo_url);

            MongoDatabase database = mongoClient.getDatabase("htrc_ef");
            MongoCollection<Document> collection = database.getCollection("volumes");

            String full_json_file_in = _input_dir + "/" + json_file_in;
            System.out.println("Processing: " + full_json_file_in);

            String extracted_feature_json_doc = ClusterFileIO.readTextFile(full_json_file_in);
            Document doc = Document.parse(extracted_feature_json_doc);

            collection.insertOne(doc);

            /* Earlier experiment using the legacy MongoDB driver API, kept for reference:

            //Mongo mongo = new Mongo("localhost", 27017);
            MongoClient mongo = new MongoClient("localhost", 27017);
            DB db = mongo.getDB("yourdb");
            DBCollection coll = db.getCollection("dummyColl");

            // convert JSON to DBObject directly
            DBObject dbObject = (DBObject) JSON.parse("{'name':'mkyong', 'age':30}");
            coll.insert(dbObject);

            DBCursor cursorDoc = coll.find();
            while (cursorDoc.hasNext()) {
                System.out.println(cursorDoc.next());
            }

            System.out.println("Done");
            */

            mongoClient.close();
        }
        catch (MongoException e) {
            e.printStackTrace();
        }

        return 1;
    }

    public Integer callPageCount(String json_file_in) throws IOException
    {
        // Alternative per-volume map body: reads the JSON file and returns its 'pageCount'
        Integer page_count = 0;

        String full_json_file_in = _input_dir + "/" + json_file_in;
        JSONObject extracted_feature_record = JSONClusterFileIO.readJSONFile(full_json_file_in);

        if (extracted_feature_record != null) {
            String volume_id = extracted_feature_record.getString("id");
            JSONObject ef_features = extracted_feature_record.getJSONObject("features");

            if (_verbosity >= 1) {
                System.out.println("Processing: " + json_file_in);
            }

            if (ef_features != null) {
                String page_count_str = ef_features.getString("pageCount");
                if (!page_count_str.equals("")) {
                    page_count = Integer.parseInt(page_count_str);
                }
                else {
                    System.err.println("No 'pageCount' in 'features' in volume id '" + volume_id + "' => defaulting to 0");
                }
            }
            else {
                System.err.println("No 'features' section in JSON file => Skipping id: " + volume_id);
            }
        }
        else {
            // File did not exist, or could not be parsed
            String mess = "Failed to read in bzipped JSON file '" + full_json_file_in + "'";

            if (_strict_file_io) {
                throw new IOException(mess);
            }
            else {
                System.err.println("Warning: " + mess);
                System.out.println("Warning: " + mess);
            }
        }

        _progress_accum.add(_progress_step);

        return page_count;
    }
}
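
/*
 * Illustrative driver-side sketch (not part of the original class): one way this
 * map function might be applied over an RDD of JSON filenames with Spark's Java
 * API. The SparkConf/JavaSparkContext setup, the "json-filenames.txt" input list,
 * the "/hdfs/ef-json" input directory and the accumulator name are assumptions
 * for illustration only, not taken from the original code.
 *
 *   SparkConf conf = new SparkConf().setAppName("Per-volume MongoDB ingest");
 *   JavaSparkContext jsc = new JavaSparkContext(conf);
 *   DoubleAccumulator progress_accum = jsc.sc().doubleAccumulator("progress");
 *
 *   JavaRDD<String> json_files = jsc.textFile("json-filenames.txt");
 *   double progress_step = 100.0 / json_files.count();
 *
 *   PerVolumeMongoDBDocumentsMap map_fn =
 *       new PerVolumeMongoDBDocumentsMap("/hdfs/ef-json", 1, progress_accum,
 *                                        progress_step, false);
 *
 *   json_files.map(map_fn).count(); // count() forces evaluation of the map
 *
 *   jsc.close();
 */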