source: other-projects/hathitrust/wcsa/extracted-features-solr/trunk/solr-ingest/scripts/_RUN.sh@ 31044

Last change on this file since 31044 was 31044, checked in by davidb, 7 years ago

Fixed up error when output_dir is empty

  • Property svn:executable set to *
File size: 2.2 KB
RevLine 
[30912]1#!/bin/bash
2
# To work, the following bash variables need to have been set:
4#
5# json_filelist input_dir output_dir
6#
7# Typically done through running a wrapper script, such as:
8#
9# RUN-PD-CLUSTER.bash
[30912]10
# Abort early if a required environment variable is missing.
# NOTE: these previously used bare 'exit', which returns status 0, so a
# calling wrapper (e.g. RUN-PD-CLUSTER.bash) could not detect the failure.
# 'exit 1' makes the failure visible to callers.
if [ "x$json_filelist" = "x" ] ; then
  echo "_RUN.bash: Failed to set 'json_filelist'" 1>&2
  exit 1
fi

if [ "x$input_dir" = "x" ] ; then
  echo "_RUN.bash: Failed to set 'input_dir'" 1>&2
  exit 1
fi

# 'output_dir' is deliberately optional (may be empty); its uses later in
# the script are guarded with explicit non-empty checks.
#if [ "x$output_dir" = "x" ] ; then
#  echo "_RUN.bash: Failed to set 'output_dir'" 1>&2
#  exit
#fi
[30926]25
# Defaults: assume no daemon check is needed and no HDFS involvement.
run_jps=0
run_jps_daemons=""
run_jps_daemons_suffix="daemon"
using_hdfs=0

# An 'hdfs://' prefix on either directory indicates the job reads or
# writes HDFS.  (An empty value also satisfies '-z' here, since the
# '##' pattern strips nothing from an empty string.)
if [ -z "${input_dir##hdfs://*}" ] || [ -z "${output_dir##hdfs://*}" ] ; then
  using_hdfs=1
  run_jps=1
  run_jps_daemons="Spark"
fi
37
# A '--master spark://...' option means the job is submitted to a Spark
# cluster, so the Hadoop daemons should be checked as well.  (As above,
# an empty/unset master_opt also satisfies the '-z' test.)
if [ -z "${master_opt##--master spark://*}" ] ; then
  run_jps=1
  if [ -z "$run_jps_daemons" ] ; then
    run_jps_daemons="Hadoop"
  else
    # Already checking Spark daemons: extend the message and pluralise.
    run_jps_daemons="$run_jps_daemons and Hadoop"
    run_jps_daemons_suffix="daemons"
  fi
fi
48
# When daemons are expected, list the JVM processes that are actually up:
# the output of 'jps', minus its own entry, each line prefixed with '* '.
if [ "$run_jps" = "1" ] ; then
  printf '\n'
  printf '%s\n' "****"
  printf '%s\n' "* Checking for $run_jps_daemons $run_jps_daemons_suffix, by running 'jps':"
  printf '%s\n' "****"
  jps | grep -E -v " Jps$" | sed 's/^/* /g'
  printf '%s\n' "****"
  printf '%s\n' "* Done"
  printf '%s\n' "****"
  printf '\n'

  # Give the reader a moment to see the list before the launch banner.
  sleep 1
fi
62
# When writing to HDFS, make sure the (optional) output directory exists.
# BUG FIX: the original announced "Creating directory:" but never actually
# created it -- the 'hadoop fs -mkdir -p' call was missing.
if [ "$using_hdfs" = "1" ] ; then
  if [ "x$output_dir" != "x" ] ; then
    # '-test -d' returns non-zero when the directory does not exist.
    if ! hadoop fs -test -d "$output_dir" ; then
      echo "Creating directory:"
      echo " $output_dir"
      hadoop fs -mkdir -p "$output_dir"
    fi
  fi
fi
73
# Assemble the spark-submit invocation as a flat command string, executed
# later via unquoted expansion.
self_contained_jar=target/htrc-ef-ingest-0.9-jar-with-dependencies.jar
cmd="spark-submit --class org.hathitrust.extractedfeatures.ProcessForSolrIngest $master_opt $self_contained_jar"

# Append "<flag> <value>" to $cmd, but only when the value is non-empty.
append_opt() {
  if [ -n "$2" ] ; then
    cmd="$cmd $1 $2"
  fi
}

append_opt --solr-url   "$solr_url"
append_opt --output-dir "$output_dir"

# Mandatory trailing arguments, plus any extra args given to this script.
cmd="$cmd --properties ef-solr.properties $input_dir $json_filelist $*"
[30975]87
# Show the exact command about to run, then execute it.
# FIX: corrected user-facing typo "Lauching" -> "Launching".
echo "****"
echo "* Launching:"
echo "* $cmd"
echo "****"

if [ "$run_jps" = "1" ] ; then
  echo "* Monitor progress on Spark cluster through:"
  echo "* http://10.10.0.52:8080/"
  echo "****"
fi
echo
sleep 2

# Intentionally unquoted: $cmd is a flat string that must word-split back
# into the spark-submit command and its arguments.
$cmd
102
Note: See TracBrowser for help on using the repository browser.