#!/bin/bash

# To work, the following bash variables need to have been set:
#
#   json_filelist input_dir output_dir
#
# Typically this is done by running a wrapper script, such as:
#
#   RUN-PD-CLUSTER.bash
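#
# A minimal wrapper sketch (all values hypothetical; adapt to the cluster):
#
#   export json_filelist=pd-file-listing.txt
#   export input_dir=hdfs://10.10.0.52:9000/user/htrc/json-files
#   export output_dir=hdfs://10.10.0.52:9000/user/htrc/full-ef
#   export master_opt="--master spark://10.10.0.52:7077"
#   . ./_RUN.bash "$@"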

if [ "x$json_filelist" = "x" ] ; then
    echo "_RUN.bash: Failed to set 'json_filelist'" 1>&2
    exit 1
fi

if [ "x$input_dir" = "x" ] ; then
    echo "_RUN.bash: Failed to set 'input_dir'" 1>&2
    exit 1
fi

#if [ "x$output_dir" = "x" ] ; then
#    echo "_RUN.bash: Failed to set 'output_dir'" 1>&2
#    exit 1
#fi

run_jps=0
run_jps_daemons=""
run_jps_daemons_suffix="daemon"
using_hdfs=0

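# ${var##hdfs://*} strips the longest prefix matching 'hdfs://*', leaving an
# empty string exactly when the value starts with 'hdfs://' (or is empty,
# hence the extra non-empty guard on output_dir below)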
if [ "x${input_dir##hdfs://*}" = "x" ] || \
   { [ "x$output_dir" != "x" ] && [ "x${output_dir##hdfs://*}" = "x" ] ; } ; then
    # Evidence of running the command over HDFS
    run_jps=1
    run_jps_daemons="Hadoop"
    using_hdfs=1
fi

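# Same prefix-strip test (with the same empty-value guard): true when
# master_opt starts with '--master spark://'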
if [ "x$master_opt" != "x" ] && [ "x${master_opt##--master spark://*}" = "x" ] ; then
    # Evidence of running command submitted to Spark cluster
    run_jps=1
    if [ "x$run_jps_daemons" != "x" ] ; then
        run_jps_daemons="$run_jps_daemons and Spark"
        run_jps_daemons_suffix="daemons"
    else
        run_jps_daemons="Spark"
    fi
fi

if [ "$run_jps" = "1" ] ; then
    echo
    echo "****"
    echo "* Checking for $run_jps_daemons $run_jps_daemons_suffix, by running 'jps':"
    echo "****"
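    # List the running JVMs, drop the 'Jps' process itself, and prefix each
    # line with '* ' to match the banner formatting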
    jps | egrep -v " Jps$" | sed 's/^/* /g'
    echo "****"
    echo "* Done"
    echo "****"
    echo

    sleep 1
fi

if [ "$using_hdfs" = "1" ] ; then
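    # 'hadoop fs -test -d' exits 0 when the path exists and is a directory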
    hadoop fs -test -d "$output_dir"

    if [ $? != 0 ] ; then
        echo "Creating directory:"
        echo " $output_dir"
        hadoop fs -mkdir -p "$output_dir"
    fi
fi

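# The assembled command ends up looking like this (hypothetical values):
#
#   spark-submit --class org.hathitrust.PrepareForIngest \
#       --master spark://10.10.0.52:7077 \
#       target/htrc-ef-ingest-0.9-jar-with-dependencies.jar \
#       --solr-url http://solr-host:8983/solr/htrc-core \
#       --output-dir hdfs://10.10.0.52:9000/user/htrc/full-ef \
#       --verbosity 1 $input_dir $json_filelist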
self_contained_jar=target/htrc-ef-ingest-0.9-jar-with-dependencies.jar
cmd="spark-submit --class org.hathitrust.PrepareForIngest $master_opt $self_contained_jar"

if [ "x$solr_url" != "x" ] ; then
    cmd="$cmd --solr-url $solr_url"
fi

if [ "x$output_dir" != "x" ] ; then
    cmd="$cmd --output-dir $output_dir"
fi

cmd="$cmd --verbosity 1 $input_dir $json_filelist $*"

echo "****"
echo "* Launching:"
echo "* $cmd"
echo "****"

if [ "$run_jps" = "1" ] ; then
    echo "* Monitor progress on Spark cluster through:"
    echo "* http://10.10.0.52:8080/"
    echo "****"
fi
echo
sleep 2

$cmd