#!/bin/bash

# Submit the org.hathitrust.PrepareForIngest Spark job over a JSON file list.
#
# To work, the following bash variables need to have been set:
#
#   json_filelist input_dir output_dir
#
# Optionally:
#
#   master_opt  -- extra spark-submit options, e.g. "--master spark://host:7077"
#
# Typically done through running a wrapper script, such as:
#
#   RUN-PD-CLUSTER.bash

# --- Required-variable checks ------------------------------------------------
# A bare 'exit' here would return status 0 and let a calling wrapper believe
# the run succeeded; exit 1 signals the configuration error.

if [ -z "$json_filelist" ] ; then
  echo "_RUN.bash: Failed to set 'json_filelist'" 1>&2
  exit 1
fi

if [ -z "$input_dir" ] ; then
  echo "_RUN.bash: Failed to set 'input_dir'" 1>&2
  exit 1
fi

if [ -z "$output_dir" ] ; then
  echo "_RUN.bash: Failed to set 'output_dir'" 1>&2
  exit 1
fi

# --- Detect whether a Hadoop/Spark cluster is expected to be involved --------

run_jps=0
run_jps_daemons=""
run_jps_daemons_suffix="daemon"
using_hdfs=0

# ${var##hdfs://*} strips the entire value when it starts with "hdfs://",
# so an empty result means the directory lives on HDFS.
if [ -z "${input_dir##hdfs://*}" ] || [ -z "${output_dir##hdfs://*}" ] ; then
  # Evidence of running command over HDFS
  run_jps=1
  run_jps_daemons="Spark"
  using_hdfs=1
fi

# Guard against an unset/empty $master_opt: the pattern-strip below also
# yields "" for an empty string, which would count as false cluster evidence.
if [ -n "$master_opt" ] && [ -z "${master_opt##--master spark://*}" ] ; then
  # Evidence of running command submitted to Spark cluster
  run_jps=1
  if [ -n "$run_jps_daemons" ] ; then
    run_jps_daemons="$run_jps_daemons and Hadoop"
    run_jps_daemons_suffix="daemons"
  else
    run_jps_daemons="Hadoop"
  fi
fi

if [ "$run_jps" = "1" ] ; then
  echo
  echo "****"
  echo "* Checking for $run_jps_daemons $run_jps_daemons_suffix"
  echo "* Running 'jps':"
  echo "****"
  # 'egrep' is deprecated; 'grep -E' is the portable spelling.  Filter out
  # the Jps process itself, and prefix each daemon line for the banner.
  jps | grep -E -v " Jps$" | sed 's/^/* /'
  echo "****"
  echo "* Done"
  echo "****"
  echo

  sleep 1
fi

if [ "$using_hdfs" = "1" ] ; then
  if ! hadoop fs -test -d "$output_dir" ; then
    echo "Creating directory:"
    echo "  $output_dir"
    # NOTE(review): the original script only announced the directory and never
    # created it -- presumably the Spark job does.  Confirm, or uncomment:
    #   hadoop fs -mkdir -p "$output_dir"
  fi
fi

# --- Launch ------------------------------------------------------------------

self_contained_jar=target/htrc-ef-ingest-0.9-jar-with-dependencies.jar

# Build the command as an array so arguments containing spaces survive intact
# (the old string-concatenation + unquoted-$cmd approach word-split them).
# $master_opt is deliberately unquoted: it holds zero or more whole options
# (e.g. "--master spark://host:7077") that must word-split.
cmd=(spark-submit --class org.hathitrust.PrepareForIngest \
     $master_opt "$self_contained_jar" \
     --verbosity 1 "$json_filelist" "$input_dir" "$output_dir" "$@")

echo "****"
echo "* Launching:"
echo "* ${cmd[*]}"
echo "****"
if [ "$run_jps" = "1" ] ; then
  echo "* Monitor progress on Spark cluster through:"
  echo "* http://10.10.0.52:8080/"
  echo "****"
fi
echo
sleep 2

"${cmd[@]}"