#!/bin/bash

# To work, the following bash variables need to have been set:
#
#   json_filelist input_dir output_dir
#
# Typically done through running a wrapper script, such as:
#
#   RUN-PD-CLUSTER.bash
#
# Optionally, 'master_opt' may carry a "--master spark://..." option that
# is passed through to spark-submit.

# Abort with a non-zero status if any required variable is unset or empty.
# (A bare 'exit' would propagate the status of the preceding echo, i.e. 0.)
if [ -z "$json_filelist" ] ; then
    echo "_RUN.bash: Failed to set 'json_filelist'" 1>&2
    exit 1
fi

if [ -z "$input_dir" ] ; then
    echo "_RUN.bash: Failed to set 'input_dir'" 1>&2
    exit 1
fi

if [ -z "$output_dir" ] ; then
    echo "_RUN.bash: Failed to set 'output_dir'" 1>&2
    exit 1
fi

run_jps=0
run_jps_daemons=""
run_jps_daemons_suffix="daemon"

# ${var##hdfs://*} expands to "" only when $var begins with "hdfs://".
if [ -z "${input_dir##hdfs://*}" ] || [ -z "${output_dir##hdfs://*}" ] ; then
    # Evidence of running command over HDFS => the Hadoop daemons must be up.
    # (The daemon labels were previously swapped between this branch and the
    # spark:// branch below.)
    run_jps=1
    run_jps_daemons="Hadoop"
fi

# The -n guard stops an unset/empty 'master_opt' from matching the pattern
# (an empty string strips to "" and would otherwise trigger this branch).
if [ -n "$master_opt" ] && [ -z "${master_opt##--master spark://*}" ] ; then
    # Evidence of the command being submitted to a Spark cluster.
    run_jps=1
    if [ -n "$run_jps_daemons" ] ; then
        run_jps_daemons="$run_jps_daemons and Spark"
        run_jps_daemons_suffix="daemons"
    else
        run_jps_daemons="Spark"
    fi
fi

# Show the user which daemons 'jps' can see before launching, so a missing
# cluster service is spotted up front rather than via a cryptic Spark error.
if [ "$run_jps" = "1" ] ; then
    echo
    echo "****"
    echo "* Checking for $run_jps_daemons $run_jps_daemons_suffix"
    echo "****"
    jps | sed 's/^/* /g'
    echo "****"
    echo "* Done"
    echo "****"
    echo

    sleep 1
fi

self_contained_jar="target/htrc-ef-ingest-0.9-jar-with-dependencies.jar"

# Build the command as an array so filenames/arguments containing spaces
# survive intact.  'master_opt' is deliberately left unquoted: it holds zero
# or more whitespace-separated options (e.g. "--master spark://host:7077").
# shellcheck disable=SC2086
cmd=(spark-submit --class org.hathitrust.PrepareForIngest \
    $master_opt "$self_contained_jar" \
    --verbosity 1 "$json_filelist" "$input_dir" "$output_dir" "$@")

echo "****"
echo "* Launching:"
echo "* ${cmd[*]}"
echo "****"
echo "* Monitor progress through:"
echo "*   http://10.10.0.52:8080/"
echo "****"
echo
sleep 1

"${cmd[@]}"

# spark-submit --class org.hathitrust.PrepareForIngest --master local[4] target/htrc-ef-ingest-0.9-jar-with-dependencies.jar --json-filelist=pd-file-listing-step10000.txt pd-ef-json-files pd-solr-json-files $*

# spark-submit --class org.hathitrust.PrepareForIngest --master local[4] target/htrc-ef-ingest-0.9-jar-with-dependencies.jar --json-filelist=pd-file-listing-step1000.txt json-files solr-files $*