#!/bin/bash

# To work, the following bash variables need to have been set:
#
#   json_filelist input_dir output_dir
#
# Typically this is done by running a wrapper script, such as:
#
#   RUN-PD-CLUSTER.bash

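# As an illustrative sketch only (the values below are hypothetical, not taken
# from RUN-PD-CLUSTER.bash), a wrapper would set the expected variables before
# invoking this script, along the lines of:
#
#   export json_filelist="pd-file-listing.txt"
#   export input_dir="hdfs://namenode:9000/user/htrc/pairtree-json"
#   export output_dir="hdfs://namenode:9000/user/htrc/ingest-output"
#   export master_opt="--master spark://spark-master:7077"
#   ./_RUN.bash
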
11 | if [ "x$json_filelist" = "x" ] ; then
|
---|
12 | echo "_RUN.bash: Failed to set 'json_filelist'" 1>&2
|
---|
13 | exit
|
---|
14 | fi
|
---|
15 |
|
---|
16 | if [ "x$input_dir" = "x" ] ; then
|
---|
17 | echo "_RUN.bash: Failed to set 'input_dir'" 1>&2
|
---|
18 | exit
|
---|
19 | fi
|
---|
20 |
|
---|
#if [ "x$output_dir" = "x" ] ; then
#    echo "_RUN.bash: Failed to set 'output_dir'" 1>&2
#    exit
#fi

run_jps=0
run_jps_daemons=""
run_jps_daemons_suffix="daemon"
using_hdfs=0

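# Note: "${var##hdfs://*}" strips the longest prefix matching 'hdfs://*', so
# the expansion is empty exactly when the variable's value starts with
# "hdfs://".  The tests below use this idiom to detect HDFS-style paths and,
# further down, a "--master spark://..." option.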
31 | if [ "x${input_dir##hdfs://*}" = "x" ] || [ "x${output_dir##hdfs://*}" = "x" ] ; then
|
---|
32 | # Evidence of running command over HDFS
|
---|
33 | run_jps=1
|
---|
34 | run_jps_daemons="Spark"
|
---|
35 | using_hdfs=1
|
---|
36 | fi
|
---|
37 |
|
---|
38 | if [ "x${master_opt##--master spark://*}" = "x" ] ; then
|
---|
39 | # Evidence of running command submitted to Spark cluster
|
---|
40 | run_jps=1
|
---|
41 | if [ "x$run_jps_daemons" != "x" ] ; then
|
---|
42 | run_jps_daemons="$run_jps_daemons and Hadoop"
|
---|
43 | run_jps_daemons_suffix="daemons"
|
---|
44 | else
|
---|
45 | run_jps_daemons="Hadoop"
|
---|
46 | fi
|
---|
47 | fi
|
---|
48 |
|
---|
if [ "$run_jps" = "1" ] ; then
    echo
    echo "****"
    echo "* Checking for $run_jps_daemons $run_jps_daemons_suffix, by running 'jps':"
    echo "****"
    jps | grep -Ev " Jps$" | sed 's/^/* /g'
    echo "****"
    echo "* Done"
    echo "****"
    echo

    sleep 1
fi

if [ "$using_hdfs" = "1" ] ; then
    hadoop fs -test -d "$output_dir"

    if [ $? != 0 ] ; then
        echo "Creating directory:"
        echo "  $output_dir"
        hadoop fs -mkdir -p "$output_dir"
    fi
fi

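# Build up the spark-submit invocation as a single string; optional arguments
# (Solr URL, output directory) are appended only when the corresponding
# variable has been set.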
self_contained_jar=target/htrc-ef-ingest-0.9-jar-with-dependencies.jar
cmd="spark-submit --class org.hathitrust.PrepareForIngest $master_opt $self_contained_jar"

if [ "x$solr_url" != "x" ] ; then
    cmd="$cmd --solr-url $solr_url"
fi

if [ "x$output_dir" != "x" ] ; then
    cmd="$cmd --output-dir $output_dir"
fi

cmd="$cmd --verbosity 1 $input_dir $json_filelist $*"

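# For orientation only, with purely hypothetical values for the variables
# involved, the assembled command echoed below might expand to something like:
#
#   spark-submit --class org.hathitrust.PrepareForIngest \
#     --master spark://spark-master:7077 \
#     target/htrc-ef-ingest-0.9-jar-with-dependencies.jar \
#     --output-dir hdfs://namenode:9000/user/htrc/ingest-output \
#     --verbosity 1 hdfs://namenode:9000/user/htrc/pairtree-json pd-file-listing.txt
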
echo "****"
echo "* Launching:"
echo "* $cmd"
echo "****"

if [ "$run_jps" = "1" ] ; then
    echo "* Monitor progress on Spark cluster through:"
    echo "* http://10.10.0.52:8080/"
    echo "****"
fi
echo
sleep 2

$cmd