source: other-projects/hathitrust/vagrant-spark-hdfs-cluster/trunk/modules/hadoop/manifests/init.pp@ 30917

Last change on this file since 30917 was 30917, checked in by davidb, 8 years ago

Changes resulting from a fresh provisioning run, which yielded the first successful run of RUN.bash on the cluster over HDFS. OS=Trusty because the Xenial box doesn't appear to have Puppet installed. JDK=1.7 because Trusty's default apt-get repositories don't provide OpenJDK 8.

File size: 2.9 KB
RevLine 
# Class: hadoop
#
# Downloads Apache Hadoop 2.7.3, unpacks it into ${install_dir}, symlinks
# it to ${hadoop_home}, fixes ownership, and renders the cluster
# configuration files from this module's templates.
#
# NOTE(review): relies on the top-scope variables $install_dir,
# $hadoop_home, $user, $group and $path being set by the caller (node
# definition / site.pp) — confirm. The rename_hadoop resource implies
# $hadoop_home == "${install_dir}/hadoop"; the paths below are written
# against that assumption.
class hadoop {

  exec { 'download_hadoop':
    # Download from the main Apache distribution site; a slow mirror can
    # otherwise exceed the default exec timeout of 300 seconds, hence
    # timeout => 0 (no limit).  Previously used mirror:
    #   http://apache.mirror.amaze.com.au/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
    command => 'wget -O /tmp/hadoop-2.7.3.tar.gz http://www-us.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz',
    timeout => 0,
    path    => $path,
    # Skip the download when the unpacked tree is already present.
    # (`test -d` is exact; the previous `ls ${install_dir} | grep`
    # check matched any directory entry containing the substring.)
    unless  => "test -d ${install_dir}/hadoop-2.7.3",
    require => Package['openjdk-7-jdk'],
  }

  exec { 'unpack_hadoop':
    command => "tar -zxf /tmp/hadoop-2.7.3.tar.gz -C ${install_dir}",
    path    => $path,
    creates => "${install_dir}/hadoop-2.7.3",
    require => Exec['download_hadoop'],
  }

  # Provide a version-independent path: ${install_dir}/hadoop -> hadoop-2.7.3
  exec { 'rename_hadoop':
    command => "ln -s ${install_dir}/hadoop-2.7.3 ${install_dir}/hadoop",
    path    => $path,
    creates => "${hadoop_home}",
    require => Exec['unpack_hadoop'],
  }

  exec { 'chown_hadoop':
    command => "/bin/chown -R ${user}:${group} ${install_dir}/hadoop-2.7.3",
    path    => '/bin',
    user    => 'root',
    require => Exec['rename_hadoop'],
  }

  # --- Cluster configuration files, rendered from module templates. ---

  file { "${hadoop_home}/etc/hadoop/slaves":
    content => template('hadoop/slaves'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  file { "${hadoop_home}/etc/hadoop/masters":
    content => template('hadoop/masters'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  file { "${hadoop_home}/etc/hadoop/core-site.xml":
    content => template('hadoop/core-site.xml'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  file { "${hadoop_home}/etc/hadoop/mapred-site.xml":
    content => template('hadoop/mapred-site.xml'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  file { "${hadoop_home}/etc/hadoop/hdfs-site.xml":
    content => template('hadoop/hdfs-site.xml'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  file { "${hadoop_home}/etc/hadoop/hadoop-env.sh":
    content => template('hadoop/hadoop-env.sh'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => Exec['chown_hadoop'],
  }

  # HDFS on-disk storage for the NameNode and DataNode.
  file { [ "${hadoop_home}/hadoop_store",
           "${hadoop_home}/hadoop_store/hdfs",
           "${hadoop_home}/hadoop_store/hdfs/namenode",
           "${hadoop_home}/hadoop_store/hdfs/datanode" ]:
    ensure  => 'directory',
    owner   => "${user}",
    group   => "${group}",
    mode    => '0755',
    require => Exec['chown_hadoop'],
  }

  # Per-user environment setup (HADOOP_HOME, PATH, etc.), sourced from
  # ~/.bashrc below.
  file { "/home/${user}/.bashrc-setup-hadoop":
    content => template('hadoop/setup-hadoop.bash'),
    mode    => '0644',
    owner   => $user,
    group   => $group,
    require => [ Exec['unpack_hadoop'], File["/home/${user}"] ],
  }

  # file_line comes from the puppetlabs-stdlib module.
  # Use an absolute path: the previous relative ". .bashrc-setup-hadoop"
  # only resolved when the shell's working directory was the home dir.
  file_line { 'setup_hadoop_home':
    ensure  => present,
    path    => "/home/${user}/.bashrc",
    line    => ". /home/${user}/.bashrc-setup-hadoop",
    require => [ Exec['unpack_hadoop'], File["/home/${user}"] ],
  }

}
122
Note: See TracBrowser for help on using the repository browser.