- Timestamp:
- 2016-10-25T10:02:58+13:00 (8 years ago)
- Location:
- other-projects/hathitrust/vagrant-spark-hdfs-cluster
- Files:
-
- 1 edited
- 1 moved
Legend:
- Unmodified
- Added
- Removed
-
other-projects/hathitrust/vagrant-spark-hdfs-cluster/trunk/modules/hadoop/manifests/init.pp
r30903 r30913 2 2 3 3 exec { "download_hadoop": 4 # Download from nearby mirror, otherwise task can time-out5 command => "wget -O /tmp/hadoop.tar.gz http://apache.mirror.amaze.com.au/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz",6 # command => "wget -O /tmp/hadoop.tar.gz http://www-eu.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz",7 path => $path,8 user => $user,9 group => $user,10 unless => "ls ${install_dir} | grep hadoop-2.7.3",11 require => Package["openjdk-7-jdk"]12 }4 # Download from nearby mirror, otherwise task can time-out 5 command => "wget -O /tmp/hadoop-2.7.3.tar.gz http://apache.mirror.amaze.com.au/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz", 6 # command => "wget -O /tmp/hadoop-2.7.3.tar.gz http://www-eu.apache.org/dist/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz", 7 path => $path, 8 user => $user, 9 group => $user, 10 unless => "ls ${install_dir} | grep hadoop-2.7.3", 11 require => Package["openjdk-7-jdk"] 12 } 13 13 14 14 exec { "unpack_hadoop" : 15 command => "tar -zxf /tmp/hadoop .tar.gz -C ${install_dir}",15 command => "tar -zxf /tmp/hadoop-2.7.3.tar.gz -C ${install_dir}", 16 16 path => $path, 17 17 creates => "${hadoop_home}-2.7.3", 18 18 require => Exec["download_hadoop"] 19 }19 } 20 20 21 exec { 'chown': 22 command => "/bin/chown -R ${user}:${group} /opt/hadoop-2.7.3", 21 exec { "rename_hadoop" : 22 command => "ln -s ${install_dir}/hadoop-2.7.3 ${install_dir}/hadoop", 23 path => $path, 24 creates => "${hadoop_home}", 25 require => Exec["unpack_hadoop"] 26 } 27 28 exec { 'chown_hadoop': 29 command => "/bin/chown -R ${user}:${group} ${hadoop_home}-2.7.3", 23 30 path => '/bin', 24 31 user => 'root', 25 require => Exec[" unpack_hadoop"]26 }32 require => Exec["rename_hadoop"] 33 } 27 34 28 file { 29 "${hadoop_home} -2.7.3/etc/hadoop/slaves":35 file { 36 "${hadoop_home}/etc/hadoop/slaves": 30 37 content => template('hadoop/slaves'), 31 38 mode => 644, 32 39 owner => $user, 33 40 group => $group, 34 require => Exec[" 
unpack_hadoop"]41 require => Exec["chown_hadoop"] 35 42 } 36 43 37 file { 38 "${hadoop_home} -2.7.3/etc/hadoop/masters":44 file { 45 "${hadoop_home}/etc/hadoop/masters": 39 46 content => template('hadoop/masters'), 40 47 mode => 644, 41 48 owner => $user, 42 49 group => $group, 43 require => Exec[" unpack_hadoop"]50 require => Exec["chown_hadoop"] 44 51 } 45 52 46 53 file { 47 "${hadoop_home} -2.7.3/etc/hadoop/core-site.xml":54 "${hadoop_home}/etc/hadoop/core-site.xml": 48 55 content => template('hadoop/core-site.xml'), 49 56 mode => 644, 50 57 owner => $user, 51 58 group => $group, 52 require => Exec[" unpack_hadoop"]59 require => Exec["chown_hadoop"] 53 60 } 54 61 55 62 file { 56 "${hadoop_home} -2.7.3/etc/hadoop/mapred-site.xml":63 "${hadoop_home}/etc/hadoop/mapred-site.xml": 57 64 content => template('hadoop/mapred-site.xml'), 58 65 mode => 644, 59 66 owner => $user, 60 67 group => $group, 61 require => Exec[" unpack_hadoop"]68 require => Exec["chown_hadoop"] 62 69 } 63 70 64 71 file { 65 "${hadoop_home} -2.7.3/etc/hadoop/hdfs-site.xml":72 "${hadoop_home}/etc/hadoop/hdfs-site.xml": 66 73 content => template('hadoop/hdfs-site.xml'), 67 74 mode => 644, 68 75 owner => $user, 69 76 group => $group, 70 require => Exec[" unpack_hadoop"]77 require => Exec["chown_hadoop"] 71 78 } 72 79 73 80 file { 74 "${hadoop_home} -2.7.3/etc/hadoop/hadoop-env.sh":81 "${hadoop_home}/etc/hadoop/hadoop-env.sh": 75 82 content => template('hadoop/hadoop-env.sh'), 76 83 mode => 644, 77 84 owner => $user, 78 85 group => $group, 79 require => Exec[" unpack_hadoop"]86 require => Exec["chown_hadoop"] 80 87 } 81 88 82 file {83 [ "${hadoop_home} -2.7.3/hadoop_store",84 "${hadoop_home} -2.7.3/hadoop_store/hdfs",85 "${hadoop_home} -2.7.3/hadoop_store/hdfs/namenode",86 "${hadoop_home} -2.7.3/hadoop_store/hdfs/datanode"]:89 file { 90 [ "${hadoop_home}/hadoop_store", 91 "${hadoop_home}/hadoop_store/hdfs", 92 "${hadoop_home}/hadoop_store/hdfs/namenode", 93 
"${hadoop_home}/hadoop_store/hdfs/datanode"]: 87 94 ensure => 'directory', 88 95 owner => "${user}", 89 96 group => "${group}", 90 97 mode => 755, 91 require => Exec[" unpack_hadoop"]98 require => Exec["chown_hadoop"] 92 99 } 93 100 94 file_line { "add_hadoop_home": 101 file { 102 "/home/${user}/.bashrc-setup-hadoop": 103 content => template('hadoop/setup-hadoop.bash'), 104 mode => 644, 105 owner => $user, 106 group => $group, 107 require => [ Exec["unpack_hadoop"], File["/home/${user}"] ] 108 } 109 110 file_line { "setup_hadoop_home": 95 111 ensure => present, 96 112 path => "/home/${user}/.bashrc", 97 line => " export HADOOP_HOME=\"${hadoop_home}-2.7.3\"",113 line => ". .bashrc-setup-hadoop", 98 114 require => [ Exec["unpack_hadoop"], File["/home/${user}"] ] 99 }115 } 100 116 101 file_line { "add_hadoop_confdir":102 ensure => present,103 path => "/home/${user}/.bashrc",104 line => 'export HADOOP_CONF_DIR="$HADOOP_HOME/etc/hadoop"',105 require => [ Exec["unpack_hadoop"], File["/home/${user}"] ]106 }107 117 108 file_line { "add_hadoop_setup":109 ensure => present,110 path => "/home/${user}/.bashrc",111 line => 'source "$HADOOP_HOME/etc/hadoop/hadoop-env.sh"',112 require => [ Exec["unpack_hadoop"], File["/home/${user}"] ]113 }114 115 file_line { "add_hadoop_path":116 ensure => present,117 path => "/home/${user}/.bashrc",118 line => 'export PATH="$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin"',119 require => [ Exec["unpack_hadoop"], File["/home/${user}"] ]120 }121 118 122 119 }
Note: See TracChangeset for help on using the changeset viewer.