source: other-projects/hathitrust/vagrant-spark-hdfs-cluster/trunk/manifests/base-hadoop.pp@ 30917

Last change on this file since 30917 was 30917, checked in by davidb, 4 years ago

Changes resulting from a fresh run at provisioning, which yielded the first successful run of RUN.bash on the cluster over HDFS. OS=Trusty because the Xenial box doesn't appear to have Puppet installed. JDK=1.7 because Trusty by default doesn't provide an apt-get package for OpenJDK 8.

File size: 2.7 KB
Line 
1$install_dir = "/opt"
2$hadoop_home = "${install_dir}/hadoop"
3$spark_home = "${install_dir}/spark"
4
5$user = "htrc"
6$group = "hadoop"
7
8$hadoop_master = '10.10.0.52'
9$hadoop_backup = '10.10.0.51'
10$hadoop_1 = '10.10.0.53'
11$hadoop_2 = '10.10.0.54'
12$hadoop_3 = '10.10.0.55'
13
14include hadoop
15include spark
16
17file {
18 "/home/${user}":
19 ensure => "directory",
20 owner => "${user}",
21 group => "${group}",
22 mode => 700,
23 require => [ User["${user}"], Group["${group}"] ],
24
25}
26
27group { "${group}":
28 ensure => "present",
29}
30
31user {
32 "${user}":
33 ensure => "present",
34 home => "/home/${user}",
35 name => "${user}",
36 shell => "/bin/bash",
37 managehome => true,
38 gid => $group
39}
40
41
42
43exec { 'apt-get update':
44 command => '/usr/bin/apt-get update',
45}
46
47package { "openjdk-7-jdk" :
48 ensure => present,
49 require => [ Exec['apt-get update'], File["/home/${user}"] ]
50}
51
52#package { "subversion" :
53# ensure => present,
54# require => [ Exec['apt-get update'], File["/home/${user}"] ]
55#}
56
57#package { "maven" :
58# ensure => present,
59# require => [ Exec['apt-get update'], File["/home/${user}"] ]
60#}
61
62file {
63 "/home/${user}/.ssh":
64 ensure => "directory",
65 owner => "${user}",
66 group => "${group}",
67 mode => 750,
68 require => [ Exec['apt-get update'], File["/home/${user}"] ]
69}
70
71file {
72 "/home/${user}/.ssh/id_rsa":
73 source => "puppet:///modules/hadoop/id_rsa",
74 ensure => present,
75 mode => 600,
76 owner => $user,
77 group => $group,
78 require => File["/home/${user}/.ssh"]
79 }
80
81file {
82 "/home/${user}/.ssh/id_rsa.pub":
83 source => "puppet:///modules/hadoop/id_rsa.pub",
84 ensure => present,
85 mode => 644,
86 owner => $user,
87 group => $group,
88 require => File["/home/${user}/.ssh"]
89 }
90
91ssh_authorized_key { "ssh_key":
92 ensure => "present",
93 key => "AAAAB3NzaC1yc2EAAAADAQABAAABAQCeHdBPVGuSPVOO+n94j/Y5f8VKGIAzjaDe30hu9BPetA+CGFpszw4nDkhyRtW5J9zhGKuzmcCqITTuM6BGpHax9ZKP7lRRjG8Lh380sCGA/691EjSVmR8krLvGZIQxeyHKpDBLEmcpJBB5yoSyuFpK+4RhmJLf7ImZA7mtxhgdPGhe6crUYRbLukNgv61utB/hbre9tgNX2giEurBsj9CI5yhPPNgq6iP8ZBOyCXgUNf37bAe7AjQUMV5G6JMZ1clEeNPN+Uy5Yrfojrx3wHfG40NuxuMrFIQo5qCYa3q9/SVOxsJILWt+hZ2bbxdGcQOd9AXYFNNowPayY0BdAkSr",
94 type => "ssh-rsa",
95 user => "${user}",
96 require => File["/home/${user}/.ssh/id_rsa.pub"]
97}
98
99file{
100 "/home/${user}/.ssh/config":
101 owner => "${user}",
102 group => "${group}",
103 mode => 755,
104 content => "StrictHostKeyChecking no",
105 require => File["/home/${user}/.ssh/id_rsa.pub"]
106}
107
108host { 'hadoop1':
109 ip => "${hadoop_1}",
110}
111
112host { 'hadoop2':
113 ip => "${hadoop_2}",
114}
115
116host { 'hadoop3':
117 ip => "${hadoop_3}",
118}
119
120host { 'master':
121 ip => "${hadoop_master}",
122}
123
124host { 'backup':
125 ip => "${hadoop_backup}",
126}
Note: See TracBrowser for help on using the repository browser.