diff --git a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp index ba1932b824..5c4cd1ef3b 100644 --- a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp +++ b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp @@ -34,6 +34,35 @@ include stdlib + $package_suffix = hiera("bigtop::package_suffix", "") + $parent_dir = hiera("bigtop::parent_dir", "") + $bigtop_base_version = hiera("bigtop::base_version", "") + + if $package_suffix != '' and $parent_dir != '' { + $bin_home = "${parent_dir}/${bigtop_base_version}/usr/bin" + $hadoop_home = "${parent_dir}/${bigtop_base_version}/usr/lib/hadoop" + } else { + $bin_home = '/usr/bin' + $hadoop_home = '/usr/lib/hadoop' + } + + $hadoop_package = "hadoop${package_suffix}" + $hadoop_hdfs_package = "hadoop${package_suffix}-hdfs" + $hadoop_hdfs_secondarynamenode_package = "hadoop${package_suffix}-hdfs-secondarynamenode" + $hadoop_hdfs_zkfc_package = "hadoop${package_suffix}-hdfs-zkfc" + $hadoop_hdfs_namenode_package = "hadoop${package_suffix}-hdfs-namenode" + $hadoop_kms_package = "hadoop${package_suffix}-kms" + $hadoop_httpfs_package = "hadoop${package_suffix}-httpfs" + $hadoop_hdfs_datanode_package = "hadoop${package_suffix}-hdfs-datanode" + $hadoop_hdfs_journalnode_package = "hadoop${package_suffix}-hdfs-journalnode" + + $hadoop_yarn_package = "hadoop${package_suffix}-yarn" + $hadoop_yarn_nodemanager_package = "hadoop${package_suffix}-yarn-nodemanager" + $hadoop_yarn_resourcemanager_package = "hadoop${package_suffix}-yarn-resourcemanager" + $hadoop_yarn_proxyserver_package = "hadoop${package_suffix}-yarn-proxyserver" + $hadoop_mapreduce_package = "hadoop${package_suffix}-mapreduce" + $hadoop_mapreduce_historyserver_package = "hadoop${package_suffix}-mapreduce-historyserver" + class deploy ($roles) { if ("datanode" in $roles) { @@ -90,11 +119,12 @@ } } + class init_hdfs { exec { "init hdfs": path => ['/bin','/sbin','/usr/bin','/usr/sbin'], - command => 'bash -x /usr/lib/hadoop/libexec/init-hdfs.sh', - require => Package['hadoop-hdfs'] + command => "bash -x ${::hadoop::hadoop_home}/libexec/init-hdfs.sh", + require => Package[$::hadoop::hadoop_hdfs_package] } } @@ -125,17 +156,17 @@ file { "/etc/hadoop/conf/hadoop-env.sh": content => template('hadoop/hadoop-env.sh'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } - package { "hadoop": + package { $::hadoop::hadoop_package: ensure => latest, require => Package["jdk"], } #FIXME: package { "hadoop-native": # ensure => latest, - # require => [Package["hadoop"]], + # require => [Package[$::hadoop::hadoop_package]], #} } @@ -168,9 +199,9 @@ include hadoop::common - package { "hadoop-yarn": + package { $::hadoop::hadoop_yarn_package: ensure => latest, - require => [Package["jdk"], Package["hadoop"]], + require => [Package["jdk"], Package[$::hadoop::hadoop_package]], } if ($hadoop_security_authentication == "kerberos") { @@ -181,19 +212,19 @@ # we don't actually need this package as long as we don't put the # keytab in a directory managed by it. But it creates user mapred whom we # wan't to give the keytab to.
- require => Package["hadoop-yarn"], + require => Package[$::hadoop::hadoop_yarn_package], } } file { "/etc/hadoop/conf/yarn-site.xml": content => template('hadoop/yarn-site.xml'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { "/etc/hadoop/conf/container-executor.cfg": content => template('hadoop/container-executor.cfg'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } } @@ -267,7 +298,7 @@ owner => 'hdfs', group => 'hdfs', mode => '0700', - require => Package["hadoop-hdfs"], + require => Package[$::hadoop::hadoop_hdfs_package], } file { $ssh_keypath: @@ -290,9 +321,9 @@ fail("High-availability secure clusters are not currently supported") } - package { "hadoop-hdfs": + package { $::hadoop::hadoop_hdfs_package: ensure => latest, - require => [Package["jdk"], Package["hadoop"]], + require => [Package["jdk"], Package[$::hadoop::hadoop_package]], } if ($hadoop_security_authentication == "kerberos") { @@ -303,20 +334,20 @@ # we don't actually need this package as long as we don't put the # keytab in a directory managed by it. But it creates user hdfs whom we # wan't to give the keytab to. - require => Package["hadoop-hdfs"], + require => Package[$::hadoop::hadoop_hdfs_package], } } file { "/etc/hadoop/conf/core-site.xml": content => template('hadoop/core-site.xml'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { "/etc/hadoop/conf/hdfs-site.xml": content => template('hadoop/hdfs-site.xml'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { @@ -325,7 +356,7 @@ owner => 'root', group => 'hadoop', mode => '0664', - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { @@ -334,7 +365,7 @@ owner => 'root', group => 'hadoop', mode => '0660', - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { @@ -343,7 +374,7 @@ owner => 'root', group => 'hadoop', mode => '0660', - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { @@ -352,7 +383,7 @@ owner => 'root', group => 'hadoop', mode => '0660', - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } if $hadoop_http_authentication_type == "kerberos" { @@ -374,7 +405,7 @@ # allows access by hdfs and yarn (and mapred - mhmm...) group => "hadoop", content => $http_auth_sig_secret, - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } # all the services will need this @@ -395,7 +426,7 @@ # we don't actually need this package as long as we don't put the # keytab in a directory managed by it. But it creates group hadoop which # we wan't to give the keytab to. - require => Package["hadoop"], + require => Package[$::hadoop::hadoop_package], } # all the services will need this as well @@ -436,9 +467,9 @@ ) inherits hadoop { include hadoop::common_hdfs - package { "hadoop-mapreduce": + package { $::hadoop::hadoop_mapreduce_package: ensure => latest, - require => [Package["jdk"], Package["hadoop"]], + require => [Package["jdk"], Package[$::hadoop::hadoop_package]], } if ($hadoop_security_authentication == "kerberos") { @@ -450,19 +481,19 @@ # we don't actually need this package as long as we don't put the # keytab in a directory managed by it. But it creates user yarn whom we # wan't to give the keytab to. 
- require => Package["hadoop-mapreduce"], + require => Package[$::hadoop::hadoop_mapreduce_package], } } file { "/etc/hadoop/conf/mapred-site.xml": content => template('hadoop/mapred-site.xml'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } file { "/etc/hadoop/conf/taskcontroller.cfg": content => template('hadoop/taskcontroller.cfg'), - require => [Package["hadoop"]], + require => [Package[$::hadoop::hadoop_package]], } } @@ -471,7 +502,7 @@ ) inherits hadoop { include hadoop::common_hdfs - package { "hadoop-hdfs-datanode": + package { $::hadoop::hadoop_hdfs_datanode_package: ensure => latest, require => Package["jdk"], } @@ -479,14 +510,14 @@ file { "/etc/default/hadoop-hdfs-datanode": content => template('hadoop/hadoop-hdfs'), - require => [Package["hadoop-hdfs-datanode"]], + require => [Package[$::hadoop::hadoop_hdfs_datanode_package]], } service { "hadoop-hdfs-datanode": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-hdfs-datanode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], - require => [ Package["hadoop-hdfs-datanode"], File["/etc/default/hadoop-hdfs-datanode"], File[$hadoop::common_hdfs::hdfs_data_dirs] ], + subscribe => [Package[$::hadoop::hadoop_hdfs_datanode_package], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], + require => [ Package[$::hadoop::hadoop_hdfs_datanode_package], File["/etc/default/hadoop-hdfs-datanode"], File[$hadoop::common_hdfs::hdfs_data_dirs] ], } Kerberos::Host_keytab <| title == "hdfs" |> -> Service["hadoop-hdfs-datanode"] Service<| title == 'hadoop-hdfs-namenode' |> -> Service['hadoop-hdfs-datanode'] @@ -497,7 +528,7 @@ owner => hdfs, group => hdfs, mode => '755', - require => [ Package["hadoop-hdfs"] ], + require => [ Package[$::hadoop::hadoop_hdfs_package] ], } } @@ -513,23 +544,23 @@ if ($hadoop_security_authentication == "kerberos") { kerberos::host_keytab { "httpfs": spnego => true, - require => Package["hadoop-httpfs"], + require => Package[$::hadoop::hadoop_httpfs_package], } } - package { "hadoop-httpfs": + package { $::hadoop::hadoop_httpfs_package: ensure => latest, require => Package["jdk"], } file { "/etc/hadoop/conf/httpfs-site.xml": content => template('hadoop/httpfs-site.xml'), - require => [Package["hadoop-httpfs"]], + require => [Package[$::hadoop::hadoop_httpfs_package]], } file { "/etc/hadoop/conf/httpfs-env.sh": content => template('hadoop/httpfs-env.sh'), - require => [Package["hadoop-httpfs"]], + require => [Package[$::hadoop::hadoop_httpfs_package]], } if $generate_secrets { @@ -545,15 +576,15 @@ content => $httpfs_signature_secret, # it's a password file - do not filebucket backup => false, - require => [Package["hadoop-httpfs"]], + require => [Package[$::hadoop::hadoop_httpfs_package]], } service { "hadoop-httpfs": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-httpfs"], File["/etc/hadoop/conf/httpfs-site.xml"], File["/etc/hadoop/conf/httpfs-env.sh"], File["/etc/hadoop/conf/httpfs-signature.secret"], + subscribe => [Package[$::hadoop::hadoop_httpfs_package], File["/etc/hadoop/conf/httpfs-site.xml"], File["/etc/hadoop/conf/httpfs-env.sh"], File["/etc/hadoop/conf/httpfs-signature.secret"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"]], - require => [ Package["hadoop-httpfs"] ], + require => [ Package[$::hadoop::hadoop_httpfs_package] ], } Kerberos::Host_keytab <| 
title == "httpfs" |> -> Service["hadoop-httpfs"] } @@ -571,18 +602,18 @@ if ($hadoop_security_authentication == "kerberos") { kerberos::host_keytab { "kms": spnego => true, - require => Package["hadoop-kms"], + require => Package[$::hadoop::hadoop_kms_package], } } - package { "hadoop-kms": + package { $::hadoop::hadoop_kms_package: ensure => latest, require => Package["jdk"], } file { "/etc/hadoop/conf/kms-site.xml": content => template('hadoop/kms-site.xml'), - require => [Package["hadoop-kms"]], + require => [Package[$::hadoop::hadoop_kms_package]], } file { "/etc/hadoop/conf/kms-env.sh": @@ -590,7 +621,7 @@ owner => 'kms', group => 'kms', mode => '0400', - require => [Package["hadoop-kms"]], + require => [Package[$::hadoop::hadoop_kms_package]], } file { "/etc/hadoop/conf/kms.keystore.password": @@ -598,7 +629,7 @@ owner => 'kms', group => 'kms', mode => '0400', - require => [Package["hadoop-kms"]], + require => [Package[$::hadoop::hadoop_kms_package]], } if $generate_secrets { @@ -614,15 +645,15 @@ content => $kms_signature_secret, # it's a password file - do not filebucket backup => false, - require => [Package["hadoop-kms"]], + require => [Package[$::hadoop::hadoop_kms_package]], } service { "hadoop-kms": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-kms"], File["/etc/hadoop/conf/kms-site.xml"], File["/etc/hadoop/conf/kms-env.sh"], File["/etc/hadoop/conf/kms-signature.secret"], + subscribe => [Package[$::hadoop::hadoop_kms_package], File["/etc/hadoop/conf/kms-site.xml"], File["/etc/hadoop/conf/kms-env.sh"], File["/etc/hadoop/conf/kms-signature.secret"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"]], - require => [ Package["hadoop-kms"] ], + require => [ Package[$::hadoop::hadoop_kms_package] ], } Kerberos::Host_keytab <| title == "kms" |> -> Service["hadoop-kms"] } @@ -684,7 +715,7 @@ owner => 'hdfs', group => 'hdfs', mode => '0700', - require => Package["hadoop-hdfs"], + require => Package[$::hadoop::hadoop_hdfs_package], } file { $hadoop::common_hdfs::sshfence_keypath: @@ -731,7 +762,7 @@ } } - package { "hadoop-hdfs-namenode": + package { $::hadoop::hadoop_hdfs_namenode_package: ensure => latest, require => Package["jdk"], } @@ -739,13 +770,13 @@ service { "hadoop-hdfs-namenode": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-hdfs-namenode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], - require => [Package["hadoop-hdfs-namenode"]], + subscribe => [Package[$::hadoop::hadoop_hdfs_namenode_package], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], + require => [Package[$::hadoop::hadoop_hdfs_namenode_package]], } Kerberos::Host_keytab <| title == "hdfs" |> -> Exec <| tag == "namenode-format" |> -> Service["hadoop-hdfs-namenode"] if ($hadoop::common_hdfs::ha == "auto") { - package { "hadoop-hdfs-zkfc": + package { $::hadoop::hadoop_hdfs_zkfc_package: ensure => latest, require => Package["jdk"], } @@ -753,8 +784,8 @@ service { "hadoop-hdfs-zkfc": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-hdfs-zkfc"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], - require => [Package["hadoop-hdfs-zkfc"]], + subscribe => [Package[$::hadoop::hadoop_hdfs_zkfc_package], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], 
File["/etc/hadoop/conf/hadoop-env.sh"]], + require => [Package[$::hadoop::hadoop_hdfs_zkfc_package]], } Service <| title == "hadoop-hdfs-zkfc" |> -> Service <| title == "hadoop-hdfs-namenode" |> } @@ -767,7 +798,7 @@ command => "/bin/bash -c 'hdfs namenode -format -nonInteractive >> /var/lib/hadoop-hdfs/nn.format.log 2>&1'", returns => [ 0, 1], creates => "${hadoop::common_hdfs::namenode_data_dirs[0]}/current/VERSION", - require => [ Package["hadoop-hdfs-namenode"], File[$hadoop::common_hdfs::namenode_data_dirs], File["/etc/hadoop/conf/hdfs-site.xml"] ], + require => [ Package[$::hadoop::hadoop_hdfs_namenode_package], File[$hadoop::common_hdfs::namenode_data_dirs], File["/etc/hadoop/conf/hdfs-site.xml"] ], tag => "namenode-format", } @@ -777,7 +808,7 @@ user => "hdfs", command => "/bin/bash -c 'hdfs zkfc -formatZK -nonInteractive >> /var/lib/hadoop-hdfs/zk.format.log 2>&1'", returns => [ 0, 2], - require => [ Package["hadoop-hdfs-zkfc"], File["/etc/hadoop/conf/hdfs-site.xml"] ], + require => [ Package[$::hadoop::hadoop_hdfs_zkfc_package], File["/etc/hadoop/conf/hdfs-site.xml"] ], tag => "namenode-format", } Service <| title == "zookeeper-server" |> -> Exec <| title == "namenode zk format" |> @@ -800,7 +831,7 @@ # first namenode might be rebooting just now so try for some time command => "/bin/bash -c 'hdfs namenode -bootstrapStandby $retry_params >> /var/lib/hadoop-hdfs/nn.bootstrap-standby.log 2>&1'", creates => "${hadoop::common_hdfs::namenode_data_dirs[0]}/current/VERSION", - require => [ Package["hadoop-hdfs-namenode"], File[$hadoop::common_hdfs::namenode_data_dirs], File["/etc/hadoop/conf/hdfs-site.xml"] ], + require => [ Package[$::hadoop::hadoop_hdfs_namenode_package], File[$hadoop::common_hdfs::namenode_data_dirs], File["/etc/hadoop/conf/hdfs-site.xml"] ], tag => "namenode-format", } } elsif ($hadoop::common_hdfs::ha != "disabled") { @@ -814,7 +845,7 @@ file { "/etc/default/hadoop-hdfs-namenode": content => template('hadoop/hadoop-hdfs'), - require => [Package["hadoop-hdfs-namenode"]], + require => [Package[$::hadoop::hadoop_hdfs_namenode_package]], } hadoop::create_storage_dir { $hadoop::common_hdfs::namenode_data_dirs: } -> @@ -823,7 +854,7 @@ owner => hdfs, group => hdfs, mode => '700', - require => [Package["hadoop-hdfs"]], + require => [Package[$::hadoop::hadoop_hdfs_package]], } } @@ -857,7 +888,7 @@ class secondarynamenode { include hadoop::common_hdfs - package { "hadoop-hdfs-secondarynamenode": + package { $::hadoop::hadoop_hdfs_secondarynamenode_package: ensure => latest, require => Package["jdk"], } @@ -865,14 +896,14 @@ file { "/etc/default/hadoop-hdfs-secondarynamenode": content => template('hadoop/hadoop-hdfs'), - require => [Package["hadoop-hdfs-secondarynamenode"]], + require => [Package[$::hadoop::hadoop_hdfs_secondarynamenode_package]], } service { "hadoop-hdfs-secondarynamenode": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-hdfs-secondarynamenode"], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], - require => [Package["hadoop-hdfs-secondarynamenode"]], + subscribe => [Package[$::hadoop::hadoop_hdfs_secondarynamenode_package], File["/etc/hadoop/conf/core-site.xml"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/hadoop-env.sh"]], + require => [Package[$::hadoop::hadoop_hdfs_secondarynamenode_package]], } Kerberos::Host_keytab <| title == "hdfs" |> -> Service["hadoop-hdfs-secondarynamenode"] } @@ -880,7 +911,7 @@ class journalnode { include 
hadoop::common_hdfs - package { "hadoop-hdfs-journalnode": + package { $::hadoop::hadoop_hdfs_journalnode_package: ensure => latest, require => Package["jdk"], } @@ -890,9 +921,9 @@ service { "hadoop-hdfs-journalnode": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-hdfs-journalnode"], File["/etc/hadoop/conf/hadoop-env.sh"], + subscribe => [Package[$::hadoop::hadoop_hdfs_journalnode_package], File["/etc/hadoop/conf/hadoop-env.sh"], File["/etc/hadoop/conf/hdfs-site.xml"], File["/etc/hadoop/conf/core-site.xml"]], - require => [ Package["hadoop-hdfs-journalnode"], File[$journalnode_cluster_journal_dir] ], + require => [ Package[$::hadoop::hadoop_hdfs_journalnode_package], File[$journalnode_cluster_journal_dir] ], } hadoop::create_storage_dir { [$hadoop::common_hdfs::journalnode_edits_dir, $journalnode_cluster_journal_dir]: } -> @@ -901,7 +932,7 @@ owner => 'hdfs', group => 'hdfs', mode => '755', - require => [Package["hadoop-hdfs"]], + require => [Package[$::hadoop::hadoop_hdfs_package]], } } @@ -909,7 +940,7 @@ class resourcemanager { include hadoop::common_yarn - package { "hadoop-yarn-resourcemanager": + package { $::hadoop::hadoop_yarn_resourcemanager_package: ensure => latest, require => Package["jdk"], } @@ -917,9 +948,9 @@ service { "hadoop-yarn-resourcemanager": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-yarn-resourcemanager"], File["/etc/hadoop/conf/hadoop-env.sh"], + subscribe => [Package[$::hadoop::hadoop_yarn_resourcemanager_package], File["/etc/hadoop/conf/hadoop-env.sh"], File["/etc/hadoop/conf/yarn-site.xml"], File["/etc/hadoop/conf/core-site.xml"]], - require => [ Package["hadoop-yarn-resourcemanager"] ], + require => [ Package[$::hadoop::hadoop_yarn_resourcemanager_package] ], } Kerberos::Host_keytab <| tag == "mapreduce" |> -> Service["hadoop-yarn-resourcemanager"] } @@ -927,7 +958,7 @@ class proxyserver { include hadoop::common_yarn - package { "hadoop-yarn-proxyserver": + package { $::hadoop::hadoop_yarn_proxyserver_package: ensure => latest, require => Package["jdk"], } @@ -935,9 +966,9 @@ service { "hadoop-yarn-proxyserver": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-yarn-proxyserver"], File["/etc/hadoop/conf/hadoop-env.sh"], + subscribe => [Package[$::hadoop::hadoop_yarn_proxyserver_package], File["/etc/hadoop/conf/hadoop-env.sh"], File["/etc/hadoop/conf/yarn-site.xml"], File["/etc/hadoop/conf/core-site.xml"]], - require => [ Package["hadoop-yarn-proxyserver"] ], + require => [ Package[$::hadoop::hadoop_yarn_proxyserver_package] ], } Kerberos::Host_keytab <| tag == "mapreduce" |> -> Service["hadoop-yarn-proxyserver"] } @@ -945,7 +976,7 @@ class historyserver { include hadoop::common_mapred_app - package { "hadoop-mapreduce-historyserver": + package { $::hadoop::hadoop_mapreduce_historyserver_package: ensure => latest, require => Package["jdk"], } @@ -953,9 +984,9 @@ service { "hadoop-mapreduce-historyserver": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-mapreduce-historyserver"], File["/etc/hadoop/conf/hadoop-env.sh"], + subscribe => [Package[$::hadoop::hadoop_mapreduce_historyserver_package], File["/etc/hadoop/conf/hadoop-env.sh"], File["/etc/hadoop/conf/yarn-site.xml"], File["/etc/hadoop/conf/core-site.xml"]], - require => [Package["hadoop-mapreduce-historyserver"]], + require => [Package[$::hadoop::hadoop_mapreduce_historyserver_package]], } Kerberos::Host_keytab <| tag == "mapreduce" |> -> Service["hadoop-mapreduce-historyserver"] } @@ -965,17 +996,17 @@ include 
hadoop::common_mapred_app include hadoop::common_yarn - package { "hadoop-yarn-nodemanager": + package { $::hadoop::hadoop_yarn_nodemanager_package: ensure => latest, require => Package["jdk"], } service { "hadoop-yarn-nodemanager": ensure => running, hasstatus => true, - subscribe => [Package["hadoop-yarn-nodemanager"], File["/etc/hadoop/conf/hadoop-env.sh"], + subscribe => [Package[$::hadoop::hadoop_yarn_nodemanager_package], File["/etc/hadoop/conf/hadoop-env.sh"], File["/etc/hadoop/conf/yarn-site.xml"], File["/etc/hadoop/conf/core-site.xml"]], - require => [ Package["hadoop-yarn-nodemanager"], File[$hadoop::common_yarn::yarn_data_dirs] ], + require => [ Package[$::hadoop::hadoop_yarn_nodemanager_package], File[$hadoop::common_yarn::yarn_data_dirs] ], } Kerberos::Host_keytab <| tag == "mapreduce" |> -> Service["hadoop-yarn-nodemanager"] @@ -985,13 +1016,13 @@ owner => yarn, group => yarn, mode => '755', - require => [Package["hadoop-yarn"]], + require => [Package[$::hadoop::hadoop_yarn_package]], } if ($operatingsystem == 'openEuler') { exec { "usermod yarn": command => "/usr/sbin/usermod -G root yarn", - require => Package["hadoop-yarn-nodemanager"], + require => Package[$::hadoop::hadoop_yarn_nodemanager_package], } } } @@ -1005,7 +1036,7 @@ owner => yarn, group => yarn, mode => '755', - require => [Package["hadoop-mapreduce"]], + require => [Package[$::hadoop::hadoop_mapreduce_package]], } } @@ -1022,7 +1053,7 @@ package { $hadoop_client_packages: ensure => latest, - require => [Package["jdk"], Package["hadoop"], Package["hadoop-hdfs"], Package["hadoop-mapreduce"]], + require => [Package["jdk"], Package[$::hadoop::hadoop_package], Package[$::hadoop::hadoop_hdfs_package], Package[$::hadoop::hadoop_mapreduce_package]], } } diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc index caa8bd474b..d1ac751eba 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc @@ -15,9 +15,9 @@ HADOOP_NAME="datanode" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-dfsrouter.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-dfsrouter.svc index c4eb58819d..073a209619 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-dfsrouter.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-dfsrouter.svc @@ -15,9 +15,9 @@ HADOOP_NAME="dfsrouter" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-journalnode.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-journalnode.svc index a0a43a12c0..20c67fc7d5 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-journalnode.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-journalnode.svc @@
-15,9 +15,9 @@ HADOOP_NAME="journalnode" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc index eecfc44be7..b61111be13 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc @@ -15,9 +15,9 @@ HADOOP_NAME="namenode" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc index 625ca3e1b2..8799d2212b 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc @@ -15,9 +15,9 @@ HADOOP_NAME="secondarynamenode" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-hdfs-zkfc.svc b/bigtop-packages/src/common/hadoop/hadoop-hdfs-zkfc.svc index c055deff3e..694baf890f 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-hdfs-zkfc.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-hdfs-zkfc.svc @@ -15,9 +15,9 @@ HADOOP_NAME="zkfc" DAEMON="hadoop-hdfs-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh" +EXEC_PATH="@hadoop_home/sbin/hadoop-daemon.sh" SVC_USER="hdfs" -WORKING_DIR="/var/lib/hadoop-hdfs" +WORKING_DIR="@var_lib_hadoop_hdfs" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc b/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc index 7373202133..41c729f715 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc @@ -15,7 +15,7 @@ HADOOP_NAME="httpfs" DAEMON="hadoop-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/httpfs.sh" +EXEC_PATH="@hadoop_home/sbin/httpfs.sh" SVC_USER="$HADOOP_NAME" WORKING_DIR="/var/run/hadoop-httpfs" DAEMON_FLAGS="$HADOOP_NAME" diff --git a/bigtop-packages/src/common/hadoop/hadoop-kms.svc b/bigtop-packages/src/common/hadoop/hadoop-kms.svc index 50d83e42f6..6e5db21325 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-kms.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-kms.svc @@ -15,7 +15,7 @@ HADOOP_NAME="kms" DAEMON="hadoop-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop/sbin/kms.sh" +EXEC_PATH="@hadoop_home/sbin/kms.sh" 
SVC_USER="$HADOOP_NAME" WORKING_DIR="/var/run/hadoop-kms" DAEMON_FLAGS="$HADOOP_NAME" diff --git a/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc b/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc index 79670d7ed9..c98d2e1b0e 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc @@ -15,9 +15,9 @@ HADOOP_NAME="historyserver" DAEMON="hadoop-mapreduce-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh" +EXEC_PATH="@hadoop_mapreduce_home/sbin/mr-jobhistory-daemon.sh" SVC_USER="mapred" -WORKING_DIR="/var/lib/hadoop-mapreduce" +WORKING_DIR="@var_lib_hadoop_mapreduce" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-mapreduce/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc b/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc index e74aa2847d..8dfe085c78 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc @@ -15,9 +15,9 @@ HADOOP_NAME="nodemanager" DAEMON="hadoop-yarn-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh" +EXEC_PATH="@hadoop_yarn_home/sbin/yarn-daemon.sh" SVC_USER="yarn" -WORKING_DIR="/var/lib/hadoop-yarn" +WORKING_DIR="@var_lib_hadoop_yarn" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-yarn/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc b/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc index 4839751d1b..13af78512c 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc @@ -15,9 +15,9 @@ HADOOP_NAME="proxyserver" DAEMON="hadoop-yarn-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh" +EXEC_PATH="@hadoop_yarn_home/sbin/yarn-daemon.sh" SVC_USER="yarn" -WORKING_DIR="/var/lib/hadoop-yarn" +WORKING_DIR="@var_lib_hadoop_yarn" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-yarn/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc b/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc index f56709ce84..1185fa782d 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc @@ -15,9 +15,9 @@ HADOOP_NAME="resourcemanager" DAEMON="hadoop-yarn-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh" +EXEC_PATH="@hadoop_yarn_home/sbin/yarn-daemon.sh" SVC_USER="yarn" -WORKING_DIR="/var/lib/hadoop-yarn" +WORKING_DIR="@var_lib_hadoop_yarn" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-yarn/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-yarn-router.svc b/bigtop-packages/src/common/hadoop/hadoop-yarn-router.svc index 50a9abfb18..a47cf7ffa2 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-yarn-router.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-yarn-router.svc @@ -15,9 +15,9 @@ HADOOP_NAME="router" DAEMON="hadoop-yarn-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh" 
+EXEC_PATH="@hadoop_yarn_home/sbin/yarn-daemon.sh" SVC_USER="yarn" -WORKING_DIR="/var/lib/hadoop-yarn" +WORKING_DIR="@var_lib_hadoop_yarn" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-yarn/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/common/hadoop/hadoop-yarn-timelineserver.svc b/bigtop-packages/src/common/hadoop/hadoop-yarn-timelineserver.svc index 511ef35901..a0d1d36494 100644 --- a/bigtop-packages/src/common/hadoop/hadoop-yarn-timelineserver.svc +++ b/bigtop-packages/src/common/hadoop/hadoop-yarn-timelineserver.svc @@ -15,9 +15,9 @@ HADOOP_NAME="timelineserver" DAEMON="hadoop-yarn-$HADOOP_NAME" DESC="Hadoop $HADOOP_NAME" -EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh" +EXEC_PATH="@hadoop_yarn_home/sbin/yarn-daemon.sh" SVC_USER="yarn" -WORKING_DIR="/var/lib/hadoop-yarn" +WORKING_DIR="@var_lib_hadoop_yarn" DAEMON_FLAGS="$HADOOP_NAME" CONF_DIR="/etc/hadoop/conf" PIDFILE="/var/run/hadoop-yarn/hadoop-$SVC_USER-$HADOOP_NAME.pid" diff --git a/bigtop-packages/src/deb/hadoop/control b/bigtop-packages/src/deb/hadoop/control index 7b2a538ffe..766614d913 100644 --- a/bigtop-packages/src/deb/hadoop/control +++ b/bigtop-packages/src/deb/hadoop/control @@ -21,7 +21,7 @@ Build-Depends: debhelper (>= 7.0.50~), liblzo2-dev, libzip-dev, sharutils, g++ ( Standards-Version: 3.9.1 Homepage: http://hadoop.apache.org/core/ -Package: hadoop +Package: hadoop-pkgsuffix Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), zookeeper (>= 3.4.0), psmisc, netcat-openbsd, libssl-dev Description: Hadoop is a software platform for processing vast amounts of data @@ -45,123 +45,123 @@ Description: Hadoop is a software platform for processing vast amounts of data nodes around the cluster. MapReduce can then process the data where it is located. -Package: hadoop-hdfs +Package: hadoop-pkgsuffix-hdfs Architecture: any -Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), bigtop-groovy, hadoop (= ${binary:Version}), bigtop-jsvc +Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), bigtop-groovy, hadoop-pkgsuffix (= ${binary:Version}), bigtop-jsvc Description: The Hadoop Distributed File System - Hadoop Distributed File System (HDFS) is the primary storage system used by - Hadoop applications. HDFS creates multiple replicas of data blocks and distributes - them on compute nodes throughout a cluster to enable reliable, extremely rapid + Hadoop Distributed File System (HDFS) is the primary storage system used by + Hadoop applications. HDFS creates multiple replicas of data blocks and distributes + them on compute nodes throughout a cluster to enable reliable, extremely rapid computations. -Package: hadoop-yarn +Package: hadoop-pkgsuffix-yarn Architecture: any -Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), hadoop (= ${binary:Version}) +Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), hadoop-pkgsuffix (= ${binary:Version}) Description: The Hadoop NextGen MapReduce (YARN) YARN (Hadoop NextGen MapReduce) is a general purpose data-computation framework. - The fundamental idea of YARN is to split up the two major functionalities of the + The fundamental idea of YARN is to split up the two major functionalities of the JobTracker, resource management and job scheduling/monitoring, into separate daemons: ResourceManager and NodeManager. . 
- The ResourceManager is the ultimate authority that arbitrates resources among all + The ResourceManager is the ultimate authority that arbitrates resources among all the applications in the system. The NodeManager is a per-node slave managing allocation - of computational resources on a single node. Both work in support of per-application + of computational resources on a single node. Both work in support of per-application ApplicationMaster (AM). . - An ApplicationMaster is, in effect, a framework specific library and is tasked with - negotiating resources from the ResourceManager and working with the NodeManager(s) to - execute and monitor the tasks. + An ApplicationMaster is, in effect, a framework specific library and is tasked with + negotiating resources from the ResourceManager and working with the NodeManager(s) to + execute and monitor the tasks. -Package: hadoop-mapreduce +Package: hadoop-pkgsuffix-mapreduce Architecture: any -Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), hadoop-yarn (= ${binary:Version}) +Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, bigtop-utils (>= 0.7), hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: The Hadoop MapReduce (MRv2) - Hadoop MapReduce is a programming model and software framework for writing applications + Hadoop MapReduce is a programming model and software framework for writing applications that rapidly process vast amounts of data in parallel on large clusters of compute nodes. -Package: hadoop-hdfs-fuse +Package: hadoop-pkgsuffix-hdfs-fuse Architecture: any -Depends: ${shlibs:Depends}, hadoop-hdfs (= ${binary:Version}), hadoop-client (= ${binary:Version}), bigtop-utils (>= 0.7) +Depends: ${shlibs:Depends}, hadoop-pkgsuffix-hdfs (= ${binary:Version}), hadoop-pkgsuffix-client (= ${binary:Version}), bigtop-utils (>= 0.7) Pre-Depends: fuse Enhances: hadoop Description: Mountable HDFS These projects (enumerated below) allow HDFS to be mounted (on most flavors of Unix) as a standard file system using -Package: hadoop-doc +Package: hadoop-pkgsuffix-doc Architecture: all Section: doc Description: Hadoop Documentation Documentation for Hadoop -Package: hadoop-conf-pseudo +Package: hadoop-pkgsuffix-conf-pseudo Architecture: any -Depends: hadoop (= ${binary:Version}), hadoop-hdfs-namenode (= ${binary:Version}), - hadoop-hdfs-datanode (= ${binary:Version}), hadoop-hdfs-secondarynamenode (= ${binary:Version}), - hadoop-yarn-resourcemanager (= ${binary:Version}), hadoop-yarn-nodemanager (= ${binary:Version}), - hadoop-mapreduce-historyserver (= ${binary:Version}) +Depends: hadoop-pkgsuffix (= ${binary:Version}), hadoop-pkgsuffix-hdfs-namenode (= ${binary:Version}), + hadoop-pkgsuffix-hdfs-datanode (= ${binary:Version}), hadoop-pkgsuffix-hdfs-secondarynamenode (= ${binary:Version}), + hadoop-pkgsuffix-yarn-resourcemanager (= ${binary:Version}), hadoop-pkgsuffix-yarn-nodemanager (= ${binary:Version}), + hadoop-pkgsuffix-mapreduce-historyserver (= ${binary:Version}) Description: Pseudo-distributed Hadoop configuration Contains configuration files for a "pseudo-distributed" Hadoop deployment. In this mode, each of the hadoop components runs as a separate Java process, but all on the same machine. 
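The control file now names every binary package with a literal hadoop-pkgsuffix placeholder instead of a fixed hadoop prefix, and every intra-Hadoop Depends line is updated to match, so a suffixed build cannot accidentally depend on unsuffixed packages. The placeholder is presumably rewritten at build time from the PKG_NAME_SUFFIX variable that the debian/rules changes later in this patch pick up; the substitution step itself is not part of this diff, so the following is only an illustrative sketch:

    # Illustrative only: the actual hook location in Bigtop's build is assumed.
    # PKG_NAME_SUFFIX is the same variable debian/rules reads below.
    sed -i "s/hadoop-pkgsuffix/hadoop${PKG_NAME_SUFFIX}/g" debian/control

With an empty PKG_NAME_SUFFIX this collapses back to today's package names, which keeps the default (unsuffixed) build unchanged.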
-Package: hadoop-mapreduce-historyserver +Package: hadoop-pkgsuffix-mapreduce-historyserver Architecture: any -Depends: hadoop-mapreduce (= ${binary:Version}), hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-mapreduce (= ${binary:Version}), hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: MapReduce History Server The History server keeps records of the different activities being performed on a Apache Hadoop cluster -Package: hadoop-yarn-nodemanager +Package: hadoop-pkgsuffix-yarn-nodemanager Architecture: any -Depends: hadoop-yarn (= ${binary:Version}) +Depends: hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: YARN Node Manager The NodeManager is the per-machine framework agent who is responsible for containers, monitoring their resource usage (cpu, memory, disk, network) and reporting the same to the ResourceManager/Scheduler. -Package: hadoop-yarn-resourcemanager +Package: hadoop-pkgsuffix-yarn-resourcemanager Architecture: any -Depends: hadoop-yarn (= ${binary:Version}) +Depends: hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: YARN Resource Manager The resource manager manages the global assignment of compute resources to applications -Package: hadoop-yarn-proxyserver +Package: hadoop-pkgsuffix-yarn-proxyserver Architecture: any -Depends: hadoop-yarn (= ${binary:Version}) +Depends: hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: YARN Web Proxy The web proxy server sits in front of the YARN application master web UI. -Package: hadoop-yarn-timelineserver +Package: hadoop-pkgsuffix-yarn-timelineserver Architecture: any -Depends: hadoop-yarn (= ${binary:Version}) +Depends: hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: YARN Timeline Server - Storage and retrieval of applications' current as well as historic - information in a generic fashion is solved in YARN through the + Storage and retrieval of applications' current as well as historic + information in a generic fashion is solved in YARN through the Timeline Server. - -Package: hadoop-yarn-router + +Package: hadoop-pkgsuffix-yarn-router Architecture: any -Depends: hadoop-yarn (= ${binary:Version}) +Depends: hadoop-pkgsuffix-yarn (= ${binary:Version}) Description: YARN Router Server YARN Router Server which supports YARN Federation. -Package: hadoop-hdfs-namenode +Package: hadoop-pkgsuffix-hdfs-namenode Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: The Hadoop namenode manages the block locations of HDFS files The Hadoop Distributed Filesystem (HDFS) requires one unique server, the namenode, which manages the block locations of files on the filesystem. -Package: hadoop-hdfs-secondarynamenode +Package: hadoop-pkgsuffix-hdfs-secondarynamenode Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: Hadoop Secondary namenode The Secondary Name Node periodically compacts the Name Node EditLog into a checkpoint. This compaction ensures that Name Node restarts do not incur unnecessary downtime. -Package: hadoop-hdfs-zkfc +Package: hadoop-pkgsuffix-hdfs-zkfc Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: Hadoop HDFS failover controller The Hadoop HDFS failover controller is a ZooKeeper client which also monitors and manages the state of the NameNode. 
Each of the machines @@ -169,39 +169,39 @@ Description: Hadoop HDFS failover controller for: Health monitoring, ZooKeeper session management, ZooKeeper-based election. -Package: hadoop-hdfs-journalnode -Provides: hadoop-hdfs-journalnode +Package: hadoop-pkgsuffix-hdfs-journalnode +Provides: hadoop-pkgsuffix-hdfs-journalnode Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) -Description: Hadoop HDFS JournalNode - The HDFS JournalNode is responsible for persisting NameNode edit logs. - In a typical deployment the JournalNode daemon runs on at least three +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) +Description: Hadoop HDFS JournalNode + The HDFS JournalNode is responsible for persisting NameNode edit logs. + In a typical deployment the JournalNode daemon runs on at least three separate machines in the cluster. -Package: hadoop-hdfs-datanode +Package: hadoop-pkgsuffix-hdfs-datanode Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: Hadoop Data Node The Data Nodes in the Hadoop Cluster are responsible for serving up blocks of data over the network to Hadoop Distributed Filesystem (HDFS) clients. -Package: hadoop-hdfs-dfsrouter +Package: hadoop-pkgsuffix-hdfs-dfsrouter Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: Hadoop HDFS Router HDFS Router Server which supports Router Based Federation. Package: libhdfs0 Architecture: any -Depends: hadoop (= ${binary:Version}), ${shlibs:Depends} +Depends: hadoop-pkgsuffix (= ${binary:Version}), ${shlibs:Depends} Description: Hadoop Filesystem Library Hadoop Filesystem Library Package: libhdfs0-dev Architecture: any Section: libdevel -Depends: hadoop (= ${binary:Version}), libhdfs0 (= ${binary:Version}) +Depends: hadoop-pkgsuffix (= ${binary:Version}), libhdfs0 (= ${binary:Version}) Description: Development support for libhdfs0 Includes examples and header files for accessing HDFS from C @@ -217,22 +217,22 @@ Depends: libhdfspp (= ${binary:Version}) Description: Development support for libhdfspp Includes header files for accessing HDFS from C++ -Package: hadoop-httpfs +Package: hadoop-pkgsuffix-httpfs Architecture: any -Depends: hadoop-hdfs (= ${binary:Version}) +Depends: hadoop-pkgsuffix-hdfs (= ${binary:Version}) Description: HTTPFS for Hadoop The server providing HTTP REST API support for the complete FileSystem/FileContext interface in HDFS. -Package: hadoop-kms +Package: hadoop-pkgsuffix-kms Architecture: any -Depends: hadoop (= ${binary:Version}), adduser +Depends: hadoop-pkgsuffix (= ${binary:Version}), adduser Description: KMS for Hadoop The server providing cryptographic key management based on Hadoop KeyProvider API. -Package: hadoop-client +Package: hadoop-pkgsuffix-client Architecture: any -Depends: hadoop (= ${binary:Version}), hadoop-hdfs (= ${binary:Version}), - hadoop-yarn (= ${binary:Version}), hadoop-mapreduce (= ${binary:Version}) +Depends: hadoop-pkgsuffix (= ${binary:Version}), hadoop-pkgsuffix-hdfs (= ${binary:Version}), + hadoop-pkgsuffix-yarn (= ${binary:Version}), hadoop-pkgsuffix-mapreduce (= ${binary:Version}) Description: Hadoop client side dependencies - Installation of this package will provide you with all the dependencies for Hadoop clients. + Installation of this package will provide you with all the dependencies for Hadoop clients. 
\ No newline at end of file diff --git a/bigtop-packages/src/deb/hadoop/hadoop-client.install b/bigtop-packages/src/deb/hadoop/hadoop-client.install deleted file mode 100644 index 3695fd88ba..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-client.install +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/hadoop/client diff --git a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.install b/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.install deleted file mode 100644 index 6320c6ad31..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.install +++ /dev/null @@ -1 +0,0 @@ -/etc/hadoop/conf.pseudo diff --git a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.postinst b/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.postinst index 9c404b9ef2..d2acccd3aa 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.postinst @@ -36,7 +36,7 @@ set -e case "$1" in configure) - update-alternatives --install /etc/hadoop/conf hadoop-conf /etc/hadoop/conf.pseudo 30 + update-alternatives --install /etc/hadoop/conf hadoop-conf @etc_hadoop@/conf.pseudo 30 ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.prerm b/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.prerm index 97462237d8..214e36509f 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.prerm +++ b/bigtop-packages/src/deb/hadoop/hadoop-conf-pseudo.prerm @@ -35,7 +35,7 @@ set -e case "$1" in remove|upgrade|deconfigure) - update-alternatives --remove hadoop-conf /etc/hadoop/conf.pseudo > /dev/null || : + update-alternatives --remove hadoop-conf @etc_hadoop@/conf.pseudo > /dev/null || : ;; failed-upgrade) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-doc.dirs b/bigtop-packages/src/deb/hadoop/hadoop-doc.dirs deleted file mode 100644 index 7064fc0773..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-doc.dirs +++ /dev/null @@ -1 +0,0 @@ -/usr/share/doc/hadoop-doc/ diff --git a/bigtop-packages/src/deb/hadoop/hadoop-doc.install b/bigtop-packages/src/deb/hadoop/hadoop-doc.install deleted file mode 100644 index f7989bbe2a..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-doc.install +++ /dev/null @@ -1 +0,0 @@ -/usr/share/doc/hadoop-doc diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs-fuse.dirs b/bigtop-packages/src/deb/hadoop/hadoop-hdfs-fuse.dirs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs-fuse.install b/bigtop-packages/src/deb/hadoop/hadoop-hdfs-fuse.install deleted file mode 100644 index 1df2f9750a..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-hdfs-fuse.install +++ /dev/null @@ -1,3 +0,0 @@ -/etc/default/hadoop-fuse -/usr/bin/hadoop-fuse-dfs -/usr/lib/hadoop/bin/fuse_dfs diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.dirs b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.dirs deleted file mode 100644 index ae98a12f8a..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.dirs +++ /dev/null @@ -1,6 +0,0 @@ -/etc/hadoop/conf.empty/ -/usr/lib/hadoop/libexec -/usr/lib/hadoop-hdfs -/usr/bin -/var/lib/hadoop-hdfs/cache -/var/log/hadoop-hdfs diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.install b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.install deleted file mode 100644 index 1851ea933d..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.install +++ /dev/null @@ -1,11 +0,0 @@ -/etc/security/limits.d/hdfs.conf -/etc/hadoop/conf.empty/hdfs-site.xml -/usr/lib/hadoop-hdfs 
-/usr/lib/hadoop/libexec/hdfs-config.sh -/usr/lib/hadoop/libexec/init-hdfs.sh -/usr/lib/hadoop/libexec/init-hcfs.json -/usr/lib/hadoop/libexec/init-hcfs.groovy -/usr/bin/hdfs -/var/lib/hadoop-hdfs -/var/log/hadoop-hdfs -/var/run/hadoop-hdfs diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.postinst b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.postinst index 2d972675d6..7328f317cc 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.postinst @@ -21,13 +21,13 @@ set -e case "$1" in configure) - mkdir -p /var/log/hadoop-hdfs /var/run/hadoop-hdfs /var/lib/hadoop-hdfs/cache /usr/lib/hadoop/logs || : + mkdir -p /var/log/hadoop-hdfs /var/run/hadoop-hdfs @var_lib_hdfs@/cache @usr_lib_hadoop@/logs || : chgrp -R hadoop /var/log/hadoop-hdfs /var/run/hadoop-hdfs chmod g+w /var/run/hadoop-hdfs /var/log/hadoop-hdfs - chown hdfs:hadoop /var/lib/hadoop-hdfs/ /var/lib/hadoop-hdfs/cache - chmod 0755 /var/lib/hadoop-hdfs - chmod 1777 /var/lib/hadoop-hdfs/cache - chmod 1777 /usr/lib/hadoop/logs + chown hdfs:hadoop @var_lib_hdfs@ @var_lib_hdfs@/cache + chmod 0755 @var_lib_hdfs@ + chmod 1777 @var_lib_hdfs@/cache + chmod 1777 @usr_lib_hadoop@/logs ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.preinst b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.preinst index 72bdc3c28c..d29988eeb1 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-hdfs.preinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-hdfs.preinst @@ -38,7 +38,7 @@ case "$1" in adduser \ --system \ --ingroup hdfs \ - --home /var/lib/hadoop-hdfs \ + --home @var_lib_hdfs@ \ --gecos "Hadoop HDFS" \ --shell /bin/bash \ hdfs >/dev/null 2>/dev/null || : diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs deleted file mode 100644 index 58d9b1ecdd..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs +++ /dev/null @@ -1 +0,0 @@ -/var/log/hadoop-httpfs diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install deleted file mode 100644 index c8bb78fd25..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install +++ /dev/null @@ -1,4 +0,0 @@ -/etc/hadoop/conf.empty/httpfs-env.sh -/etc/hadoop/conf.empty/httpfs-log4j.properties -/etc/hadoop/conf.empty/httpfs-site.xml -/var/lib/hadoop-httpfs diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst index 6b224e2d0d..c08d2c515e 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst @@ -22,7 +22,7 @@ set -e case "$1" in configure) mkdir -p /var/log/hadoop-httpfs /var/run/hadoop-httpfs || : - chown httpfs:httpfs /var/run/hadoop-httpfs /var/log/hadoop-httpfs /var/lib/hadoop-httpfs + chown httpfs:httpfs /var/run/hadoop-httpfs /var/log/hadoop-httpfs @var_lib_httpfs@ ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.preinst b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.preinst index 55cac1c355..20cfaa9e7e 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.preinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.preinst @@ -37,7 +37,7 @@ case "$1" in adduser \ --system \ --ingroup httpfs \ - --home /var/lib/hadoop-httpfs \ + --home @var_lib_httpfs@ \ --gecos "Hadoop HTTPFS" \ --shell /bin/bash \ httpfs >/dev/null 2>/dev/null || : diff --git 
a/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs b/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs deleted file mode 100644 index d5ca2e84ea..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs +++ /dev/null @@ -1 +0,0 @@ -/var/log/hadoop-kms diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.install b/bigtop-packages/src/deb/hadoop/hadoop-kms.install deleted file mode 100644 index 64aa17f999..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-kms.install +++ /dev/null @@ -1,5 +0,0 @@ -/etc/hadoop/conf.empty/kms-acls.xml -/etc/hadoop/conf.empty/kms-env.sh -/etc/hadoop/conf.empty/kms-log4j.properties -/etc/hadoop/conf.empty/kms-site.xml -/var/lib/hadoop-kms diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst b/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst index e379225b33..43980c2a20 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst @@ -22,7 +22,7 @@ set -e case "$1" in configure) mkdir -p /var/log/hadoop-kms /var/run/hadoop-kms || : - chown kms:kms /var/lib/hadoop-kms /var/log/hadoop-kms /var/run/hadoop-kms + chown kms:kms @var_lib_kms@ /var/log/hadoop-kms /var/run/hadoop-kms ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.preinst b/bigtop-packages/src/deb/hadoop/hadoop-kms.preinst index 3218897e1d..dce71f5783 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-kms.preinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.preinst @@ -37,7 +37,7 @@ case "$1" in adduser \ --system \ --ingroup kms \ - --home /var/lib/hadoop-kms \ + --home @var_lib_kms@ \ --gecos "Hadoop KMS" \ --shell /bin/bash \ kms >/dev/null 2>/dev/null || : diff --git a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.dirs b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.dirs deleted file mode 100644 index d219a53a5c..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.dirs +++ /dev/null @@ -1,6 +0,0 @@ -/etc/hadoop/conf.empty/ -/usr/lib/hadoop/libexec -/usr/lib/hadoop-mapreduce -/usr/bin -/var/lib/hadoop-mapreduce/cache -/var/log/hadoop-mapreduce diff --git a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install deleted file mode 100644 index ee971a9191..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install +++ /dev/null @@ -1,10 +0,0 @@ -/etc/security/limits.d/mapreduce.conf -/etc/hadoop/conf.empty/mapred-site.xml -/etc/hadoop/conf.empty/mapred-env.sh -/etc/hadoop/conf.empty/mapred-queues.xml.template -/usr/lib/hadoop-mapreduce -/usr/lib/hadoop/libexec/mapred-config.sh -/usr/bin/mapred -/var/lib/hadoop-mapreduce -/var/log/hadoop-mapreduce -/var/run/hadoop-mapreduce diff --git a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.postinst b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.postinst index 8013cbb723..c568a5bf40 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.postinst @@ -21,12 +21,12 @@ set -e case "$1" in configure) - mkdir -p /var/log/hadoop-mapreduce /var/run/hadoop-mapreduce /var/lib/hadoop-mapreduce/cache || : + mkdir -p /var/log/hadoop-mapreduce /var/run/hadoop-mapreduce @var_lib_mapreduce@/cache || : chgrp -R hadoop /var/log/hadoop-mapreduce /var/run/hadoop-mapreduce chmod g+w /var/run/hadoop-mapreduce /var/log/hadoop-mapreduce - chown mapred:hadoop /var/lib/hadoop-mapreduce /var/lib/hadoop-mapreduce/cache - chmod 0755 /var/lib/hadoop-mapreduce - chmod 1777 
/var/lib/hadoop-mapreduce/cache + chown mapred:hadoop @var_lib_mapreduce@ @var_lib_mapreduce@/cache + chmod 0755 @var_lib_mapreduce@ + chmod 1777 @var_lib_mapreduce@/cache ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.preinst b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.preinst index 84e975e57f..fbefd06230 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.preinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.preinst @@ -37,7 +37,7 @@ case "$1" in adduser \ --system \ --ingroup mapred \ - --home /var/lib/hadoop-mapreduce \ + --home @var_lib_mapreduce@ \ --gecos "Hadoop MapReduce" \ --shell /bin/bash \ mapred >/dev/null 2>/dev/null || : diff --git a/bigtop-packages/src/deb/hadoop/hadoop-yarn.dirs b/bigtop-packages/src/deb/hadoop/hadoop-yarn.dirs deleted file mode 100644 index fa2aee26dd..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-yarn.dirs +++ /dev/null @@ -1,6 +0,0 @@ -/etc/hadoop/conf.empty/ -/usr/lib/hadoop/libexec -/usr/lib/hadoop-yarn -/usr/bin -/var/lib/hadoop-yarn/cache -/var/log/hadoop-yarn diff --git a/bigtop-packages/src/deb/hadoop/hadoop-yarn.install b/bigtop-packages/src/deb/hadoop/hadoop-yarn.install deleted file mode 100644 index e9c507467c..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop-yarn.install +++ /dev/null @@ -1,11 +0,0 @@ -/etc/security/limits.d/yarn.conf -/etc/hadoop/conf.empty/yarn-env.sh -/etc/hadoop/conf.empty/yarn-site.xml -/etc/hadoop/conf.empty/capacity-scheduler.xml -/etc/hadoop/conf.empty/container-executor.cfg -/usr/lib/hadoop-yarn -/usr/lib/hadoop/libexec/yarn-config.sh -/usr/bin/yarn -/var/lib/hadoop-yarn -/var/log/hadoop-yarn -/var/run/hadoop-yarn diff --git a/bigtop-packages/src/deb/hadoop/hadoop-yarn.postinst b/bigtop-packages/src/deb/hadoop/hadoop-yarn.postinst index abe427786a..96d5624157 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-yarn.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-yarn.postinst @@ -21,14 +21,14 @@ set -e case "$1" in configure) - chown root:yarn /usr/lib/hadoop-yarn/bin/container-executor - chmod 4754 /usr/lib/hadoop-yarn/bin/container-executor - mkdir -p /var/log/hadoop-yarn /var/run/hadoop-yarn /var/lib/hadoop-yarn/cache || : + chown root:yarn @usr_lib_yarn@/bin/container-executor + chmod 4754 @usr_lib_yarn@/bin/container-executor + mkdir -p /var/log/hadoop-yarn /var/run/hadoop-yarn @var_lib_yarn@/cache || : chown yarn:hadoop /var/log/hadoop-yarn /var/run/hadoop-yarn chmod g+w /var/log/hadoop-yarn /var/run/hadoop-yarn - chown yarn:hadoop /var/lib/hadoop-yarn/ /var/lib/hadoop-yarn/cache - chmod 0755 /var/lib/hadoop-yarn - chmod 1777 /var/lib/hadoop-yarn/cache + chown yarn:hadoop @var_lib_yarn@/ @var_lib_yarn@/cache + chmod 0755 @var_lib_yarn@ + chmod 1777 @var_lib_yarn@/cache ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop-yarn.preinst b/bigtop-packages/src/deb/hadoop/hadoop-yarn.preinst index fdf5aae98a..b97f306b01 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop-yarn.preinst +++ b/bigtop-packages/src/deb/hadoop/hadoop-yarn.preinst @@ -38,7 +38,7 @@ case "$1" in adduser \ --system \ --ingroup yarn \ - --home /var/lib/hadoop-yarn \ + --home @var_lib_yarn@ \ --gecos "Hadoop YARN" \ --shell /bin/bash \ yarn >/dev/null 2>/dev/null || : diff --git a/bigtop-packages/src/deb/hadoop/hadoop.dirs b/bigtop-packages/src/deb/hadoop/hadoop.dirs deleted file mode 100644 index eec22b14d9..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop.dirs +++ /dev/null @@ -1,4 +0,0 
@@ -/etc/hadoop/conf.empty/ -/usr/lib/hadoop -/usr/bin -/usr/share/doc/hadoop diff --git a/bigtop-packages/src/deb/hadoop/hadoop.install b/bigtop-packages/src/deb/hadoop/hadoop.install deleted file mode 100644 index f69b68b02b..0000000000 --- a/bigtop-packages/src/deb/hadoop/hadoop.install +++ /dev/null @@ -1,27 +0,0 @@ -/etc/hadoop/conf.empty/hadoop-metrics2.properties -/etc/hadoop/conf.empty/log4j.properties -/etc/hadoop/conf.empty/workers -/etc/hadoop/conf.empty/ssl-client.xml.example -/etc/hadoop/conf.empty/ssl-server.xml.example -/etc/hadoop/conf.empty/core-site.xml -/etc/hadoop/conf.empty/configuration.xsl -/etc/hadoop/conf.empty/hadoop-env.sh -/etc/hadoop/conf.empty/hadoop-policy.xml -/etc/default/hadoop -/etc/bash_completion.d/hadoop -/usr/lib/hadoop/etc -/usr/lib/hadoop/libexec/hadoop-config.sh -/usr/lib/hadoop/libexec/hadoop-layout.sh -/usr/lib/hadoop/libexec/hadoop-functions.sh -/usr/lib/hadoop/libexec/shellprofile.d -/usr/lib/hadoop/libexec/tools -/usr/lib/hadoop/*.jar -/usr/lib/hadoop/lib -/usr/lib/hadoop/sbin -/usr/lib/hadoop/bin -/usr/lib/hadoop/tools -/usr/bin/hadoop -/usr/share/man/man1/hadoop.1.* -/usr/share/man/man1/hdfs.1.* -/usr/share/man/man1/yarn.1.* -/usr/share/man/man1/mapred.1.* diff --git a/bigtop-packages/src/deb/hadoop/hadoop.postinst b/bigtop-packages/src/deb/hadoop/hadoop.postinst index a151f100fd..7950b568fd 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop.postinst +++ b/bigtop-packages/src/deb/hadoop/hadoop.postinst @@ -24,7 +24,7 @@ case "$1" in # We used to chown /usr/lib/hadoop here, but with security we don't want to do that - # things like task-controller are very particular about ownership chown -R root:hadoop /etc/hadoop - update-alternatives --install /etc/hadoop/conf hadoop-conf /etc/hadoop/conf.empty 10 + update-alternatives --install /etc/hadoop/conf hadoop-conf @etc_hadoop@/conf.empty 10 ;; abort-upgrade|abort-remove|abort-deconfigure) diff --git a/bigtop-packages/src/deb/hadoop/hadoop.prerm b/bigtop-packages/src/deb/hadoop/hadoop.prerm index ce65330e91..d85db183d1 100644 --- a/bigtop-packages/src/deb/hadoop/hadoop.prerm +++ b/bigtop-packages/src/deb/hadoop/hadoop.prerm @@ -35,7 +35,7 @@ set -e case "$1" in remove|upgrade|deconfigure) - update-alternatives --remove hadoop-conf /etc/hadoop/conf.empty || : + update-alternatives --remove hadoop-conf @etc_hadoop@/conf.empty || : ;; failed-upgrade) diff --git a/bigtop-packages/src/deb/hadoop/libhdfs0-dev.install b/bigtop-packages/src/deb/hadoop/libhdfs0-dev.install deleted file mode 100644 index 2228e0e2ba..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfs0-dev.install +++ /dev/null @@ -1 +0,0 @@ -/usr/include/hdfs.h diff --git a/bigtop-packages/src/deb/hadoop/libhdfs0.dirs b/bigtop-packages/src/deb/hadoop/libhdfs0.dirs deleted file mode 100644 index fce258fe70..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfs0.dirs +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/ diff --git a/bigtop-packages/src/deb/hadoop/libhdfs0.install b/bigtop-packages/src/deb/hadoop/libhdfs0.install deleted file mode 100644 index 4abd42eb8b..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfs0.install +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/libhdfs.* diff --git a/bigtop-packages/src/deb/hadoop/libhdfspp-dev.dirs b/bigtop-packages/src/deb/hadoop/libhdfspp-dev.dirs deleted file mode 100644 index dfc9182e8a..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfspp-dev.dirs +++ /dev/null @@ -1 +0,0 @@ -/usr/include/hdfspp diff --git a/bigtop-packages/src/deb/hadoop/libhdfspp-dev.install 
b/bigtop-packages/src/deb/hadoop/libhdfspp-dev.install deleted file mode 100644 index dfc9182e8a..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfspp-dev.install +++ /dev/null @@ -1 +0,0 @@ -/usr/include/hdfspp diff --git a/bigtop-packages/src/deb/hadoop/libhdfspp.dirs b/bigtop-packages/src/deb/hadoop/libhdfspp.dirs deleted file mode 100644 index fce258fe70..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfspp.dirs +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/ diff --git a/bigtop-packages/src/deb/hadoop/libhdfspp.install b/bigtop-packages/src/deb/hadoop/libhdfspp.install deleted file mode 100644 index 8a938d9df1..0000000000 --- a/bigtop-packages/src/deb/hadoop/libhdfspp.install +++ /dev/null @@ -1 +0,0 @@ -/usr/lib/libhdfspp.* diff --git a/bigtop-packages/src/deb/hadoop/rules b/bigtop-packages/src/deb/hadoop/rules index fe0ba5ec8f..996cecc18d 100755 --- a/bigtop-packages/src/deb/hadoop/rules +++ b/bigtop-packages/src/deb/hadoop/rules @@ -27,6 +27,257 @@ export DH_OPTIONS dh $@ hadoop_version=${HADOOP_BASE_VERSION} +hadoop_name=hadoop +parent_dir=${PARENT_DIR} +pkg_name_suffix=${PKG_NAME_SUFFIX} + +lib_dir=${parent_dir}/usr/lib +include_dir=${parent_dir}/usr/include +man_dir=${parent_dir}/usr/share/man +bin_dir=${parent_dir}/usr/bin +doc_hadoop=${parent_dir}/usr/share/doc/${hadoop_name}-doc +usr_lib_hadoop=${lib_dir}/${hadoop_name} +etc_default=${parent_dir}/etc/default +etc_hadoop=${parent_dir}/etc/${hadoop_name} +var_lib_hadoop=${parent_dir}/var/lib/${hadoop_name} +np_etc_hadoop=/etc/${hadoop_name} +np_var_run_hadoop=/var/run/${hadoop_name} + +usr_lib_hdfs=${lib_dir}/${hadoop_name}-hdfs +usr_lib_yarn=${lib_dir}/${hadoop_name}-yarn +usr_lib_mapreduce=${lib_dir}/${hadoop_name}-mapreduce +var_lib_yarn=${parent_dir}/var/lib/${hadoop_name}-yarn +var_lib_hdfs=${parent_dir}/var/lib/${hadoop_name}-hdfs +var_lib_mapreduce=${parent_dir}/var/lib/${hadoop_name}-mapreduce +var_lib_httpfs=${parent_dir}/var/lib/${hadoop_name}-httpfs +var_lib_kms=${parent_dir}/var/lib/${hadoop_name}-kms + + +define hadoop_install +${etc_hadoop}/conf.empty/hadoop-metrics2.properties +${etc_hadoop}/conf.empty/log4j.properties +${etc_hadoop}/conf.empty/workers +${etc_hadoop}/conf.empty/ssl-client.xml.example +${etc_hadoop}/conf.empty/ssl-server.xml.example +${etc_hadoop}/conf.empty/core-site.xml +${etc_hadoop}/conf.empty/configuration.xsl +${etc_hadoop}/conf.empty/hadoop-env.sh +${etc_hadoop}/conf.empty/hadoop-policy.xml +${etc_default}/hadoop +${np_etc_hadoop} +/etc/bash_completion.d/hadoop +${usr_lib_hadoop}/etc +${usr_lib_hadoop}/libexec/hadoop-config.sh +${usr_lib_hadoop}/libexec/hadoop-layout.sh +${usr_lib_hadoop}/libexec/hadoop-functions.sh +${usr_lib_hadoop}/libexec/shellprofile.d +${usr_lib_hadoop}/libexec/tools +${usr_lib_hadoop}/*.jar +${usr_lib_hadoop}/lib +${usr_lib_hadoop}/sbin +${usr_lib_hadoop}/bin +${usr_lib_hadoop}/tools +${bin_dir}/hadoop +${man_dir}/man1/hadoop.1.* +${man_dir}/man1/hdfs.1.* +${man_dir}/man1/yarn.1.* +${man_dir}/man1/mapred.1.* +endef + +define hadoop_dirs +${etc_hadoop}/conf.empty/ +${usr_lib_hadoop} +${bin_dir} +${doc_hadoop} +endef + +define hadoop_client_install +${usr_lib_hadoop}/client +endef + +define hadoop_conf_pseudo_install +${etc_hadoop}/conf.pseudo +endef + +define hadoop_doc_install +${doc_hadoop} +endef + +define hadoop_doc_dirs +${doc_hadoop}/ +endef + +define hadoop_hdfs_install +/etc/security/limits.d/hdfs.conf +${etc_hadoop}/conf.empty/hdfs-site.xml +${usr_lib_hadoop}-hdfs +${usr_lib_hadoop}/libexec/hdfs-config.sh +${usr_lib_hadoop}/libexec/init-hdfs.sh 
+${usr_lib_hadoop}/libexec/init-hcfs.json +${usr_lib_hadoop}/libexec/init-hcfs.groovy +${bin_dir}/hdfs +${var_lib_hdfs} +/var/log/hadoop-hdfs +/var/run/hadoop-hdfs +endef + +define hadoop_hdfs_dirs +${etc_hadoop}/conf.empty/ +${usr_lib_hadoop}/libexec +${usr_lib_hadoop}-hdfs +${bin_dir} +${var_lib_hdfs}/cache +/var/log/hadoop-hdfs +endef + + +define hadoop_hdfs_fuse_install +${etc_default}/hadoop-fuse +${bin_dir}/hadoop-fuse-dfs +${usr_lib_hadoop}/bin/fuse_dfs +endef + +define hadoop_httpfs_install +${etc_hadoop}/conf.empty/httpfs-env.sh +${etc_hadoop}/conf.empty/httpfs-log4j.properties +${etc_hadoop}/conf.empty/httpfs-site.xml +${var_lib_httpfs} +endef + +define hadoop_httpfs_dirs +/var/log/hadoop-httpfs +endef + +define hadoop_kms_install +${etc_hadoop}/conf.empty/kms-acls.xml +${etc_hadoop}/conf.empty/kms-env.sh +${etc_hadoop}/conf.empty/kms-log4j.properties +${etc_hadoop}/conf.empty/kms-site.xml +${var_lib_kms} +endef + +define hadoop_kms_dirs +/var/log/hadoop-kms +endef + +define hadoop_mapreduce_dirs +${etc_hadoop}/conf.empty/ +${usr_lib_hadoop}/libexec +${usr_lib_hadoop}-mapreduce +${bin_dir} +${var_lib_mapreduce}/cache +/var/log/hadoop-mapreduce +endef + +define hadoop_mapreduce_install +/etc/security/limits.d/mapreduce.conf +${etc_hadoop}/conf.empty/mapred-site.xml +${etc_hadoop}/conf.empty/mapred-env.sh +${etc_hadoop}/conf.empty/mapred-queues.xml.template +${usr_lib_hadoop}-mapreduce +${usr_lib_hadoop}/libexec/mapred-config.sh +${bin_dir}/mapred +${var_lib_mapreduce} +/var/log/hadoop-mapreduce +/var/run/hadoop-mapreduce +endef + +define hadoop_yarn_dirs +${etc_hadoop}/conf.empty/ +${usr_lib_hadoop}/libexec +${usr_lib_hadoop}-yarn +${bin_dir} +${var_lib_yarn}/cache +/var/log/hadoop-yarn +endef + +define hadoop_yarn_install +/etc/security/limits.d/yarn.conf +${etc_hadoop}/conf.empty/yarn-env.sh +${etc_hadoop}/conf.empty/yarn-site.xml +${etc_hadoop}/conf.empty/capacity-scheduler.xml +${etc_hadoop}/conf.empty/container-executor.cfg +${usr_lib_hadoop}-yarn +${usr_lib_hadoop}/libexec/yarn-config.sh +${bin_dir}/yarn +${var_lib_yarn} +/var/log/hadoop-yarn +/var/run/hadoop-yarn +endef + +define libhdfs0_install +${lib_dir}/libhdfs.* +endef + +define libhdfs0_dirs +${lib_dir} +endef + +define libhdfs0_dev_install +${include_dir}/hdfs.h +endef + +define libhdfspp_install +${lib_dir}/libhdfspp.* +endef + +define libhdfspp_dirs +${lib_dir} +endef + +define libhdfspp_dev_install +${include_dir}/hdfspp +endef + +define libhdfspp_dev_dirs +${include_dir}/hdfspp +endef + +define gen_rule + $(foreach item, postinst preinst prerm,\ + if [ -f debian/hadoop$(1).$(item) ]; then \ + sed -i -e 's:@usr_lib_hadoop@:${usr_lib_hadoop}:g' -e 's:@var_lib_hdfs@:${var_lib_hdfs}:g' -e 's:@var_lib_hadoop@:${var_lib_hadoop}:g' -e 's:@etc_hadoop@:${etc_hadoop}:g' -e 's:@var_lib_httpfs@:${var_lib_httpfs}:g' -e 's:@var_lib_kms@:${var_lib_kms}:g' \ + -e 's:@var_lib_mapreduce@:${var_lib_mapreduce}:g' -e 's:@usr_lib_yarn@:${usr_lib_yarn}:g' -e 's:@var_lib_yarn@:${var_lib_yarn}:g' -e 's:@etc_hadoop@:${etc_hadoop}:g' \ + debian/hadoop$(1).$(item); \ + if [ "${pkg_name_suffix}" != "" ] ;then mv debian/hadoop$(1).$(item) debian/hadoop${pkg_name_suffix}$(1).$(item);fi \ + fi; \ + ) +endef + + +.PHONY: gen_files +gen_files: + $(foreach item,'' -conf -hdfs -httpfs -kms -mapreduce -yarn, $(call gen_rule,$(item))) + + $(foreach item,$(hadoop_install),echo $(item) >> debian/hadoop${pkg_name_suffix}.install;) + $(foreach item,$(hadoop_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}.dirs;) + $(foreach 
item,$(hadoop_client_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-client.install;) + $(foreach item,$(hadoop_conf_pseudo_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-conf-pseudo.install;) + $(foreach item,$(hadoop_doc_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-doc.install;) + $(foreach item,$(hadoop_doc_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-doc.dirs;) + $(foreach item,$(hadoop_hdfs_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-hdfs.install;) + $(foreach item,$(hadoop_hdfs_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-hdfs.dirs;) + $(foreach item,$(hadoop_hdfs_fuse_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-hdfs-fuse.install;) + $(foreach item,$(hadoop_httpfs_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-httpfs.install;) + $(foreach item,$(hadoop_httpfs_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-httpfs.dirs;) + $(foreach item,$(hadoop_kms_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-kms.install;) + $(foreach item,$(hadoop_kms_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-kms.dirs;) + $(foreach item,$(hadoop_mapreduce_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-mapreduce.install;) + $(foreach item,$(hadoop_mapreduce_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-mapreduce.dirs;) + $(foreach item,$(hadoop_yarn_install),echo $(item) >> debian/hadoop${pkg_name_suffix}-yarn.install;) + $(foreach item,$(hadoop_yarn_dirs),echo $(item) >> debian/hadoop${pkg_name_suffix}-yarn.dirs;) + $(foreach item,$(libhdfs0_install),echo $(item) >> debian/libhdfs0.install;) + $(foreach item,$(libhdfs0_dirs),echo $(item) >> debian/libhdfs0.dirs;) + $(foreach item,$(libhdfs0_dev_install),echo $(item) >> debian/libhdfs0-dev.install;) + $(foreach item,$(libhdfspp_install),echo $(item) >> debian/libhdfspp.install;) + $(foreach item,$(libhdfspp_dirs),echo $(item) >> debian/libhdfspp.dirs;) + $(foreach item,$(libhdfspp_dev_install),echo $(item) >> debian/libhdfspp-dev.install;) + $(foreach item,$(libhdfspp_dev_dirs),echo $(item) >> debian/libhdfspp-dev.dirs;) + +.PHONY: update_control +update_control: + sed -i 's/-pkgsuffix/${pkg_name_suffix}/g' debian/control + ifeq (${DEB_BUILD_ARCH},amd64) native_dir=Linux-amd64-64 @@ -35,6 +286,10 @@ ifeq (${DEB_BUILD_ARCH},i386) native_dir=Linux-i386-32 endif +override_dh_auto_clean: update_control + dh_auto_clean + + override_dh_auto_build: env HADOOP_VERSION=${hadoop_version} HADOOP_ARCH=${native_dir} \ bash debian/do-component-build -Divy.home=${HOME}/.ivy2 @@ -44,40 +299,59 @@ hadoop_svcs=hdfs-namenode hdfs-secondarynamenode hdfs-datanode hdfs-zkfc hdfs-jo mapreduce-historyserver httpfs kms $(hadoop_svcs): debian/init.d.tmpl - bash $< debian/hadoop-$@.svc deb debian/hadoop-$@.init - cp debian/$(firstword $(subst -, ,$@)).default debian/tmp/etc/default/hadoop-$@ - echo /etc/default/hadoop-$@ >> debian/hadoop-$@.install + sed -i -e "s|@hadoop_home|${usr_lib_hadoop}|" debian/hadoop-$@.svc + sed -i -e "s|@hadoop_mapreduce_home|${usr_lib_mapreduce}|" debian/hadoop-$@.svc + sed -i -e "s|@hadoop_yarn_home|${usr_lib_yarn}|" debian/hadoop-$@.svc + sed -i -e "s|@var_lib_hadoop_hdfs|${var_lib_hdfs}|" debian/hadoop-$@.svc + sed -i -e "s|@var_lib_hadoop_yarn|${var_lib_yarn}|" debian/hadoop-$@.svc + sed -i -e "s|@var_lib_hadoop_mapreduce|${var_lib_mapreduce}|" debian/hadoop-$@.svc + BIGTOP_DEFAULTS_DIR=${etc_default} bash $< debian/hadoop-$@.svc deb debian/hadoop${pkg_name_suffix}-$@.init + cp debian/$(firstword $(subst -, ,$@)).default 
debian/tmp/${etc_default}/hadoop-$@ + echo ${etc_default}/hadoop-$@ >> debian/hadoop${pkg_name_suffix}-$@.install # FIXME: workaround for BIGTOP-105 - [ -f debian/hadoop-$@.postinst ] || cp debian/hadoop.daemon.postinst.tpl debian/hadoop-$@.postinst - sed -i -e "s|@HADOOP_DAEMON@|$@|" debian/hadoop-$@.postinst + [ -f debian/hadoop${pkg_name_suffix}-$@.postinst ] || cp debian/hadoop.daemon.postinst.tpl debian/hadoop${pkg_name_suffix}-$@.postinst + sed -i -e "s|@HADOOP_DAEMON@|$@|" debian/hadoop${pkg_name_suffix}-$@.postinst + -override_dh_auto_install: +override_dh_auto_install: gen_files env HADOOP_VERSION=${hadoop_version} \ bash debian/install_hadoop.sh \ - --prefix=debian/tmp/ \ - --distro-dir=debian \ - --build-dir=${CURDIR}/build \ - --system-lib-dir=/usr/lib/ \ - --system-include-dir=/usr/include \ - --doc-dir=/usr/share/doc/hadoop-doc \ - --man-dir=/usr/share/man + --distro-dir=debian \ + --build-dir=${CURDIR}/build \ + --prefix=debian/tmp/ \ + --doc-dir=${doc_hadoop} \ + --bin-dir=${bin_dir} \ + --man-dir=${man_dir} \ + --etc-default=${etc_default} \ + --hadoop-dir=${usr_lib_hadoop} \ + --hdfs-dir=${usr_lib_hdfs} \ + --yarn-dir=${usr_lib_yarn} \ + --mapreduce-dir=${usr_lib_mapreduce} \ + --var-hdfs=${var_lib_hdfs} \ + --var-yarn=${var_lib_yarn} \ + --var-mapreduce=${var_lib_mapreduce} \ + --var-httpfs=${var_lib_httpfs} \ + --var-kms=${var_lib_kms} \ + --system-include-dir=${include_dir} \ + --system-lib-dir=${lib_dir} \ + --etc-hadoop=${etc_hadoop} # Forcing Zookeeper dependency to be on the packaged jar - ln -sf /usr/lib/zookeeper/zookeeper.jar debian/tmp/usr/lib/hadoop/lib/zookeeper-[[:digit:]]*.jar + ln -sf ${lib_dir}/zookeeper/zookeeper.jar debian/tmp/${usr_lib_hadoop}/lib/zookeeper-[[:digit:]]*.jar # Workaround for BIGTOP-583 - rm -f debian/tmp/usr/lib/hadoop-*/lib/slf4j-log4j12-*.jar + rm -f debian/tmp/${usr_lib_hadoop}-*/lib/slf4j-log4j12-*.jar # FIXME: BIGTOP-463 # mkdir -p debian/tmp/etc/default # Refactored from install-arch - cp debian/hadoop-fuse.default debian/tmp/etc/default/hadoop-fuse + cp debian/hadoop-fuse.default debian/tmp/${etc_default}/hadoop-fuse mkdir -p debian/tmp/etc/security/limits.d cp debian/hdfs.conf debian/yarn.conf debian/mapreduce.conf debian/tmp/etc/security/limits.d override_dh_install: $(hadoop_svcs) dh_install # Drop misc fuse_dfs directories - rm -Rf debian/hadoop/usr/lib/hadoop/bin/fuse_dfs - rm -Rf debian/hadoop/usr/lib/hadoop/contrib/fuse-dfs - rm -Rf debian/hadoop/usr/lib/hadoop/hdfs/contrib/fuse-dfs + rm -Rf debian/hadoop${pkg_name_suffix}/${usr_lib_hadoop}/bin/fuse_dfs + rm -Rf debian/hadoop${pkg_name_suffix}/${usr_lib_hadoop}/contrib/fuse-dfs + rm -Rf debian/hadoop${pkg_name_suffix}/${usr_lib_hadoop}/hdfs/contrib/fuse-dfs override_dh_strip: dh_strip --no-automatic-dbgsym @@ -85,4 +359,4 @@ override_dh_strip: override_dh_shlibdeps: - dh_shlibdeps --dpkg-shlibdeps-params=--ignore-missing-info + dh_shlibdeps -l${lib_dir} --dpkg-shlibdeps-params=--ignore-missing-info override_dh_strip_nondeterminism: diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec index 8d6ef8b188..d7b897d0a9 100644 --- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec +++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec @@ -585,6 +585,19 @@ env HADOOP_VERSION=%{hadoop_base_version} bash %{SOURCE2} \ # %__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default} %__cp $RPM_SOURCE_DIR/%{hadoop_name}-fuse.default $RPM_BUILD_ROOT/%{etc_default}/%{hadoop_name}-fuse + + +for service in %{hadoop_services} +do + sed -i -e 
"s|@hadoop_home|%usr_lib_hadoop|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + sed -i -e "s|@hadoop_mapreduce_home|%usr_lib_mapreduce|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + sed -i -e "s|@hadoop_yarn_home|%usr_lib_yarn|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + sed -i -e "s|@var_lib_hadoop_hdfs|%{var_lib_hdfs}|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + sed -i -e "s|@var_lib_hadoop_yarn|%{var_lib_yarn}|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + sed -i -e "s|@var_lib_hadoop_mapreduce|%{var_lib_mapreduce}|" $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc + + # Generate the init.d scripts for service in %{hadoop_services} do diff --git a/bigtop-packages/src/templates/init.d.tmpl b/bigtop-packages/src/templates/init.d.tmpl index f25d161522..f90229880e 100755 --- a/bigtop-packages/src/templates/init.d.tmpl +++ b/bigtop-packages/src/templates/init.d.tmpl @@ -188,7 +188,7 @@ cat <<__EOT__ ### END INIT INFO . /lib/lsb/init-functions -BIGTOP_DEFAULTS_DIR=\${BIGTOP_DEFAULTS_DIR-/etc/default} +BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default} [ -n "\${BIGTOP_DEFAULTS_DIR}" -a -r \${BIGTOP_DEFAULTS_DIR}/hadoop ] && . \${BIGTOP_DEFAULTS_DIR}/hadoop [ -n "\${BIGTOP_DEFAULTS_DIR}" -a -r \${BIGTOP_DEFAULTS_DIR}/$DAEMON ] && . \${BIGTOP_DEFAULTS_DIR}/$DAEMON diff --git a/bigtop.bom b/bigtop.bom index 52587fd39e..92d7de37ad 100644 --- a/bigtop.bom +++ b/bigtop.bom @@ -155,6 +155,7 @@ bigtop { 'hadoop' { name = 'hadoop' rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_") + deb_pkg_suffix = "-" + bigtop.base_version.replace(".", "-") relNotes = 'Apache Hadoop' version { base = '3.3.6'; pkg = base; release = 1 } tarball { destination = "${name}-${version.base}.tar.gz" diff --git a/packages.gradle b/packages.gradle index 2901ea345a..580516f725 100644 --- a/packages.gradle +++ b/packages.gradle @@ -115,7 +115,7 @@ def getPkgNameSuffix(component, type) { def getParentDir(bigtopBaseVersion, type) { def defaultParentDirValue = type.equalsIgnoreCase("deb") ? "" : "%{nil}" def parentDir = project.hasProperty("parentDir") ? project.property('parentDir') : defaultParentDirValue - if (parentDir && parentDir != "%{nil}") { + if (parentDir != defaultParentDirValue) { parentDir = "${parentDir}/${bigtopBaseVersion}" } return parentDir @@ -382,7 +382,10 @@ def genTasks = { target -> def final BASE_VERSION = config.bigtop.components[target].version.base def final SRCDEB = "${PKG_NAME}_$PKG_VERSION-${BIGTOP_BUILD_STAMP}.dsc" def final HADOOP_VERSION = config.bigtop.components["hadoop"].version.pkg + def final FULL_PARENT_DIR = getParentDir(config.bigtop.base_version, "deb") + def final DEB_PKG_NAME_SUFFIX = getPkgNameSuffix(config.bigtop.components[target], "deb").pkgNameSuffix + def final BIGTOP_BASE_VERSION = "${config.bigtop.base_version}" exec { workingDir PKG_OUTPUT_DIR commandLine "dpkg-source -x $SRCDEB".split(' ') @@ -399,6 +402,9 @@ def genTasks = { target -> --set-envvar=${toOldStyleName(target)}_BASE_VERSION=$BASE_VERSION \ --set-envvar=${toOldStyleName(target)}_VERSION=$PKG_VERSION \ --set-envvar=${toOldStyleName(target)}_RELEASE=$BIGTOP_BUILD_STAMP \ +--set-envvar=PARENT_DIR=$FULL_PARENT_DIR \ +--set-envvar=PKG_NAME_SUFFIX=$DEB_PKG_NAME_SUFFIX \ +--set-envvar=bigtop_base_version=${BIGTOP_BASE_VERSION} \ -uc -us -b """ exec { @@ -428,6 +434,8 @@ def genTasks = { target -> println "\tNothing to do. Exiting..." 
return } + + def final BIGTOP_BASE_VERSION = "${config.bigtop.base_version}" def final BIGTOP_BUILD_STAMP = System.getenv('BIGTOP_BUILD_STAMP') ?: config.bigtop.components[target].version.release def final TARBALL_SRC = config.bigtop.components[target].tarball.source @@ -443,6 +451,8 @@ def genTasks = { target -> def final DEB_PKG_DIR = "$PKG_BUILD_DIR/deb/$PKG_NAME-${PKG_VERSION}-${BIGTOP_BUILD_STAMP}" def final ENABLE_MAVEN_PARALLEL_BUILD = config.bigtop.components[target].maven_parallel_build def final MAVEN_BUILD_THREADS = project.hasProperty('buildThreads') ? project.property('buildThreads') : null + def final FULL_PARENT_DIR = getParentDir(config.bigtop.base_version, "deb") + def final DEB_PKG_NAME_SUFFIX = getPkgNameSuffix(config.bigtop.components[target], "deb").pkgNameSuffix mkdir (DEB_BLD_DIR) copy { @@ -511,7 +521,10 @@ // Creating source package exec { workingDir DEB_BLD_DIR + environment 'PARENT_DIR', FULL_PARENT_DIR + environment 'PKG_NAME_SUFFIX', DEB_PKG_NAME_SUFFIX + environment 'bigtop_base_version', BIGTOP_BASE_VERSION commandLine "dpkg-buildpackage -uc -us -sa -S".split(' ') } mkdir(PKG_OUTPUT_DIR) copy { diff --git a/provisioner/docker/docker-hadoop.sh b/provisioner/docker/docker-hadoop.sh index 6d8861f5a8..0ef534178e 100755 --- a/provisioner/docker/docker-hadoop.sh +++ b/provisioner/docker/docker-hadoop.sh @@ -43,6 +43,8 @@ usage() { echo " -p, --provision - Deploy configuration changes" echo " -r, --repo REPO_URL - Overwrite the yum/apt repo defined in config file" echo " -s, --smoke-tests COMPONENTS - Run Bigtop smoke tests" + echo " -ps, --pkg-suffix - Use this if you built with pkgSuffix and want to deploy or run smoke tests with your custom packages" + echo " -pd, --parent-dir PARENT_DIR - PARENT_DIR is the parentDir build argument; use this if you built with parentDir and want to deploy or run smoke tests" echo " COMPONENTS is optional. 
If not specified, default to smoke_test_components in config file" echo " COMPONENTS is a comma separated string" echo " For example: $PROG -c 3 --smoke-tests hdfs" @@ -120,7 +122,9 @@ create() { gpg_check=true fi fi - generate-config "$hadoop_head_node" "$repo" "$components" + bigtop_base_version=$(get_base_version) + package_suffix=$(get_package_suffix) + generate-config "$hadoop_head_node" "$repo" "$components" "$package_suffix" "$bigtop_base_version" "$parent_dir" # Start provisioning generate-hosts @@ -159,6 +163,9 @@ bigtop::bigtop_repo_gpg_check: $gpg_check hadoop_cluster_node::cluster_components: $3 hadoop_cluster_node::cluster_nodes: [$node_list] hadoop::common_yarn::yarn_resourcemanager_scheduler_class: org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler +bigtop::package_suffix: $4 +bigtop::base_version: $5 +bigtop::parent_dir: $6 EOF } @@ -207,7 +214,8 @@ smoke-tests() { if [ -z ${smoke_test_components+x} ]; then smoke_test_components="`echo $(get-yaml-config smoke_test_components) | sed 's/ /,/g'`" fi - docker exec $hadoop_head_node bash -c "bash -x /bigtop-home/provisioner/utils/smoke-tests.sh $smoke_test_components" + bigtop_base_version=$(get_base_version) + docker exec $hadoop_head_node bash -c "bash -x /bigtop-home/provisioner/utils/smoke-tests.sh $smoke_test_components $bigtop_base_version $parent_dir" } destroy() { @@ -312,6 +319,36 @@ change_docker_compose_cmd() { DOCKER_COMPOSE_CMD="docker compose" } +get_base_version() { + local file=../../pom.xml + local version + + version=$(sed -n ' + /<groupId>org.apache.bigtop<\/groupId>/ { + N + N + s/.*<version>\(.*\)<\/version>.*/\1/p + }' "$file") + + base_version=$(echo "$version" | cut -d'-' -f1) + echo "$base_version" +} + +get_package_suffix() { + base_version=$(get_base_version) + if [ "$pkg_suffix" = true ]; then + # TODO: temporarily hard-code the suffix separators because all pkg_suffix values in bigtop.bom are currently fixed + if [ "$distro" = "centos" ]; then + package_suffix="_$(echo $base_version | tr '.' '_')" + elif [ "$distro" = "debian" ]; then + package_suffix="-$(echo $base_version | tr '.' '-')" + fi + echo "$package_suffix" + else + echo "" + fi +} + PROG=`basename $0` if [ $# -eq 0 ]; then @@ -450,6 +487,17 @@ while [ $# -gt 0 ]; do fi READY_TO_TEST=true ;; + -ps|--pkg-suffix) + pkg_suffix=true + log "pkg-suffix specified" + shift ;; + -pd|--parent-dir) + if [ $# -lt 2 ]; then + log "No parent-dir specified" + exit 1 + fi + parent_dir="$2" + shift 2;; -h|--help) usage shift;; diff --git a/provisioner/utils/smoke-tests.sh b/provisioner/utils/smoke-tests.sh index 188c51ddd4..4b06edc936 100755 --- a/provisioner/utils/smoke-tests.sh +++ b/provisioner/utils/smoke-tests.sh @@ -16,6 +16,12 @@ HCFS_USER="hdfs" SMOKE_TESTS=$1 +BIGTOP_BASE_VERSION=$2 +PARENT_DIR=$3 + +echo "Smoke Tests Parameter: $SMOKE_TESTS" +echo "Bigtop Base Version Parameter: $BIGTOP_BASE_VERSION" +echo "Parent Directory Parameter: $PARENT_DIR" if [ -z "$SMOKE_TESTS" ]; then >&2 echo -e "\nSMOKE_TESTS VARIABLE IS NOT DEFINED. CHECK THE INPUT OF `basename $0` \n" @@ -32,6 +38,12 @@ fi echo -e "\n===== EXPORTING VARIABLES =====\n" +if [ -n "$PARENT_DIR" ]; then + ZOOKEEPER_HOME="${PARENT_DIR}/${BIGTOP_BASE_VERSION}/usr/lib/zookeeper" + HADOOP_HOME="${PARENT_DIR}/${BIGTOP_BASE_VERSION}/usr/lib/hadoop" + HADOOP_MAPRED_HOME="${PARENT_DIR}/${BIGTOP_BASE_VERSION}/usr/lib/hadoop-mapreduce" +fi + export ALLUXIO_HOME=${ALLUXIO_HOME:-/usr/lib/alluxio} export FLINK_HOME=${FLINK_HOME:-/usr/lib/flink} export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
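
Note on the deb relocation mechanics above: the static hadoop*.install/.dirs files are deleted because debian/rules now generates them (gen_files) from path variables derived from PARENT_DIR and PKG_NAME_SUFFIX, while gen_rule rewrites the @placeholder@ tokens in the maintainer scripts to match. A minimal sketch of the effect, using hypothetical example values (the real ones are injected by the Gradle build):

    # Hypothetical inputs: parent_dir=/opt/bigtop/3.3.0, pkg_name_suffix=-3-3-0
    var_lib_kms=/opt/bigtop/3.3.0/var/lib/hadoop-kms
    # gen_rule turns "chown kms:kms @var_lib_kms@ ..." in hadoop-kms.postinst into
    # "chown kms:kms /opt/bigtop/3.3.0/var/lib/hadoop-kms ...":
    sed -i -e "s:@var_lib_kms@:${var_lib_kms}:g" debian/hadoop-kms.postinst
    # ...and then renames the script so dh picks it up for the suffixed package:
    mv debian/hadoop-kms.postinst debian/hadoop-3-3-0-kms.postinst

With an empty suffix and parent dir, the substituted values collapse back to the stock /usr/lib and /var/lib layout, so a default build should produce the same packages as before.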
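
Note on driving the whole flow: the provisioner flags added above are meant to pair with a relocated build. A sketch of an end-to-end run, not verified against this patch; the parent dir is a hypothetical value, and the Gradle property and task names (pkgSuffix, parentDir, hadoop-pkg) are inferred from packages.gradle and the usage text above:

    # Build version-suffixed, relocated Hadoop packages:
    ./gradlew hadoop-pkg -PpkgSuffix -PparentDir=/opt/bigtop
    # Deploy a 3-node cluster from them and smoke-test HDFS:
    cd provisioner/docker
    ./docker-hadoop.sh -c 3 --pkg-suffix --parent-dir /opt/bigtop --smoke-tests hdfs

docker-hadoop.sh derives the base version from pom.xml (get_base_version) and the distro-specific suffix (get_package_suffix), publishes them to Puppet as the bigtop::package_suffix, bigtop::base_version, and bigtop::parent_dir hiera values, and forwards them to smoke-tests.sh, which then points HADOOP_HOME and friends at ${PARENT_DIR}/${BIGTOP_BASE_VERSION}/usr/lib/... instead of /usr/lib.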