diff --git a/context.xml b/context.xml
new file mode 100644
index 0000000000000000000000000000000000000000..be19aeae11b03a4619401294c6b308c5364d720e
--- /dev/null
+++ b/context.xml
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Context allowLinking="true">
+</Context>
diff --git a/hadoop-3.2.1-src.tar.gz b/hadoop-3.2.1-src.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..697c17d921fd03f9d9e3c7f48c05536365ff0cd7
Binary files /dev/null and b/hadoop-3.2.1-src.tar.gz differ
diff --git a/hadoop-hdfs.service.template b/hadoop-hdfs.service.template
new file mode 100644
index 0000000000000000000000000000000000000000..bca5c5f859619c9127d81a827ed1e0cd618d5d3e
--- /dev/null
+++ b/hadoop-hdfs.service.template
@@ -0,0 +1,37 @@
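+# DAEMON is replaced with the actual HDFS daemon name (namenode, datanode, ...)
+# when hadoop.spec generates the per-service unit files from this template.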
+[Unit]
+Description=The Hadoop DAEMON daemon
+After=network.target
+After=NetworkManager.target
+
+[Service]
+Type=forking
+EnvironmentFile=-/etc/sysconfig/hadoop-hdfs
+EnvironmentFile=-/etc/sysconfig/hadoop-DAEMON
+ExecStart=/usr/sbin/hadoop-daemon.sh start DAEMON
+ExecStop=/usr/sbin/hadoop-daemon.sh stop DAEMON
+User=hdfs
+Group=hadoop
+PIDFile=/var/run/hadoop-hdfs/hadoop-hdfs-DAEMON.pid
+LimitNOFILE=32768
+LimitNPROC=65536
+
+#######################################
+# Note: Below are cgroup options
+#######################################
+#Slice=
+#CPUAccounting=true
+#CPUShares=1024
+
+#MemoryAccounting=true
+#TBD: MemoryLimit=bytes, MemorySoftLimit=bytes
+
+#BlockIOAccounting=true
+#BlockIOWeight=??
+#BlockIODeviceWeight=??
+#TBD: BlockIOReadBandwidth=bytes, BlockIOWriteBandwidth=bytes
+
+#DeviceAllow=
+#DevicePolicy=auto|closed|strict
+
+[Install]
+WantedBy=multi-user.target
diff --git a/hadoop-httpfs.sysconfig b/hadoop-httpfs.sysconfig
new file mode 100644
index 0000000000000000000000000000000000000000..63c953c79f12d2b83590e1fb2b325198ab382fe2
--- /dev/null
+++ b/hadoop-httpfs.sysconfig
@@ -0,0 +1,5 @@
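+# Installed by hadoop.spec as /etc/sysconfig/tomcat@httpfs to configure the
+# httpfs Tomcat instance.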
+CATALINA_BASE=/usr/share/hadoop/httpfs/tomcat
+CATALINA_HOME=/usr/share/hadoop/httpfs/tomcat
+CATALINA_TMPDIR=/var/cache/hadoop-httpfs
+
+CATALINA_OPTS="-Dhttpfs.home.dir=/usr -Dhttpfs.config.dir=/etc/hadoop -Dhttpfs.log.dir=/var/log/hadoop-httpfs -Dhttpfs.temp.dir=/var/cache/hadoop-httpfs -Dhttpfs.admin.port=14001 -Dhttpfs.http.port=14000"
diff --git a/hadoop-layout.sh b/hadoop-layout.sh
new file mode 100644
index 0000000000000000000000000000000000000000..7801fc8072ae28f6d3247c572e323be061a3c2ac
--- /dev/null
+++ b/hadoop-layout.sh
@@ -0,0 +1,29 @@
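+# Layout overrides read by the Hadoop shell scripts. hadoop.spec installs this
+# as /usr/libexec/hadoop-layout.sh after rewriting HADOOP_COMMON_LIB_NATIVE_DIR
+# to the architecture-specific libdir.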
+export HADOOP_PREFIX=/usr
+export HADOOP_COMMON_HOME=/usr
+export HADOOP_COMMON_DIR=share/hadoop/common
+export HADOOP_COMMON_LIB_JARS_DIR=share/hadoop/common/lib
+export HADOOP_COMMON_LIB_NATIVE_DIR=lib/hadoop
+export HADOOP_CONF_DIR=/etc/hadoop
+export HADOOP_LIBEXEC_DIR=/usr/libexec
+
+export HADOOP_HDFS_HOME=$HADOOP_PREFIX
+export HDFS_DIR=share/hadoop/hdfs
+export HDFS_LIB_JARS_DIR=share/hadoop/hdfs/lib
+export HADOOP_PID_DIR=/var/run/hadoop-hdfs
+export HADOOP_LOG_DIR=/var/log/hadoop-hdfs
+export HADOOP_IDENT_STRING=hdfs
+
+export HADOOP_YARN_HOME=$HADOOP_PREFIX
+export YARN_DIR=share/hadoop/yarn
+export YARN_LIB_JARS_DIR=share/hadoop/yarn/lib
+export YARN_PID_DIR=/var/run/hadoop-yarn
+export YARN_LOG_DIR=/var/log/hadoop-yarn
+export YARN_CONF_DIR=/etc/hadoop
+export YARN_IDENT_STRING=yarn
+
+export HADOOP_MAPRED_HOME=$HADOOP_PREFIX
+export MAPRED_DIR=share/hadoop/mapreduce
+export MAPRED_LIB_JARS_DIR=share/hadoop/mapreduce/lib
+export HADOOP_MAPRED_PID_DIR=/var/run/hadoop-mapreduce
+export HADOOP_MAPRED_LOG_DIR=/var/log/hadoop-mapreduce
+export HADOOP_MAPRED_IDENT_STRING=mapred
diff --git a/hadoop-mapreduce.service.template b/hadoop-mapreduce.service.template
new file mode 100644
index 0000000000000000000000000000000000000000..fb9080427e9d0743ffadc58b79b02f6e494edf97
--- /dev/null
+++ b/hadoop-mapreduce.service.template
@@ -0,0 +1,37 @@
+[Unit]
+Description=The Hadoop DAEMON daemon
+After=network.target
+After=NetworkManager.target
+
+[Service]
+Type=forking
+EnvironmentFile=-/etc/sysconfig/hadoop-mapreduce
+EnvironmentFile=-/etc/sysconfig/hadoop-DAEMON
+ExecStart=/usr/sbin/mr-jobhistory-daemon.sh start DAEMON
+ExecStop=/usr/sbin/mr-jobhistory-daemon.sh stop DAEMON
+User=mapred
+Group=hadoop
+PIDFile=/var/run/hadoop-mapreduce/mapred-mapred-DAEMON.pid
+LimitNOFILE=32768
+LimitNPROC=65536
+
+#######################################
+# Note: Below are cgroup options
+#######################################
+#Slice=
+#CPUAccounting=true
+#CPUShares=1024
+
+#MemoryAccounting=true
+#TBD: MemoryLimit=bytes, MemorySoftLimit=bytes
+
+#BlockIOAccounting=true
+#BlockIOWeight=??
+#BlockIODeviceWeight=??
+#TBD: BlockIOReadBandwidth=bytes, BlockIOWriteBandwidth=bytes
+
+#DeviceAllow=
+#DevicePolicy=auto|closed|strict
+
+[Install]
+WantedBy=multi-user.target
diff --git a/hadoop-tomcat-users.xml b/hadoop-tomcat-users.xml
new file mode 100644
index 0000000000000000000000000000000000000000..daa8e18deab30749fbbecbac106e435d47073743
--- /dev/null
+++ b/hadoop-tomcat-users.xml
@@ -0,0 +1,49 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-yarn.service.template b/hadoop-yarn.service.template
new file mode 100644
index 0000000000000000000000000000000000000000..00e53f4fc31f9d5baa7ef72014a4b2b441f5855f
--- /dev/null
+++ b/hadoop-yarn.service.template
@@ -0,0 +1,37 @@
+[Unit]
+Description=The Hadoop DAEMON daemon
+After=network.target
+After=NetworkManager.target
+
+[Service]
+Type=forking
+EnvironmentFile=-/etc/sysconfig/hadoop-yarn
+EnvironmentFile=-/etc/sysconfig/hadoop-DAEMON
+ExecStart=/usr/sbin/yarn-daemon.sh start DAEMON
+ExecStop=/usr/sbin/yarn-daemon.sh stop DAEMON
+User=yarn
+Group=hadoop
+PIDFile=/var/run/hadoop-yarn/yarn-yarn-DAEMON.pid
+LimitNOFILE=32768
+LimitNPROC=65536
+
+#######################################
+# Note: Below are cgroup options
+#######################################
+#Slice=
+#CPUAccounting=true
+#CPUShares=1024
+
+#MemoryAccounting=true
+#TBD: MemoryLimit=bytes, MemorySoftLimit=bytes
+
+#BlockIOAccounting=true
+#BlockIOWeight=??
+#BlockIODeviceWeight=??
+#TBD: BlockIOReadBandwidth=bytes, BlockIOWriteBandwidth=bytes
+
+#DeviceAllow=
+#DevicePolicy=auto|closed|strict
+
+[Install]
+WantedBy=multi-user.target
diff --git a/hadoop.logrotate b/hadoop.logrotate
new file mode 100644
index 0000000000000000000000000000000000000000..e722f00ba3090ceb5dfdd77387a7224474ef381c
--- /dev/null
+++ b/hadoop.logrotate
@@ -0,0 +1,8 @@
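+# NAME is replaced with the daemon type (hdfs, httpfs, yarn, mapreduce) when
+# hadoop.spec installs this file under /etc/logrotate.d.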
+/var/log/hadoop-NAME/*.log
+{
+ missingok
+ copytruncate
+ compress
+ weekly
+ rotate 52
+}
diff --git a/hadoop.spec b/hadoop.spec
new file mode 100644
index 0000000000000000000000000000000000000000..414ff87c7761a2e57691ac251b45870f9d5bc1ea
--- /dev/null
+++ b/hadoop.spec
@@ -0,0 +1,1017 @@
+%global _hardened_build 1
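+# Enable the distribution's hardened build flags (PIE) for the compiled binaries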
+
+%global hadoop_version %{version}
+%global hdfs_services hadoop-zkfc.service hadoop-datanode.service hadoop-secondarynamenode.service hadoop-namenode.service hadoop-journalnode.service
+%global mapreduce_services hadoop-historyserver.service
+%global yarn_services hadoop-proxyserver.service hadoop-resourcemanager.service hadoop-nodemanager.service hadoop-timelineserver.service
+
+# Filter out undesired provides and requires
+%global __requires_exclude_from ^%{_libdir}/%{name}/libhadoop.so$
+%global __provides_exclude_from ^%{_libdir}/%{name}/.*$
+
+Name: hadoop
+Version: 3.2.1
+Release: 1
+Summary: A software platform for processing vast amounts of data
+# The BSD license file is missing
+# https://issues.apache.org/jira/browse/HADOOP-9849
+License: ASL 2.0 and BSD and Zlib and BSL-1.0 and MPL-2.0 and EPL-1.0 and MIT
+URL: https://%{name}.apache.org
+Source0: https://www.apache.org/dist/%{name}/core/%{name}-%{version}/%{name}-%{version}-src.tar.gz
+Source1: %{name}-layout.sh
+Source2: %{name}-hdfs.service.template
+Source3: %{name}-mapreduce.service.template
+Source4: %{name}-yarn.service.template
+Source5: context.xml
+Source6: %{name}.logrotate
+Source7: %{name}-httpfs.sysconfig
+Source8: hdfs-create-dirs
+Source9: %{name}-tomcat-users.xml
+
+BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
+BuildRequires: java-1.8.0-openjdk-devel maven hostname maven-local tomcat cmake snappy openssl-devel
+BuildRequires: cyrus-sasl-devel chrpath systemd protobuf2-compiler protobuf2-devel protobuf2
+Requires: java-1.8.0-openjdk
+
+%description
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+%package client
+Summary: Libraries for Apache Hadoop clients
+BuildArch: noarch
+Requires: %{name}-common = %{version}-%{release}
+Requires: %{name}-hdfs = %{version}-%{release}
+Requires: %{name}-mapreduce = %{version}-%{release}
+Requires: %{name}-yarn = %{version}-%{release}
+
+%description client
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package provides libraries for Apache Hadoop clients.
+
+%package common
+Summary: Common files needed by Apache Hadoop daemons
+BuildArch: noarch
+Requires(pre): /usr/sbin/useradd
+Obsoletes: %{name}-javadoc < 2.4.1-22%{?dist}
+
+# These are required to satisfy the classpath symlinks
+Requires: antlr-tool
+Requires: apache-commons-beanutils
+Requires: avalon-framework
+Requires: avalon-logkit
+Requires: checkstyle
+Requires: coreutils
+Requires: geronimo-jms
+Requires: glassfish-jaxb
+Requires: glassfish-jsp
+Requires: glassfish-jsp-api
+Requires: istack-commons
+Requires: jakarta-commons-httpclient
+Requires: java-base64
+Requires: java-xmlbuilder
+Requires: javamail
+Requires: jettison
+Requires: jetty8
+Requires: jsr-311
+Requires: mockito
+Requires: objectweb-asm
+Requires: objenesis
+Requires: paranamer
+Requires: relaxngDatatype
+Requires: servlet3
+Requires: snappy-java
+Requires: txw2
+Requires: which
+
+%description common
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package contains common files and utilities needed by other Apache
+Hadoop modules.
+
+%package common-native
+Summary: The native Apache Hadoop library file
+Requires: %{name}-common = %{version}-%{release}
+
+%description common-native
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package contains the native-hadoop library.
+
+%package devel
+Summary: Headers for Apache Hadoop
+Requires: libhdfs%{?_isa} = %{version}-%{release}
+
+%description devel
+Header files for Apache Hadoop's hdfs library and other utilities.
+
+%package hdfs
+Summary: The Apache Hadoop Distributed File System
+BuildArch: noarch
+Requires: apache-commons-daemon-jsvc
+Requires: %{name}-common = %{version}-%{release}
+Requires(post): systemd
+Requires(preun): systemd
+Requires(postun): systemd
+
+%description hdfs
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+The Hadoop Distributed File System (HDFS) is the primary storage system
+used by Apache Hadoop applications.
+
+
+%package httpfs
+Summary: Provides web access to HDFS
+BuildArch: noarch
+Requires: apache-commons-dbcp
+Requires: ecj >= 1:4.2.1-6
+Requires: json_simple
+Requires: tomcat
+Requires: tomcat-lib
+Requires: tomcat-native
+Requires(post): systemd
+Requires(preun): systemd
+Requires(postun): systemd
+
+%description httpfs
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package provides a server that provides HTTP REST API support for
+the complete FileSystem/FileContext interface in HDFS.
+
+%package -n libhdfs
+Summary: The Apache Hadoop Filesystem Library
+Requires: %{name}-hdfs = %{version}-%{release}
+Requires: lzo
+
+%description -n libhdfs
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package provides the Apache Hadoop Filesystem Library.
+
+%package mapreduce
+Summary: Apache Hadoop MapReduce (MRv2)
+BuildArch: noarch
+Requires: %{name}-common = %{version}-%{release}
+Requires(post): systemd
+Requires(preun): systemd
+Requires(postun): systemd
+
+%description mapreduce
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package provides Apache Hadoop MapReduce (MRv2).
+
+%package mapreduce-examples
+Summary: Apache Hadoop MapReduce (MRv2) examples
+BuildArch: noarch
+Requires: hsqldb
+
+%description mapreduce-examples
+This package contains mapreduce examples.
+
+%package maven-plugin
+Summary: Apache Hadoop maven plugin
+BuildArch: noarch
+Requires: maven
+
+%description maven-plugin
+The Apache Hadoop Maven plugin.
+
+%package tests
+Summary: Apache Hadoop test resources
+BuildArch: noarch
+Requires: %{name}-common = %{version}-%{release}
+Requires: %{name}-hdfs = %{version}-%{release}
+Requires: %{name}-mapreduce = %{version}-%{release}
+Requires: %{name}-yarn = %{version}-%{release}
+
+%description tests
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package contains test related resources for Apache Hadoop.
+
+%package yarn
+Summary: Apache Hadoop YARN
+Requires: %{name}-common = %{version}-%{release}
+Requires: %{name}-mapreduce = %{version}-%{release}
+Requires: aopalliance
+Requires: atinject
+Requires: hamcrest
+Requires: hawtjni
+Requires: leveldbjni
+Requires(post): systemd
+Requires(preun): systemd
+Requires(postun): systemd
+
+%description yarn
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package contains Apache Hadoop YARN.
+
+%package yarn-security
+Summary: The ability to run Apache Hadoop YARN in secure mode
+Requires: %{name}-yarn = %{version}-%{release}
+
+%description yarn-security
+Apache Hadoop is a framework that allows for the distributed processing of
+large data sets across clusters of computers using simple programming models.
+It is designed to scale up from single servers to thousands of machines, each
+offering local computation and storage.
+
+This package contains files needed to run Apache Hadoop YARN in secure mode.
+
+%prep
+%autosetup -p1 -n %{name}-%{version}-src
+
+%pom_disable_module hadoop-minikdc hadoop-common-project
+%pom_disable_module hadoop-pipes hadoop-tools
+%pom_disable_module hadoop-azure hadoop-tools
+%pom_disable_module hadoop-yarn-server-timelineservice-hbase-tests hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
+
+# War files we don't want
+%mvn_package :%{name}-auth-examples __noinstall
+%mvn_package :%{name}-hdfs-httpfs __noinstall
+
+# Parts we don't want to distribute
+%mvn_package :%{name}-assemblies __noinstall
+
+# Workaround for bz1012059
+%mvn_package :%{name}-project-dist __noinstall
+
+# Create separate file lists for packaging
+%mvn_package :::tests: %{name}-tests
+%mvn_package :%{name}-*-tests::{}: %{name}-tests
+%mvn_package :%{name}-client*::{}: %{name}-client
+%mvn_package :%{name}-hdfs*::{}: %{name}-hdfs
+%mvn_package :%{name}-mapreduce-examples*::{}: %{name}-mapreduce-examples
+%mvn_package :%{name}-mapreduce*::{}: %{name}-mapreduce
+%mvn_package :%{name}-archives::{}: %{name}-mapreduce
+%mvn_package :%{name}-datajoin::{}: %{name}-mapreduce
+%mvn_package :%{name}-distcp::{}: %{name}-mapreduce
+%mvn_package :%{name}-extras::{}: %{name}-mapreduce
+%mvn_package :%{name}-gridmix::{}: %{name}-mapreduce
+%mvn_package :%{name}-openstack::{}: %{name}-mapreduce
+%mvn_package :%{name}-rumen::{}: %{name}-mapreduce
+%mvn_package :%{name}-sls::{}: %{name}-mapreduce
+%mvn_package :%{name}-streaming::{}: %{name}-mapreduce
+%mvn_package :%{name}-tools*::{}: %{name}-mapreduce
+%mvn_package :%{name}-maven-plugins::{}: %{name}-maven-plugin
+%mvn_package :%{name}-minicluster::{}: %{name}-tests
+%mvn_package :%{name}-yarn*::{}: %{name}-yarn
+
+# Jar files that need to be overridden due to installation location
+%mvn_file :%{name}-common::tests: %{name}/%{name}-common
+
+%build
+mvn -Dsnappy.lib=/usr/lib64 -Dbundle.snappy -Dcontainer-executor.conf.dir=%{_sysconfdir}/%{name} -Pdist,native -DskipTests -DskipIT -Dmaven.javadoc.skip=true package
+
+%install
+# Copy all jar files except those generated by the build
+# $1 the src directory
+# $2 the dest directory
+copy_dep_jars()
+{
+ find $1 ! -name "hadoop-*.jar" -name "*.jar" | xargs install -m 0644 -t $2
+ rm -f $2/tools-*.jar
+}
+
+# Create symlinks for jars from the build
+# $1 the location to create the symlink
+link_hadoop_jars()
+{
+ for f in `ls hadoop-* | grep -v tests | grep -v examples`
+ do
+ n=`echo $f | sed "s/-%{version}//"`
+ if [ -L $1/$n ]
+ then
+ continue
+ elif [ -e $1/$f ]
+ then
+ rm -f $1/$f $1/$n
+ fi
+ p=`find %{buildroot}/%{_jnidir} %{buildroot}/%{_javadir}/%{name} -name $n | sed "s#%{buildroot}##"`
+ %{__ln_s} $p $1/$n
+ done
+}
+
+%mvn_install
+
+install -d -m 0755 %{buildroot}/%{_libdir}/%{name}
+install -d -m 0755 %{buildroot}/%{_includedir}/%{name}
+install -d -m 0755 %{buildroot}/%{_jnidir}/%{name}
+
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/client/lib
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/common/lib
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+install -d -m 0755 %{buildroot}/%{_datadir}/%{name}/yarn/lib
+install -d -m 0755 %{buildroot}/%{_sysconfdir}/%{name}/tomcat/Catalina/localhost
+install -d -m 0755 %{buildroot}/%{_sysconfdir}/logrotate.d
+install -d -m 0755 %{buildroot}/%{_sysconfdir}/sysconfig
+install -d -m 0755 %{buildroot}/%{_tmpfilesdir}
+install -d -m 0755 %{buildroot}/%{_sharedstatedir}/%{name}-hdfs
+install -d -m 0755 %{buildroot}/%{_sharedstatedir}/tomcats/httpfs
+install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-yarn
+install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-httpfs/temp
+install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-httpfs/work
+install -d -m 0755 %{buildroot}/%{_var}/cache/%{name}-mapreduce
+install -d -m 0755 %{buildroot}/%{_var}/log/%{name}-yarn
+install -d -m 0755 %{buildroot}/%{_var}/log/%{name}-hdfs
+install -d -m 0755 %{buildroot}/%{_var}/log/%{name}-httpfs
+install -d -m 0755 %{buildroot}/%{_var}/log/%{name}-mapreduce
+install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-yarn
+install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-hdfs
+install -d -m 0755 %{buildroot}/%{_var}/run/%{name}-mapreduce
+
+basedir='%{name}-common-project/%{name}-common/target/%{name}-common-%{hadoop_version}'
+hdfsdir='%{name}-hdfs-project/%{name}-hdfs/target/%{name}-hdfs-%{hadoop_version}'
+httpfsdir='%{name}-hdfs-project/%{name}-hdfs-httpfs/target/%{name}-hdfs-httpfs-%{hadoop_version}'
+mapreddir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
+
+# Copy jar files and their POMs
+install -d -m 0755 %{buildroot}/%{_datadir}/java/%{name}
+install -d -m 0755 %{buildroot}/%{_datadir}/maven-poms/%{name}
+# client
+install -m 0755 %{name}-client-modules/%{name}-client/target/hadoop-client-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-client.jar
+echo %{_datadir}/java/%{name}/hadoop-client.jar >> .mfiles-hadoop-client
+install -m 0755 %{name}-client-modules/%{name}-client/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-client.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-client.pom >> .mfiles-hadoop-client
+# common
+install -m 0755 %{name}-common-project/%{name}-annotations/target/hadoop-annotations-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-annotations.jar
+echo %{_datadir}/java/%{name}/hadoop-annotations.jar >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-auth/target/hadoop-auth-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-auth.jar
+echo %{_datadir}/java/%{name}/hadoop-auth.jar >> .mfiles
+install -m 0755 %{name}-tools/%{name}-aws/target/hadoop-aws-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-aws.jar
+echo %{_datadir}/java/%{name}/hadoop-aws.jar >> .mfiles
+install -m 0755 %{name}-build-tools/target/hadoop-build-tools-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-build-tools.jar
+echo %{_datadir}/java/%{name}/hadoop-build-tools.jar >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-nfs/target/hadoop-nfs-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-nfs.jar
+echo %{_datadir}/java/%{name}/hadoop-nfs.jar >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-common/target/hadoop-common-%{version}.jar %{buildroot}/%{_prefix}/lib/java/hadoop/hadoop-common.jar
+echo %{_prefix}/lib/java/hadoop/hadoop-common.jar >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-annotations/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-annotations.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-annotations.pom >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-auth/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-auth.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-auth.pom >> .mfiles
+install -m 0755 %{name}-tools/%{name}-aws/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-aws.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-aws.pom >> .mfiles
+install -m 0755 %{name}-build-tools/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-build-tools.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-build-tools.pom >> .mfiles
+install -m 0755 %{name}-common-project/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-common-project.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-common-project.pom >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-common/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-common.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-common.pom >> .mfiles
+install -m 0755 %{name}-dist/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-dist.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-dist.pom >> .mfiles
+install -m 0755 %{name}-common-project/%{name}-nfs/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-nfs.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-nfs.pom >> .mfiles
+install -m 0755 %{name}-project/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-project.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-project.pom >> .mfiles
+echo %{_sysconfdir}/%{name}/hadoop-user-functions.sh.example >> .mfiles
+echo %{_sysconfdir}/%{name}/shellprofile.d/example.sh >> .mfiles
+echo %{_sysconfdir}/%{name}/workers >> .mfiles
+echo %{_prefix}/libexec/hadoop-functions.sh >> .mfiles
+echo %{_prefix}/libexec/hadoop-layout.sh.example >> .mfiles
+echo %{_prefix}/sbin/workers.sh >> .mfiles
+echo %{_datadir}/%{name}/common/hadoop-common.jar >> .mfiles
+# hdfs
+install -m 0755 %{name}-hdfs-project/%{name}-hdfs-nfs/target/hadoop-hdfs-nfs-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-hdfs-nfs.jar
+echo %{_datadir}/java/%{name}/hadoop-hdfs-nfs.jar >> .mfiles-hadoop-hdfs
+install -m 0755 %{name}-hdfs-project/%{name}-hdfs/target/hadoop-hdfs-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-hdfs.jar
+echo %{_datadir}/java/%{name}/hadoop-hdfs.jar >> .mfiles-hadoop-hdfs
+install -m 0755 %{name}-hdfs-project/%{name}-hdfs-nfs/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-hdfs-nfs.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-hdfs-nfs.pom >> .mfiles-hadoop-hdfs
+install -m 0755 %{name}-hdfs-project/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-hdfs-project.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-hdfs-project.pom >> .mfiles-hadoop-hdfs
+install -m 0755 %{name}-hdfs-project/%{name}-hdfs/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-hdfs.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-hdfs.pom >> .mfiles-hadoop-hdfs
+echo %{_prefix}/libexec/shellprofile.d/hadoop-hdfs.sh >> .mfiles-hadoop-hdfs
+# mapreduce
+install -m 0755 %{name}-tools/%{name}-archives/target/hadoop-archives-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-archives.jar
+echo %{_datadir}/java/%{name}/hadoop-archives.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-datajoin/target/hadoop-datajoin-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-datajoin.jar
+echo %{_datadir}/java/%{name}/hadoop-datajoin.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-distcp/target/hadoop-distcp-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-distcp.jar
+echo %{_datadir}/java/%{name}/hadoop-distcp.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-extras/target/hadoop-extras-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-extras.jar
+echo %{_datadir}/java/%{name}/hadoop-extras.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-gridmix/target/hadoop-gridmix-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-gridmix.jar
+echo %{_datadir}/java/%{name}/hadoop-gridmix.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-app/target/hadoop-mapreduce-client-app-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-app.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-app.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-common/target/hadoop-mapreduce-client-common-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-common.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-common.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-core/target/hadoop-mapreduce-client-core-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-core.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-core.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-hs-plugins/target/hadoop-mapreduce-client-hs-plugins-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-hs-plugins.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-hs-plugins.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-hs/target/hadoop-mapreduce-client-hs-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-hs.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-hs.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-jobclient/target/hadoop-mapreduce-client-jobclient-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-jobclient.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-jobclient.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-shuffle/target/hadoop-mapreduce-client-shuffle-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-shuffle.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-shuffle.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-openstack/target/hadoop-openstack-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-openstack.jar
+echo %{_datadir}/java/%{name}/hadoop-openstack.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-rumen/target/hadoop-rumen-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-rumen.jar
+echo %{_datadir}/java/%{name}/hadoop-rumen.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-sls/target/hadoop-sls-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-sls.jar
+echo %{_datadir}/java/%{name}/hadoop-sls.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-streaming/target/hadoop-streaming-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-streaming.jar
+echo %{_datadir}/java/%{name}/hadoop-streaming.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-tools-dist/target/hadoop-tools-dist-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-tools-dist.jar
+echo %{_datadir}/java/%{name}/hadoop-tools-dist.jar >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-archives/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-archives.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-archives.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-datajoin/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-datajoin.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-datajoin.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-distcp/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-distcp.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-distcp.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-extras/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-extras.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-extras.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-gridmix/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-gridmix.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-gridmix.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-app/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-app.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-app.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-common/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-common.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-common.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-core/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-core.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-core.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-hs-plugins/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-hs-plugins.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-hs-plugins.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-hs/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-hs.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-hs.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-jobclient/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-jobclient.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-jobclient.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-shuffle/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-shuffle.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-client-shuffle.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-mapreduce-project/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-openstack/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-openstack.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-openstack.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-rumen/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-rumen.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-rumen.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-sls/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-sls.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-sls.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-streaming/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-streaming.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-streaming.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/%{name}-tools-dist/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-tools-dist.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-tools-dist.pom >> .mfiles-hadoop-mapreduce
+install -m 0755 %{name}-tools/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-tools.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-tools.pom >> .mfiles-hadoop-mapreduce
+echo %{_prefix}/libexec/shellprofile.d/hadoop-mapreduce.sh >> .mfiles-hadoop-mapreduce
+# mapreduce-examples
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-examples/target/hadoop-mapreduce-examples-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-examples.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-examples.jar >> .mfiles-hadoop-mapreduce-examples
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-examples/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-mapreduce-examples.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-mapreduce-examples.pom >> .mfiles-hadoop-mapreduce-examples
+# maven-plugin
+install -m 0755 %{name}-maven-plugins/target/hadoop-maven-plugins-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-maven-plugins.jar
+echo %{_datadir}/java/%{name}/hadoop-maven-plugins.jar >> .mfiles-hadoop-maven-plugin
+install -m 0755 %{name}-maven-plugins/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-maven-plugins.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-maven-plugins.pom >> .mfiles-hadoop-maven-plugin
+# tests
+install -m 0755 %{name}-client-modules/%{name}-client/target/hadoop-client-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-client-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-client-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-common-project/%{name}-common/target/hadoop-common-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-common-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-common-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-hdfs-project/%{name}-hdfs/target/hadoop-hdfs-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-hdfs-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-hdfs-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-app/target/hadoop-mapreduce-client-app-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-app-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-app-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-mapreduce-project/%{name}-mapreduce-client/%{name}-mapreduce-client-jobclient/target/hadoop-mapreduce-client-jobclient-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-mapreduce-client-jobclient-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-mapreduce-client-jobclient-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-minicluster/target/hadoop-minicluster-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-minicluster.jar
+echo %{_datadir}/java/%{name}/hadoop-minicluster.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-tools/%{name}-tools-dist/target/hadoop-tools-dist-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-tools-dist-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-tools-dist-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-common/target/hadoop-yarn-common-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-common-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-common-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-registry/target/hadoop-yarn-registry-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-registry-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-registry-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-resourcemanager/target/hadoop-yarn-server-resourcemanager-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-resourcemanager-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-resourcemanager-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-sharedcachemanager/target/hadoop-yarn-server-sharedcachemanager-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-sharedcachemanager-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-sharedcachemanager-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-tests/target/hadoop-yarn-server-tests-%{version}-tests.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-tests-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-tests-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-tests/target/hadoop-yarn-server-tests-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-tests.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-tests.jar >> .mfiles-hadoop-tests
+install -m 0755 %{name}-minicluster/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-minicluster.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-minicluster.pom >> .mfiles-hadoop-tests
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-tests/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-tests.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-tests.pom >> .mfiles-hadoop-tests
+# yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-api/target/hadoop-yarn-api-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-api.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-api.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-applications/%{name}-yarn-applications-distributedshell/target/hadoop-yarn-applications-distributedshell-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-applications-distributedshell.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-applications-distributedshell.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-applications/%{name}-yarn-applications-unmanaged-am-launcher/target/hadoop-yarn-applications-unmanaged-am-launcher-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-applications-unmanaged-am-launcher.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-applications-unmanaged-am-launcher.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-client/target/hadoop-yarn-client-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-client.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-client.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-common/target/hadoop-yarn-common-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-common.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-common.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-registry/target/hadoop-yarn-registry-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-registry.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-registry.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-applicationhistoryservice/target/hadoop-yarn-server-applicationhistoryservice-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-applicationhistoryservice.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-applicationhistoryservice.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-common/target/hadoop-yarn-server-common-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-common.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-common.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-resourcemanager/target/hadoop-yarn-server-resourcemanager-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-resourcemanager.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-resourcemanager.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-sharedcachemanager/target/hadoop-yarn-server-sharedcachemanager-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-sharedcachemanager.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-sharedcachemanager.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/%{name}-yarn-server-web-proxy/target/hadoop-yarn-server-web-proxy-%{version}.jar %{buildroot}/%{_datadir}/java/%{name}/hadoop-yarn-server-web-proxy.jar
+echo %{_datadir}/java/%{name}/hadoop-yarn-server-web-proxy.jar >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-api/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-api.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-api.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-applications-distributedshell.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-applications-distributedshell.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-applications-unmanaged-am-launcher.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-applications-unmanaged-am-launcher.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-applications/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-applications.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-applications.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-client/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-client.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-client.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-common/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-common.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-common.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-registry/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-registry.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-registry.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-applicationhistoryservice.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-applicationhistoryservice.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-common/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-common.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-common.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-nodemanager/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-nodemanager.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-nodemanager.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-resourcemanager.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-resourcemanager.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-sharedcachemanager/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-sharedcachemanager.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-sharedcachemanager.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/hadoop-yarn-server-web-proxy/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server-web-proxy.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server-web-proxy.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-server/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-server.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-server.pom >> .mfiles-hadoop-yarn
+install -m 0755 %{name}-yarn-project/%{name}-yarn/%{name}-yarn-site/pom.xml %{buildroot}/%{_datadir}/maven-poms/%{name}/hadoop-yarn-site.pom
+echo %{_datadir}/maven-poms/%{name}/hadoop-yarn-site.pom >> .mfiles-hadoop-yarn
+echo %{_sysconfdir}/%{name}/yarnservice-log4j.properties >> .mfiles-hadoop-yarn
+echo %{_prefix}/bin/container-executor >> .mfiles-hadoop-yarn
+echo %{_prefix}/bin/oom-listener >> .mfiles-hadoop-yarn
+echo %{_prefix}/bin/test-container-executor >> .mfiles-hadoop-yarn
+echo %{_prefix}/libexec/shellprofile.d/hadoop-yarn.sh >> .mfiles-hadoop-yarn
+echo %{_prefix}/sbin/FederationStateStore/* >> .mfiles-hadoop-yarn
+# copy script folders
+for dir in bin libexec sbin
+do
+ cp -arf $basedir/$dir %{buildroot}/%{_prefix}
+ cp -arf $hdfsdir/$dir %{buildroot}/%{_prefix}
+ cp -arf $mapreddir/$dir %{buildroot}/%{_prefix}
+ cp -arf $yarndir/$dir %{buildroot}/%{_prefix}
+done
+
+# This binary is obsolete and causes a conflict with qt-devel
+rm -rf %{buildroot}/%{_bindir}/rcc
+
+# Duplicate files
+rm -f %{buildroot}/%{_sbindir}/hdfs-config.sh
+
+# copy config files
+cp -arf $basedir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $httpfsdir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $mapreddir/etc/* %{buildroot}/%{_sysconfdir}
+cp -arf $yarndir/etc/* %{buildroot}/%{_sysconfdir}
+
+# copy binaries
+cp -arf $basedir/lib/native/libhadoop.so* %{buildroot}/%{_libdir}/%{name}
+chrpath --delete %{buildroot}/%{_libdir}/%{name}/*
+cp -arf ./hadoop-hdfs-project/hadoop-hdfs-native-client/target/hadoop-hdfs-native-client-%{version}/include/hdfs.h %{buildroot}/%{_includedir}/%{name}
+cp -arf ./hadoop-hdfs-project/hadoop-hdfs-native-client/target/hadoop-hdfs-native-client-%{version}/lib/native/libhdfs.so* %{buildroot}/%{_libdir}
+chrpath --delete %{buildroot}/%{_libdir}/libhdfs*
+
+# Not needed since httpfs is deployed with existing systemd setup
+rm -f %{buildroot}/%{_sbindir}/httpfs.sh
+rm -f %{buildroot}/%{_libexecdir}/httpfs-config.sh
+rm -f %{buildroot}/%{_bindir}/httpfs-env.sh
+
+# Remove files with .cmd extension
+find %{buildroot} -name "*.cmd" | xargs rm -f
+
+# Modify hadoop-env.sh to point to correct locations for JAVA_HOME
+# and JSVC_HOME.
+sed -i "s|\${JAVA_HOME}|/usr/lib/jvm/jre|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
+sed -i "s|\${JSVC_HOME}|/usr/bin|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
+
+# Ensure the java provided DocumentBuilderFactory is used
+sed -i "s|\(HADOOP_OPTS.*=.*\)\$HADOOP_CLIENT_OPTS|\1 -Djavax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl \$HADOOP_CLIENT_OPTS|" %{buildroot}/%{_sysconfdir}/%{name}/%{name}-env.sh
+echo "export YARN_OPTS=\"\$YARN_OPTS -Djavax.xml.parsers.DocumentBuilderFactory=com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl\"" >> %{buildroot}/%{_sysconfdir}/%{name}/yarn-env.sh
+
+# Workaround for bz1012059
+install -d -m 0755 %{buildroot}/%{_mavenpomdir}/
+install -pm 644 hadoop-project-dist/pom.xml %{buildroot}/%{_mavenpomdir}/JPP.%{name}-%{name}-project-dist.pom
+%{__ln_s} %{_jnidir}/%{name}/hadoop-common.jar %{buildroot}/%{_datadir}/%{name}/common
+%{__ln_s} %{_javadir}/%{name}/hadoop-hdfs.jar %{buildroot}/%{_datadir}/%{name}/hdfs
+%{__ln_s} %{_javadir}/%{name}/hadoop-client.jar %{buildroot}/%{_datadir}/%{name}/client
+
+# client jar dependencies
+copy_dep_jars hadoop-client-modules/%{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client/lib %{buildroot}/%{_datadir}/%{name}/client/lib
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/client/lib
+pushd hadoop-client-modules/%{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client/lib
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/client/lib
+popd
+pushd hadoop-client-modules/%{name}-client/target/%{name}-client-%{hadoop_version}/share/%{name}/client
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/client
+popd
+
+# common jar dependencies
+copy_dep_jars $basedir/share/%{name}/common/lib %{buildroot}/%{_datadir}/%{name}/common/lib
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/common/lib
+pushd $basedir/share/%{name}/common
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/common
+popd
+pushd $basedir/share/%{name}/common/lib
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/common/lib
+popd
+
+# hdfs jar dependencies
+copy_dep_jars $hdfsdir/share/%{name}/hdfs/lib %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+%{__ln_s} %{_jnidir}/%{name}/%{name}-hdfs-bkjournal.jar %{buildroot}/%{_datadir}/%{name}/hdfs/lib
+pushd $hdfsdir/share/%{name}/hdfs
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/hdfs
+popd
+
+# httpfs
+# Create the webapp directory structure
+pushd %{buildroot}/%{_sharedstatedir}/tomcats/httpfs
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/conf conf
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/lib lib
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/logs logs
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/temp temp
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/webapps webapps
+ %{__ln_s} %{_datadir}/%{name}/httpfs/tomcat/work work
+popd
+
+# Copy the tomcat configuration and overlay with specific configuration bits.
+# This is needed so the httpfs instance won't collide with a system running
+# tomcat
+for cfgfile in catalina.policy catalina.properties context.xml \
+ tomcat.conf web.xml server.xml logging.properties;
+do
+ cp -a %{_sysconfdir}/tomcat/$cfgfile %{buildroot}/%{_sysconfdir}/%{name}/tomcat
+done
+
+# Patch, in place, the Tomcat configuration files delivered with the current
+# Fedora release. See BZ#1295968 for the rationale.
+sed -i -e 's/8005/${httpfs.admin.port}/g' -e 's/8080/${httpfs.http.port}/g' %{buildroot}/%{_sysconfdir}/%{name}/tomcat/server.xml
+sed -i -e 's/catalina.base/httpfs.log.dir/g' %{buildroot}/%{_sysconfdir}/%{name}/tomcat/logging.properties
+# With its default permissions only the root and tomcat users can read that
+# file, not the build user, so copying it from the system location would fail
+# the build here.
+install -m 660 %{SOURCE9} %{buildroot}/%{_sysconfdir}/%{name}/tomcat/tomcat-users.xml
+
+# Copy the httpfs webapp
+cp -arf %{name}-hdfs-project/%{name}-hdfs-httpfs/target/classes/webapps/webhdfs %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps
+
+# Tell tomcat to follow symlinks
+install -d -m 0766 %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/META-INF/
+cp %{SOURCE5} %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/META-INF/
+
+# Remove the jars included in the webapp and create symlinks
+rm -f %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib/tools*.jar
+rm -f %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib/tomcat-*.jar
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat/webapps/webhdfs/WEB-INF/lib
+
+pushd %{buildroot}/%{_datadir}/%{name}/httpfs/tomcat
+ %{__ln_s} %{_datadir}/tomcat/bin bin
+ %{__ln_s} %{_sysconfdir}/%{name}/tomcat conf
+ %{__ln_s} %{_datadir}/tomcat/lib lib
+ %{__ln_s} %{_var}/cache/%{name}-httpfs/temp temp
+ %{__ln_s} %{_var}/cache/%{name}-httpfs/work work
+ %{__ln_s} %{_var}/log/%{name}-httpfs logs
+popd
+
+# mapreduce jar dependencies
+mrdir='%{name}-mapreduce-project/target/%{name}-mapreduce-%{hadoop_version}'
+copy_dep_jars $mrdir/share/%{name}/mapreduce/lib %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/mapreduce/lib
+pushd $mrdir/share/%{name}/mapreduce
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/mapreduce
+popd
+
+# yarn jar dependencies
+yarndir='%{name}-yarn-project/target/%{name}-yarn-project-%{hadoop_version}'
+copy_dep_jars $yarndir/share/%{name}/yarn/lib %{buildroot}/%{_datadir}/%{name}/yarn/lib
+%{_bindir}/xmvn-subst %{buildroot}/%{_datadir}/%{name}/yarn/lib
+%{__ln_s} %{_javadir}/%{name}/%{name}-annotations.jar %{buildroot}/%{_datadir}/%{name}/yarn/lib
+pushd $yarndir/share/%{name}/yarn
+ link_hadoop_jars %{buildroot}/%{_datadir}/%{name}/yarn
+popd
+
+# Install hdfs webapp bits
+cp -arf hadoop-hdfs-project/hadoop-hdfs/target/webapps/* %{buildroot}/%{_datadir}/%{name}/hdfs/webapps
+
+# hadoop layout. Convert to appropriate lib location for 32 and 64 bit archs
+lib=$(echo %{?_libdir} | sed -e 's:/usr/\(.*\):\1:')
+if [ "$lib" = "%_libdir" ]; then
+ echo "_libdir is not located in /usr. Lib location is wrong"
+ exit 1
+fi
+sed -e "s|HADOOP_COMMON_LIB_NATIVE_DIR\s*=.*|HADOOP_COMMON_LIB_NATIVE_DIR=$lib/%{name}|" %{SOURCE1} > %{buildroot}/%{_libexecdir}/%{name}-layout.sh
+
+# systemd configuration
+install -d -m 0755 %{buildroot}/%{_unitdir}/
+for service in %{hdfs_services} %{mapreduce_services} %{yarn_services}
+do
+ s=`echo $service | cut -d'-' -f 2 | cut -d'.' -f 1`
+ daemon=$s
+ if [[ "%{hdfs_services}" == *$service* ]]
+ then
+ src=%{SOURCE2}
+ elif [[ "%{mapreduce_services}" == *$service* ]]
+ then
+ src=%{SOURCE3}
+ elif [[ "%{yarn_services}" == *$service* ]]
+ then
+ if [[ "$s" == "timelineserver" ]]
+ then
+ daemon='historyserver'
+ fi
+ src=%{SOURCE4}
+ else
+ echo "Failed to determine the type of service for $service"
+ exit 1
+ fi
+ sed -e "s|DAEMON|$daemon|g" $src > %{buildroot}/%{_unitdir}/%{name}-$s.service
+done
+
+cp -f %{SOURCE7} %{buildroot}/%{_sysconfdir}/sysconfig/tomcat@httpfs
+
+# Ensure /var/run directories are recreated on boot
+echo "d %{_var}/run/%{name}-yarn 0775 yarn hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-yarn.conf
+echo "d %{_var}/run/%{name}-hdfs 0775 hdfs hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-hdfs.conf
+echo "d %{_var}/run/%{name}-mapreduce 0775 mapred hadoop -" > %{buildroot}/%{_tmpfilesdir}/%{name}-mapreduce.conf
+
+# logrotate config
+for type in hdfs httpfs yarn mapreduce
+do
+ sed -e "s|NAME|$type|" %{SOURCE6} > %{buildroot}/%{_sysconfdir}/logrotate.d/%{name}-$type
+done
+sed -i "s|{|%{_var}/log/hadoop-hdfs/*.audit\n{|" %{buildroot}/%{_sysconfdir}/logrotate.d/%{name}-hdfs
+
+# hdfs init script
+install -m 755 %{SOURCE8} %{buildroot}/%{_sbindir}
+
+%pretrans hdfs -p <lua>
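+-- If an earlier install left the webapps path behind as a symlink, remove it
+-- first: RPM cannot replace a symlink with a directory during the transaction.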
+path = "%{_datadir}/%{name}/hdfs/webapps"
+st = posix.stat(path)
+if st and st.type == "link" then
+ os.remove(path)
+end
+
+%pre common
+getent group hadoop >/dev/null || groupadd -r hadoop
+
+%pre hdfs
+getent group hdfs >/dev/null || groupadd -r hdfs
+getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop HDFS" --shell /sbin/nologin -M -r -g hdfs -G hadoop --home %{_sharedstatedir}/%{name}-hdfs hdfs
+
+%pre mapreduce
+getent group mapred >/dev/null || groupadd -r mapred
+getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop MapReduce" --shell /sbin/nologin -M -r -g mapred -G hadoop --home %{_var}/cache/%{name}-mapreduce mapred
+
+%pre yarn
+getent group yarn >/dev/null || groupadd -r yarn
+getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Apache Hadoop Yarn" --shell /sbin/nologin -M -r -g yarn -G hadoop --home %{_var}/cache/%{name}-yarn yarn
+
+%preun hdfs
+%systemd_preun %{hdfs_services}
+
+%preun mapreduce
+%systemd_preun %{mapreduce_services}
+
+%preun yarn
+%systemd_preun %{yarn_services}
+
+%post common-native -p /sbin/ldconfig
+
+%post hdfs
+# Change the home directory for the hdfs user
+if [[ `getent passwd hdfs | cut -d: -f 6` != "%{_sharedstatedir}/%{name}-hdfs" ]]
+then
+ /usr/sbin/usermod -d %{_sharedstatedir}/%{name}-hdfs hdfs
+fi
+
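+# $1 is the number of installed instances after this operation; greater than 1 means an upgrade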
+if [ $1 -gt 1 ]
+then
+ if [ -d %{_var}/cache/%{name}-hdfs ] && [ ! -L %{_var}/cache/%{name}-hdfs ]
+ then
+ # Move the existing hdfs data to the new location
+ mv -f %{_var}/cache/%{name}-hdfs/* %{_sharedstatedir}/%{name}-hdfs/
+ fi
+fi
+%systemd_post %{hdfs_services}
+
+%post -n libhdfs -p /sbin/ldconfig
+
+%post mapreduce
+%systemd_post %{mapreduce_services}
+
+%post yarn
+%systemd_post %{yarn_services}
+
+%postun common-native -p /sbin/ldconfig
+
+%postun hdfs
+%systemd_postun_with_restart %{hdfs_services}
+
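+# $1 equals 0 here only on full erase, not on upgrade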
+if [ $1 -lt 1 ]
+then
+ # Remove the compatibility symlink
+ rm -f %{_var}/cache/%{name}-hdfs
+fi
+
+%postun -n libhdfs -p /sbin/ldconfig
+
+%postun mapreduce
+%systemd_postun_with_restart %{mapreduce_services}
+
+%postun yarn
+%systemd_postun_with_restart %{yarn_services}
+
+%posttrans hdfs
+# Create a compatibility symlink at the old hdfs data location in case a
+# user-modified configuration file still points there rather than at the new
+# location
+if [ ! -e %{_var}/cache/%{name}-hdfs ]
+then
+ %{__ln_s} %{_sharedstatedir}/%{name}-hdfs %{_var}/cache
+fi
+
+%files -f .mfiles-%{name}-client client
+%{_datadir}/%{name}/client
+
+%files -f .mfiles common
+%doc LICENSE.txt
+%doc NOTICE.txt
+%doc README.txt
+%config(noreplace) %{_sysconfdir}/%{name}/core-site.xml
+%config(noreplace) %{_sysconfdir}/%{name}/%{name}-env.sh
+%config(noreplace) %{_sysconfdir}/%{name}/%{name}-metrics2.properties
+%config(noreplace) %{_sysconfdir}/%{name}/%{name}-policy.xml
+%config(noreplace) %{_sysconfdir}/%{name}/log4j.properties
+%config(noreplace) %{_sysconfdir}/%{name}/ssl-client.xml.example
+%config(noreplace) %{_sysconfdir}/%{name}/ssl-server.xml.example
+%config(noreplace) %{_sysconfdir}/%{name}/configuration.xsl
+
+%dir %{_datadir}/%{name}
+%dir %{_datadir}/%{name}/common
+%{_datadir}/%{name}/common/lib
+%{_libexecdir}/%{name}-config.sh
+%{_libexecdir}/%{name}-layout.sh
+
+# Workaround for bz1012059
+%{_mavenpomdir}/JPP.%{name}-%{name}-project-dist.pom
+
+%{_bindir}/%{name}
+%{_sbindir}/%{name}-daemon.sh
+%{_sbindir}/%{name}-daemons.sh
+%{_sbindir}/start-all.sh
+%{_sbindir}/start-balancer.sh
+%{_sbindir}/start-dfs.sh
+%{_sbindir}/start-secure-dns.sh
+%{_sbindir}/stop-all.sh
+%{_sbindir}/stop-balancer.sh
+%{_sbindir}/stop-dfs.sh
+%{_sbindir}/stop-secure-dns.sh
+
+%files common-native
+%{_libdir}/%{name}/libhadoop.*
+
+%files devel
+%{_includedir}/%{name}
+%{_libdir}/libhdfs.so
+
+%files -f .mfiles-%{name}-hdfs hdfs
+%{_datadir}/%{name}/hdfs
+%{_unitdir}/%{name}-datanode.service
+%{_unitdir}/%{name}-namenode.service
+%{_unitdir}/%{name}-journalnode.service
+%{_unitdir}/%{name}-secondarynamenode.service
+%{_unitdir}/%{name}-zkfc.service
+%{_libexecdir}/hdfs-config.sh
+%{_bindir}/hdfs
+%{_sbindir}/distribute-exclude.sh
+%{_sbindir}/refresh-namenodes.sh
+%{_sbindir}/hdfs-create-dirs
+%{_tmpfilesdir}/%{name}-hdfs.conf
+%config(noreplace) %attr(644, root, root) %{_sysconfdir}/logrotate.d/%{name}-hdfs
+%attr(0755,hdfs,hadoop) %dir %{_var}/run/%{name}-hdfs
+%attr(0755,hdfs,hadoop) %dir %{_var}/log/%{name}-hdfs
+%attr(0755,hdfs,hadoop) %dir %{_sharedstatedir}/%{name}-hdfs
+
+
+%files httpfs
+%config(noreplace) %{_sysconfdir}/sysconfig/tomcat@httpfs
+%config(noreplace) %{_sysconfdir}/%{name}/httpfs-env.sh
+%config(noreplace) %{_sysconfdir}/%{name}/httpfs-log4j.properties
+%config(noreplace) %{_sysconfdir}/%{name}/httpfs-signature.secret
+%config(noreplace) %{_sysconfdir}/%{name}/httpfs-site.xml
+%attr(-,tomcat,tomcat) %config(noreplace) %{_sysconfdir}/%{name}/tomcat/*.*
+%attr(0775,root,tomcat) %dir %{_sysconfdir}/%{name}/tomcat
+%attr(0775,root,tomcat) %dir %{_sysconfdir}/%{name}/tomcat/Catalina
+%attr(0775,root,tomcat) %dir %{_sysconfdir}/%{name}/tomcat/Catalina/localhost
+%{_datadir}/%{name}/httpfs
+%{_sharedstatedir}/tomcats/httpfs
+%config(noreplace) %attr(644, root, root) %{_sysconfdir}/logrotate.d/%{name}-httpfs
+%attr(0775,root,tomcat) %dir %{_var}/log/%{name}-httpfs
+%attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs
+%attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs/temp
+%attr(0775,root,tomcat) %dir %{_var}/cache/%{name}-httpfs/work
+
+%files -n libhdfs
+%{_libdir}/libhdfs.so.*
+
+%files -f .mfiles-%{name}-mapreduce mapreduce
+%config(noreplace) %{_sysconfdir}/%{name}/mapred-env.sh
+%config(noreplace) %{_sysconfdir}/%{name}/mapred-queues.xml.template
+%config(noreplace) %{_sysconfdir}/%{name}/mapred-site.xml
+%{_datadir}/%{name}/mapreduce
+%{_libexecdir}/mapred-config.sh
+%{_unitdir}/%{name}-historyserver.service
+%{_bindir}/mapred
+%{_sbindir}/mr-jobhistory-daemon.sh
+%{_tmpfilesdir}/%{name}-mapreduce.conf
+%config(noreplace) %attr(644, root, root) %{_sysconfdir}/logrotate.d/%{name}-mapreduce
+%attr(0755,mapred,hadoop) %dir %{_var}/run/%{name}-mapreduce
+%attr(0755,mapred,hadoop) %dir %{_var}/log/%{name}-mapreduce
+%attr(0755,mapred,hadoop) %dir %{_var}/cache/%{name}-mapreduce
+
+%files -f .mfiles-%{name}-mapreduce-examples mapreduce-examples
+
+%files -f .mfiles-%{name}-maven-plugin maven-plugin
+
+%files -f .mfiles-%{name}-tests tests
+
+%files -f .mfiles-%{name}-yarn yarn
+%config(noreplace) %{_sysconfdir}/%{name}/capacity-scheduler.xml
+%config(noreplace) %{_sysconfdir}/%{name}/yarn-env.sh
+%config(noreplace) %{_sysconfdir}/%{name}/yarn-site.xml
+%{_unitdir}/%{name}-nodemanager.service
+%{_unitdir}/%{name}-proxyserver.service
+%{_unitdir}/%{name}-resourcemanager.service
+%{_unitdir}/%{name}-timelineserver.service
+%{_libexecdir}/yarn-config.sh
+%{_datadir}/%{name}/yarn
+%{_bindir}/yarn
+%{_sbindir}/yarn-daemon.sh
+%{_sbindir}/yarn-daemons.sh
+%{_sbindir}/start-yarn.sh
+%{_sbindir}/stop-yarn.sh
+%{_tmpfilesdir}/%{name}-yarn.conf
+%config(noreplace) %attr(644, root, root) %{_sysconfdir}/logrotate.d/%{name}-yarn
+%attr(0755,yarn,hadoop) %dir %{_var}/run/%{name}-yarn
+%attr(0755,yarn,hadoop) %dir %{_var}/log/%{name}-yarn
+%attr(0755,yarn,hadoop) %dir %{_var}/cache/%{name}-yarn
+
+%files yarn-security
+%config(noreplace) %{_sysconfdir}/%{name}/container-executor.cfg
+
+%changelog
+* Fri Mar 12 2021 Ge Wang - 3.2.1-1
+- Init package
diff --git a/hadoop.yaml b/hadoop.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c651911c56ccada49514d416e1c41d268dc3695c
--- /dev/null
+++ b/hadoop.yaml
@@ -0,0 +1,4 @@
+version_control: NA
+src_repo: NA
+tag_prefix: NA
+separator: NA
diff --git a/hdfs-create-dirs b/hdfs-create-dirs
new file mode 100644
index 0000000000000000000000000000000000000000..0f0d7d85b0aec5b28c9d463f874106549f2f3e80
--- /dev/null
+++ b/hdfs-create-dirs
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+hdfs_dirs="/user /var/log /tmp"
+mapred_dirs="/tmp/hadoop-yarn/staging /tmp/hadoop-yarn/staging/history /tmp/hadoop-yarn/staging/history/done /tmp/hadoop-yarn/staging/history/done_intermediate"
+yarn_dirs="/tmp/hadoop-yarn /var/log/hadoop-yarn"
+
+# Must be run as root
+if [[ $EUID -ne 0 ]]
+then
+ echo "This must be run as root" 1>&2
+ exit 1
+fi
+
+# Start the namenode if it isn't running
+started=0
+systemctl status hadoop-namenode > /dev/null 2>&1
+rc=$?
+if [[ $rc -gt 0 ]]
+then
+ # Format the namenode if it hasn't been formatted
+ runuser hdfs -s /bin/bash /bin/bash -c "hdfs namenode -format -nonInteractive" > /dev/null 2>&1
+ if [[ $? -eq 0 ]]
+ then
+ echo "Formatted the Hadoop namenode"
+ fi
+
+ echo "Starting the Hadoop namenode"
+ systemctl start hadoop-namenode > /dev/null 2>&1
+ rc=$?
+ started=1
+fi
+
+if [[ $rc -ne 0 ]]
+then
+ echo "The Hadoop namenode failed to start"
+ exit 1
+fi
+
+for dir in $hdfs_dirs $yarn_dirs $mapred_dirs
+do
+ echo "Creating directory $dir"
+ runuser hdfs -s /bin/bash /bin/bash -c "hadoop fs -mkdir -p $dir" > /dev/null 2>&1
+done
+
+echo "Setting permissions on /tmp"
+runuser hdfs -s /bin/bash /bin/bash -c "hadoop fs -chmod 1777 /tmp" > /dev/null 2>&1
+
+for dir in $mapred_dirs
+do
+ echo "Setting permissions and ownership for $dir"
+ runuser hdfs -s /bin/bash /bin/bash -c "hadoop fs -chown mapred:mapred $dir" > /dev/null 2>&1
+ runuser hdfs -s /bin/bash /bin/bash -c "hadoop fs -chmod 1777 $dir" > /dev/null 2>&1
+done
+
+for dir in $yarn_dirs
+do
+ echo "Setting permissions and ownership for $dir"
+ runuser hdfs -s /bin/bash /bin/bash -c "hadoop fs -chown yarn:mapred $dir" > /dev/null 2>&1
+done
+
+# Stop the namenode if we started it
+if [[ $started -gt 0 ]]
+then
+ echo "Stopping the Hadoop namenode"
+ systemctl stop hadoop-namenode > /dev/null 2>&1
+fi