#!/bin/bash
###############################################################################
# Single-node installer for the Apache Hadoop ecosystem:
#   Hadoop 1.2.1, Hive 0.11.0, HBase 0.94.18, Pig 0.12.0
#   (plus the ant / maven / forrest build tools Pig needs).
#
# Usage:  bash SingileNode_Ecosystem.sh
# Must be run with bash, not sh: `source` is a bashism and the script warns
# about the `source: not found` error sh produces.
#
# Prompts for: the sudo password, an optional new hostname, whether to
# install the stack, and whether to auto-start Hadoop/HBase at boot.
###############################################################################

echo " "
echo "If you runing this script with bash you will not get any error. You are runing this script with sh. You will get this error"
echo 'If you get this error below "SingileNode_Ecosystem.sh: 16: SingileNode_Ecosystem.sh: source: not found"'
echo " "
source /etc/environment
echo " "
echo " "
echo 'Just run this script with bash::: "bash SingileNode_Ecosystem.sh"'
echo " "
echo " "
echo 'run this command: "bash SingileNode_Ecosystem.sh"'
echo " "
echo "Enter password for sudo user::"
read -r cond

# Run a single command as root, feeding the cached password to `sudo -S`
# on stdin (replaces the original's fragile `$na | $a` echo-pipe hack).
run_sudo() {
  printf '%s\n' "$cond" | /usr/bin/sudo -S "$@"
}

# Rebuild /etc/environment: PATH from bin.list (one directory per line,
# joined with ':') followed by the *_HOME lines accumulated in ho.list.
write_environment() {
  printf 'PATH="%s"\n' "$(paste -sd: bin.list)" > en
  cat ho.list >> en
  run_sudo mv en /etc/environment
  source /etc/environment
}

if [ -n "$cond" ]; then
  p=$(pwd)
  b=$USER

  # Wipe any previous install, refresh packages, install SSH + JDK,
  # and recreate the install root and the HDFS data directory.
  run_sudo rm -rf /usr/local/had
  run_sudo rm -rf /hadoop
  run_sudo apt-get update
  run_sudo apt-get upgrade -y
  run_sudo apt-get install openssh-server openssh-client -y
  run_sudo apt-get install openjdk-6-jdk openjdk-6-jre -y
  run_sudo mkdir /usr/local/had
  run_sudo mkdir /hadoop
  # NOTE(review): $GROUP is not a variable bash sets itself — if it is empty
  # this becomes "user:", which chown resolves to the user's login group.
  run_sudo chown "$USER:$GROUP" /hadoop
  run_sudo chown "$USER:$GROUP" /usr/local/had

  # ---- optional hostname / hosts-file reconfiguration ----
  echo "If you want chanage the hostname give your hostname what do you want..........."
  echo "If you don't want to chanage the hostname just type enter key of key board....."
  echo " "
  echo "Enter for skip install of Hostname:"
  echo "Please give your Hostname:"
  read -r HOST
  if [ -z "$HOST" ]; then
    echo Your hostname configuration successfully skiped..................
  else
    run_sudo hostname "$HOST"
    echo "$HOST" > a
    run_sudo mv a /etc/hostname
    # eth0's IPv4 address + the new hostname, pasted onto one line,
    # becomes the whole /etc/hosts.
    /sbin/ifconfig eth0 | grep 'inet addr' | cut -d':' -f2 | cut -d' ' -f1 > b
    echo "$HOST" >> b
    paste -s b > a
    run_sudo mv a /etc/hosts
    echo Your hostname configuration successfully finced..................
    rm b
  fi

  #### HADOOP HBASE HIVE PIG INSTALLATION ###
  echo "Enter 0 for skip install of hadoop hive hbase pig::"
  echo "Enter 1 for install of hadoop hive hbase pig::"
  read -r cond1
  echo "Enter 0 for skip install of auto strat hadoop when start the michen::"
  echo "Enter 1 for install of auto strat hadoop when start the michen::"
  read -r cond2

  # Seed the PATH directory list and the *_HOME list that
  # write_environment later turns into /etc/environment.
  {
    echo /usr/local/sbin
    echo /usr/local/bin
    echo /usr/sbin
    echo /usr/bin
    echo /sbin
    echo /bin
    echo /usr/games
    echo /usr/lib/jvm/java-6-openjdk-i386/bin
  } > bin.list
  echo 'JAVA_HOME="/usr/lib/jvm/java-6-openjdk-i386"' > ho.list

  # Fixed: $c is the node address written into every Hadoop/HBase config
  # file below; the original never assigned it, producing "hdfs://:54310".
  c=$(hostname)

  echo Hadoop ecosystems Installation on single node started...............
  if [ "$cond1" -eq 0 ]; then
    echo Hive install and configuration successfully skiped..................
    echo Hadoop install and configuration successfully skiped................
    echo Hbase install and configuration successfully skiped.................
    echo Pig install and configuration successfully skiped...................
  elif [ "$cond1" -eq 1 ]; then
    # ---- build tools (needed to build Pig from source) ----
    wget http://archive.apache.org/dist/ant/binaries/apache-ant-1.9.3-bin.tar.gz
    tar xzf apache-ant-1.9.3-bin.tar.gz
    mv apache-ant-1.9.3 /usr/local/had/ant
    echo /usr/local/had/ant/bin >> bin.list
    notify-send 'The apache-ant Installation finced it will start installation of apache-maven' 'Do not distrub the install of script'

    wget http://archive.apache.org/dist/maven/maven-3/3.1.1/binaries/apache-maven-3.1.1-bin.tar.gz
    tar xzf apache-maven-3.1.1-bin.tar.gz
    mv apache-maven-3.1.1 /usr/local/had/maven
    echo /usr/local/had/maven/bin >> bin.list
    notify-send 'The apache-maven Installation finced it will start installation of apache-forrest' 'Do not distrub the install of script'

    wget http://archive.apache.org/dist/forrest/0.9/apache-forrest-0.9.tar.gz
    tar xzf apache-forrest-0.9.tar.gz
    mv apache-forrest-0.9 /usr/local/had/forrest
    echo /usr/local/had/forrest/bin >> bin.list
    notify-send 'The apache-forrest Installation finced it will start installation of apache-hive' 'Do not distrub the install of script'

    # ---- Hive 0.11.0 with a MySQL-backed metastore ----
    wget http://archive.apache.org/dist/hive/stable/hive-0.11.0.tar.gz
    tar xzf hive-0.11.0.tar.gz
    mv hive-0.11.0 hive
    # Fixed: database name now matches the JDBC URL below
    # (original created "mshive" but connected to "myshive").
    echo "create database myshive" | mysql -u root -p'root'
    cp hive/conf/hive-default.xml.template hive/conf/hive-default.xml
    cp hive/conf/hive-env.sh.template hive/conf/hive-env.sh
    cp hive/conf/hive-log4j.properties.template hive/conf/hive-log4j.properties
    # NOTE(review): assumes mysql-connector-java-5.1.18-bin.jar is already in
    # the working directory — the script never downloads it; confirm.
    cp mysql-connector-java-5.1.18-bin.jar hive/lib
    # Fixed: paths now point at the real install prefix /usr/local/had
    # (original wrote /usr/local/hadoop) and at the 5.1.18 connector jar
    # actually copied above (original referenced 5.1.25).
    sed 's/#\ export\ HIVE_CONF_DIR=/export\ HIVE_CONF_DIR=\"\/usr\/local\/had\/hive\/conf\"/g' hive/conf/hive-env.sh -i
    sed 's/#\ export\ HIVE_AUX_JARS_PATH=/export\ HIVE_AUX_JARS_PATH=\"\/usr\/local\/had\/hive\/lib\/mysql-connector-java-5.1.18-bin.jar\"/g' hive/conf/hive-env.sh -i
    # Switch the metastore from embedded Derby to local MySQL (root/root).
    sed 's/<value>jdbc:derby:;databaseName=metastore_db;create=true<\/value>/<value>jdbc:mysql:\/\/localhost:3306\/myshive?createDatabaseIfNotExist=true<\/value>/g' hive/conf/hive-default.xml -i
    sed 's/<value>APP<\/value>/<value>root<\/value>/g' hive/conf/hive-default.xml -i
    sed 's/<value>mine<\/value>/<value>root<\/value>/g' hive/conf/hive-default.xml -i
    sed 's/org.apache.derby.jdbc.EmbeddedDriver/com.mysql.jdbc.Driver/g' hive/conf/hive-default.xml -i
    mv hive /usr/local/had
    echo /usr/local/had/hive/bin >> bin.list
    echo 'HIVE_HOME="/usr/local/had/hive"' >> ho.list
    echo 'HIVE_CONF_DIR="/usr/local/had/hive/conf"' >> ho.list
    echo Hive install and configuration successfully over....................
    wget http://hortonworks.com/wp-content/uploads/2013/05/hive_logo.png
    p=$(pwd)
    icon="$p/hive_logo.png"
    notify-send -i "$icon" 'The apache-hive Installation finced' 'Do not distrub the script it run another install'

    # ---- Hadoop 1.2.1 (pseudo-distributed, single node) ----
    wget http://archive.apache.org/dist/hadoop/common/stable1/hadoop-1.2.1.tar.gz
    tar xzf hadoop-1.2.1.tar.gz
    mv hadoop-1.2.1 hadoop
    # Fixed: tmp dir is the /hadoop directory created above (original said
    # /hdoop), and the second description tag was malformed (<\description>).
    sed "s/<\/configuration>/<property>\n<name>hadoop.tmp.dir<\/name>\n<value>\/hadoop<\/value>\n<description>a base for other temporary directories<\/description>\n<\/property>\n<property>\n<name>fs.default.name<\/name>\n<value>hdfs:\/\/$c:54310<\/value>\n<description>location of name node<\/description>\n<\/property>\n<\/configuration>/g" -i.bak hadoop/conf/core-site.xml
    sed 's=<configuration>=<configuration>\n<property>\n<name>mapred.job.tracker<\/name>\n<value>'"$c"':54311<\/value>\n<\/property>\n<property>\n<name>mapred.child.java.opts<\/name>\n<value>-Xmx512m<\/value>\n<\/property>=g' -i.bak hadoop/conf/mapred-site.xml
    sed 's=<configuration>=<configuration>\n<property>\n<name>dfs.replication<\/name>\n<value>1<\/value>\n<description>default block replication<\/description>\n<\/property>=g' -i.bak hadoop/conf/hdfs-site.xml
    sed 's/localhost/'"$c"'/g' -i.bak hadoop/conf/slaves
    sed 's/localhost/'"$c"'/g' -i.bak hadoop/conf/masters
    mv hadoop /usr/local/had
    echo /usr/local/had/hadoop/bin >> bin.list
    echo 'HADOOP_HOME="/usr/local/had/hadoop"' >> ho.list
    echo 'HADOOP_CONF_DIR="/usr/local/had/hadoop/conf"' >> ho.list
    echo Hadoop install and configuration successfully over..................
    wget http://www.parallelx.com/img/hadoop-elephant_logo.png
    wget http://cloudtimes.org/wp-content/uploads/2013/06/hadoop-logo-square.jpg
    p=$(pwd)
    icon="$p/hadoop-logo-square.jpg"
    notify-send -i "$icon" 'The apache-hadoop Install finced' 'Do not distrub the script it run another install'

    # ---- HBase 0.94.18 on the local HDFS, managing its own ZooKeeper ----
    wget http://archive.apache.org/dist/hbase/stable/hbase-0.94.18.tar.gz
    tar xzf hbase-0.94.18.tar.gz
    # Fixed: the tarball extracts hbase-0.94.18 (original moved 0.94.16,
    # which never exists, so the HBase install always failed).
    mv hbase-0.94.18 hbase
    # Fixed: $c is no longer wrapped in literal single quotes inside this
    # double-quoted sed program (the quotes ended up inside the XML values).
    sed "s/<\/configuration>/<property>\n<name>hbase.rootdir<\/name>\n<value>hdfs:\/\/$c:54310\/hbase<\/value>\n<\/property>\n<property>\n<name>hbase.cluster.distributed<\/name>\n<value>true<\/value>\n<\/property>\n<property>\n<name>hbase.zookeeper.property.clientPort<\/name>\n<value>2181<\/value>\n<\/property>\n<property>\n<name>hbase.zookeeper.quorum<\/name>\n<value>$c<\/value>\n<\/property>\n<\/configuration>/g" -i.bak hbase/conf/hbase-site.xml
    sed 's/localhost/'"$c"'/g' hbase/conf/regionservers -i
    sed 's/#\ export\ HBASE_MANAGES_ZK=true/export\ HBASE_MANAGES_ZK=true/g' hbase/conf/hbase-env.sh -i
    mv hbase /usr/local/had/
    echo /usr/local/had/hbase/bin >> bin.list
    echo 'HBASE_HOME="/usr/local/had/hbase"' >> ho.list
    echo 'HBASE_CONF_DIR="/usr/local/had/hbase/conf"' >> ho.list
    echo Hbase install and configuration successfully over...................
    wget http://www.bigsql.org/se/images/hbase.png
    p=$(pwd)
    icon="$p/hbase.png"
    notify-send -i "$icon" 'The apache-hbase Install finced' 'Do not distrub the script it run another install'

    # ---- Pig 0.12.0 (built from source with the ant installed above) ----
    wget http://archive.apache.org/dist/pig/stable/pig-0.12.0.tar.gz
    tar xzf pig-0.12.0.tar.gz
    cd pig-0.12.0
    ant
    cd -
    mv pig-0.12.0 /usr/local/had/pig
    echo /usr/local/had/pig/bin >> bin.list
    echo 'PIG_HOME="/usr/local/had/pig"' >> ho.list
    echo Pig install and configuration successfully over.....................
    wget http://joshualande.com/assets/pig_logo.jpg
    p=$(pwd)
    icon="$p/pig_logo.jpg"
    notify-send -i "$icon" 'The apache-pig Install finced' 'Do not distrub the script it run another install'
  fi

  # ---- environment and (optionally) start-at-boot configuration ----
  if [ "$cond2" -eq 0 ]; then
    write_environment
  elif [ "$cond2" -eq 1 ]; then
    write_environment
    a=$USER
    b=$(hostname)
    e=$(which start-all.sh)
    f=$(which hadoop)
    g=$(which start-hbase.sh)
    # NOTE(review): zkServer.sh belongs to a standalone ZooKeeper that this
    # script never installs; with HBASE_MANAGES_ZK=true above it is likely
    # unnecessary — confirm before relying on it.
    h=$(which zkServer.sh)
    # Boot-time script: refresh /etc/hosts with the current eth0 address,
    # then start Hadoop, HBase and ZooKeeper over ssh as the install user.
    cat > st.sh <<'EOS'
#!/bin/sh
b=$(hostname)
c=$(ifconfig eth0 | grep "inet addr" | cut -d: -f2 | cut -d' ' -f1)
echo $c $b > /etc/hosts
EOS
    {
      # Fixed: start the cluster with start-all.sh ($e); the original ran
      # the bare `hadoop` command ($f), which just prints usage and exits.
      echo "ssh $a@$b $e"
      echo "ssh $a@$b $g"
      echo "ssh $a@$b $h start"
    } >> st.sh
    chmod +x st.sh
    run_sudo mkdir /root/cron
    run_sudo mv st.sh /root/cron/st.sh
    # Append an @reboot entry to root's crontab: copy it out, edit, copy back.
    run_sudo cp /var/spool/cron/crontabs/root .
    run_sudo chown "$a" root
    # Fixed: original wrote "sleep && 30", which always fails
    # (argument-less sleep errors out and "30" is not a command).
    echo '@reboot sleep 30 && /root/cron/st.sh >> /root/cron/alst.log 2>&1' >> root
    run_sudo chown root:root root
    run_sudo chmod 0600 root
    run_sudo mv root /var/spool/cron/crontabs/
    echo Hosts configuration successfully over...............................
  else
    echo "$cond2"
  fi
fi
Hadoop SingleNode Apache Shell Script
Apache Hadoop SingleNode installation shellscript
Tuesday 15 April 2014
Apache_Singlenode.sh
READ.txt
Apache-Hadoop-SingleNode-installation-shellscript ================================================= Apache_Hadoop-Singlenode Hi friend, first install mysql-server and mysql-client (in the middle of the installation it will ask for the MySQL server password; give the root password (e.g. root)): sudo apt-get install mysql-server mysql-client Then start the script with the command below: bash Apache_Singlenode.sh It will take some time. It will ask for the sudo user's password; give the sudo user password (e.g. 123): Enter password for sudo user:: It will then offer to skip the hosts-file configuration: it asks for a hostname, and pressing Enter skips this step. If you want to configure the hosts file, just give your hostname (e.g. tony.com). It will then ask: Enter 0 for skip install of hadoop hive hbase pig:: Enter 1 for install of hadoop hive hbase pig:: Give 1 to install Hadoop, Hive, HBase and Pig. If you give 0, the installation will finish without installing Hadoop, HBase, Hive and Pig. Next it asks: Enter 0 for skip install of auto strat hadoop when start the michen:: Enter 1 for install of auto strat hadoop when start the michen:: At this point, if you give 0 it will only set the environment. If you give 1 it will set the environment and also configure Hadoop and HBase to run automatically when the machine starts. Thanks, Anthoni. If you have any doubt, send a mail to the address below: tony.ntw@gmail.com
Subscribe to:
Posts (Atom)