sudo apt-get install ssh openssh-server
sudo apt-get install ssh openssh-client
Start the ssh server
$ sudo /etc/init.d/ssh restart
$ netstat -tlp
tcp6       0      0 *:ssh           *:*             LISTEN      -
If you see a line like the one above, the ssh server is already running.
Disable the firewall
sudo ufw disable
SSH keys can be generated with either RSA or DSA; RSA is used by default:
ssh-keygen -t rsa -P ""
cd ~/.ssh
cat id_rsa.pub >> authorized_keys
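With the public key appended to authorized_keys, SSH to the local machine should no longer ask for a password. A quick check (assuming the default key location used above):
ssh localhost    # the first connection only asks to confirm the host key fingerprint
exit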
Install Hadoop
cd /home/jonee
Extract hadoop-2.6.0.tar.gz
sudo tar -zxvf hadoop-2.6.0.tar.gz
Rename the extracted folder to hadoop:
sudo mv hadoop-2.6.0 hadoop
Set the owner of the hadoop folder to jonee:
sudo chown -R jonee hadoop
sudo chmod -R 777 /home/jonee/hadoop
To set the group at the same time, use the owner:group form:
sudo chown -R hadoop:hadoop /home/jonee/hadoop
2. Configuration
Edit the .bashrc configuration
sudo nano ~/.bashrc
Append at the end of the file:
#HADOOP VARIABLES START
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-i386
export HADOOP_INSTALL=/home/jonee/hadoop
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
#HADOOP VARIABLES END
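For the new variables to take effect in the current terminal (new terminals pick them up automatically), reload the file:
source ~/.bashrc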
Configuration files
hadoop-env.sh
sudo nano /home/jonee/hadoop/etc/hadoop/hadoop-env.sh
Set:
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-i386
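The JAVA_HOME value has to match a JDK that is actually installed; java-7-openjdk-i386 is simply the directory used in this guide, so it is worth checking what exists on your machine:
ls /usr/lib/jvm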
core-site.xml
Edit command:
sudo nano /home/jonee/hadoop/etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>/home/jonee/hadoop/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
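hadoop.tmp.dir points to a directory that does not exist yet; it can be created ahead of time (path taken from the value above):
mkdir -p /home/jonee/hadoop/tmp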
mapred-site.xml
Hadoop 2.6.0 ships this file only as mapred-site.xml.template, so it may need to be copied first:
cp /home/jonee/hadoop/etc/hadoop/mapred-site.xml.template /home/jonee/hadoop/etc/hadoop/mapred-site.xml
Edit command:
sudo nano /home/jonee/hadoop/etc/hadoop/mapred-site.xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
    <!-- mapred.job.tracker is a Hadoop 1.x property; it is ignored when MapReduce runs on YARN -->
    <property>
        <name>mapred.job.tracker</name>
        <value>localhost:9001</value>
    </property>
</configuration>
yarn-site.xml
Edit command:
sudo nano /home/jonee/hadoop/etc/hadoop/yarn-site.xml
<configuration>
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
</configuration>
hdfs-site.xml
Edit command:
sudo nano /home/jonee/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/home/jonee/hadoop/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/home/jonee/hadoop/dfs/data</value>
    </property>
    <!-- This property prevents read/write permission errors when HDFS is accessed from Eclipse later -->
    <property>
        <name>dfs.permissions</name>
        <value>false</value>
    </property>
</configuration>
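The name and data directories referenced above can be created in advance so their ownership and permissions are under your control (paths taken from the values above):
mkdir -p /home/jonee/hadoop/dfs/name /home/jonee/hadoop/dfs/data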
Read/write permissions
sudo chmod 777 /home/jonee/hadoop
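With all configuration files in place, the usual next step is to format HDFS and start the daemons. A minimal sketch, assuming the environment variables added to ~/.bashrc above have been loaded:
hdfs namenode -format    # initialize the NameNode storage directory (first run only)
start-dfs.sh             # start NameNode, DataNode and SecondaryNameNode
start-yarn.sh            # start ResourceManager and NodeManager
jps                      # lists the running Java processes; the daemons above should appear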
Uninstall
Hadoop was unpacked from a tarball rather than installed through apt, so remove it by deleting its directory:
sudo rm -rf /home/jonee/hadoop
To clean up packages that were installed with apt (such as openssh-server):
sudo apt-get autoremove
sudo apt-get clean
and, to purge leftover configuration files of packages that have already been removed:
dpkg -l | grep ^rc | awk '{print $2}' | sudo xargs dpkg -P