Hadoop Learning Notes: HBase Client Application Development

Posted: 2022-04-26 23:35:25

First, today's learning goals:

Today we will use Java on a Windows 7 client to work with HBase, covering the basic operations: creating a table, inserting rows, reading a single row, scanning all records in a table, and deleting a table.

Let's begin.

1. Start Hadoop and check that it is usable

  • start-all.sh
    The startup warning can be ignored; it does not affect usage.
  • jps
  • hadoop fs -ls / (the same check from Java is sketched below)
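Incidentally, the `hadoop fs -ls /` check can also be done programmatically. The snippet below is a minimal sketch, not part of the original post: the NameNode URI hdfs://hadoop:9000 is an assumption and must match fs.default.name in your core-site.xml (the Hadoop 1.x key name, matching the hadoop-core-1.0.4 jar used in this tutorial).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsListRoot {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Assumed NameNode address; must match fs.default.name in core-site.xml
		conf.set("fs.default.name", "hdfs://hadoop:9000");
		FileSystem fs = FileSystem.get(conf);
		// Equivalent of: hadoop fs -ls /
		for (FileStatus st : fs.listStatus(new Path("/"))) {
			System.out.println(st.getPath());
		}
		fs.close();
	}
}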

2. Start HBase

  • start-hbase.sh
  • hbase shell
  • jps — check the Java processes

3. Connect Eclipse to HDFS

Connection successful.
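With everything running, you can also verify connectivity from Java before writing any real code. The sketch below is not part of the original post; it assumes the same 0.94-era client API and the hostname/port used later in this tutorial, and uses HBaseAdmin.checkHBaseAvailable, which throws if the cluster cannot be reached:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class HBaseSmokeTest {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "hadoop"); // ZooKeeper host
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		// Throws MasterNotRunningException if HBase is unreachable
		HBaseAdmin.checkHBaseAvailable(conf);
		System.out.println("HBase is available");
	}
}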

4. Create the project Hadoop_5_HBase and start coding

We will create two classes: HBaseHelper.java and HBaseInvoke.java.

HBaseHelper: wraps the connection to HBase and implements the database operations: creating tables, deleting tables, inserting records, deleting records, fetching a single record, and fetching all records.

HBaseInvoke: tests the basic HBase operations by calling HBaseHelper.

HBaseHelper.java:

package com.hugy.hadoop;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

public class HBaseHelper {
	private Configuration conf; // HBase configuration
	private HBaseAdmin admin; // HBase admin client

	/**
	 * Build an HBase helper from an existing Hadoop configuration.
	 * 
	 * @param conf
	 *            Hadoop configuration
	 * @throws IOException
	 */
	public HBaseHelper(Configuration conf) throws IOException {
		this.conf = HBaseConfiguration.create(conf);
		this.admin = new HBaseAdmin(this.conf);
		System.out.println("HBase configuration created!");
	}

	/**
	 * Build an HBase helper with a default configuration.
	 * 
	 * @throws IOException
	 */
	public HBaseHelper() throws IOException {
		Configuration cnf = new Configuration();
		this.conf = HBaseConfiguration.create(cnf);
		this.admin = new HBaseAdmin(this.conf);
		System.out.println("HBase configuration created!");
	}

	/**
	 * Create an HBase table.
	 * 
	 * @param tableName
	 *            table name
	 * @param colFamilies
	 *            column families
	 * @throws IOException
	 */
	public void createTable(String tableName, String colFamilies[])
			throws IOException {
		if (this.admin.tableExists(tableName)) {
			System.out.println("Table: " + tableName + " already exists!");
		} else {
			HTableDescriptor dsc = new HTableDescriptor(tableName);
			for (String colFamily : colFamilies) {
				dsc.addFamily(new HColumnDescriptor(colFamily));
			}
			admin.createTable(dsc);
			System.out.println("Table " + tableName + " created successfully");
		}
	}

	/**
	 * Delete a table (disabling it first, as HBase requires).
	 * 
	 * @param tableName
	 *            table name
	 * @throws IOException
	 */
	public void deleteTable(String tableName) throws IOException {
		if (this.admin.tableExists(tableName)) {
			admin.disableTable(tableName);
			System.out.println("Table " + tableName + " disabled!");
			admin.deleteTable(tableName);
			System.out.println("Table deleted successfully!");
		} else {
			System.out.println("Table " + tableName + " does not exist!");
		}
	}

	/**
	 * Insert one cell.
	 * 
	 * @param tableName
	 *            table name
	 * @param rowkey
	 *            row key
	 * @param family
	 *            column family
	 * @param qualifier
	 *            column qualifier
	 * @param value
	 *            cell value
	 * @throws IOException
	 */
	public void insertRecord(String tableName, String rowkey, String family,
			String qualifier, String value) throws IOException {
		HTable table = new HTable(this.conf, tableName);
		Put put = new Put(rowkey.getBytes());
		put.add(family.getBytes(), qualifier.getBytes(), value.getBytes());
		table.put(put);
		table.close(); // release the per-table handle
		System.out.println("Inserted row " + rowkey + " into " + tableName + "!");
	}

	/**
	 * Delete one row.
	 * 
	 * @param tableName
	 *            table name
	 * @param rowkey
	 *            row key
	 * @throws IOException
	 */
	public void deleteRecord(String tableName, String rowkey)
			throws IOException {
		HTable table = new HTable(this.conf, tableName);
		Delete del = new Delete(rowkey.getBytes());
		table.delete(del);
		table.close();
		System.out.println("Deleted row " + rowkey + " from " + tableName + "!");
	}

	/**
	 * Fetch a single row by key.
	 * 
	 * @param tableName
	 *            table name
	 * @param rowkey
	 *            row key
	 * @return the row as a Result
	 * @throws IOException
	 */
	public Result getOneRecord(String tableName, String rowkey)
			throws IOException {
		HTable table = new HTable(this.conf, tableName);
		Get get = new Get(rowkey.getBytes());
		Result rs = table.get(get);
		table.close();
		return rs;
	}

	/**
	 * Fetch all rows in a table.
	 * 
	 * @param tableName
	 *            table name
	 * @return one Result per row
	 * @throws IOException
	 */
	public List<Result> getAllRecord(String tableName) throws IOException {
		HTable table = new HTable(this.conf, tableName);
		Scan scan = new Scan();
		ResultScanner scanner = table.getScanner(scan);
		List<Result> list = new ArrayList<Result>();
		for (Result r : scanner) {
			list.add(r);
		}
		scanner.close();
		table.close();
		return list;
	}
}
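A note on API versions: HBaseHelper above uses the HBaseAdmin/HTable client API that shipped with HBase 0.94, matching the jars on this project's classpath. HBase 1.x deprecated those classes in favor of Connection/Admin/Table, and HBase 2.x removed them. Purely as a hedged sketch for anyone on a newer version (the builder classes below are 2.x API, not part of this tutorial), the same create-and-insert flow looks like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class ModernHBaseSketch {
	public static void main(String[] args) throws Exception {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "hadoop");
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		// Connections are heavyweight and thread-safe; create one and share it
		try (Connection connection = ConnectionFactory.createConnection(conf);
				Admin admin = connection.getAdmin()) {
			TableName name = TableName.valueOf("blog");
			if (!admin.tableExists(name)) {
				admin.createTable(TableDescriptorBuilder.newBuilder(name)
						.setColumnFamily(ColumnFamilyDescriptorBuilder.of("article"))
						.setColumnFamily(ColumnFamilyDescriptorBuilder.of("author"))
						.build());
			}
			// Table handles are lightweight; get one per use and close it
			try (Table table = connection.getTable(name)) {
				Put put = new Put(Bytes.toBytes("1"));
				put.addColumn(Bytes.toBytes("article"), Bytes.toBytes("title"),
						Bytes.toBytes("Hadoop学习资料"));
				table.put(put);
			}
		}
	}
}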

HBaseInvoke.java:

package com.hugy.hadoop;

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Result;

public class HBaseInvoke {

	/**
	 * @param args
	 * @throws IOException
	 */
	public static void main(String[] args) throws IOException {
		System.out.println("HBase operations starting...");
		// 1. Initialize the helper
		Configuration conf = new Configuration();
		// Must match hbase.zookeeper.quorum in hbase/conf/hbase-site.xml;
		// the Windows client must also be able to resolve this hostname,
		// e.g. via an entry in the hosts file
		conf.set("hbase.zookeeper.quorum", "hadoop");
		// Must match hbase.zookeeper.property.clientPort in hbase-site.xml
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		HBaseHelper hbase = new HBaseHelper(conf);
		// 2. Exercise the basic operations
		// 2.1 Create the table (dropping it first if it already exists)
		String tableName = "blog";
		hbase.deleteTable(tableName);
		String colFamilies[] = { "article", "author" };
		hbase.createTable(tableName, colFamilies);
		// 2.2 Insert a few cells into row "1"
		hbase.insertRecord(tableName, "1", "article", "title", "Hadoop学习资料");
		hbase.insertRecord(tableName, "1", "author", "name", "hugengyong");
		hbase.insertRecord(tableName, "1", "article", "content", "Hadoop学习,HBase学习-http://blog.csdn.net/hugengyong");

		// 2.3 Fetch a single row
		Result rs1 = hbase.getOneRecord(tableName, "1");
		for (KeyValue kv : rs1.raw()) {
			System.out.println(new String(kv.getRow()));
			System.out.println(new String(kv.getFamily()));
			System.out.println(new String(kv.getQualifier()));
			System.out.println(new String(kv.getValue()));
		}
		// 2.4 Scan the whole table
		List<Result> list = hbase.getAllRecord(tableName);
		for (Result rs2 : list) {
			for (KeyValue kv : rs2.raw()) {
				System.out.print("row key is : " + new String(kv.getRow()));
				System.out.print("family is  : " + new String(kv.getFamily()));
				System.out.print("qualifier is:" + new String(kv.getQualifier()));
				System.out.print("timestamp is:" + kv.getTimestamp());
				System.out.println("Value  is  : " + new String(kv.getValue()));
			}
		}

	}

}

The output is as follows:

HBase operations starting...
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.5-1392090, built on 09/30/2012 17:52 GMT
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:host.name=hugengyong-PC
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.version=1.6.0_13
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.vendor=Sun Microsystems Inc.
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.home=D:\JavaJDK\jdk\jre
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.class.path=D:\AndroidWork\Hadoop_5_HBase\bin;F:\Hadoop\HadoopTools\hbase-0.94.20\hbase-0.94.20\lib\activation-1.1.jar;...;F:\Hadoop\HadoopTools\hbase-0.94.20\hbase-0.94.20\hbase-0.94.20.jar;F:\Hadoop\HadoopTools\hbase-0.94.20\hbase-0.94.20\hbase-0.94.20-tests.jar (remaining HBase 0.94.20 lib jars elided)
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.library.path=D:\JavaJDK\jdk\bin;.;C:\Windows\Sun\Java\bin;C:\Windows\system32;C:\Windows;D:/JavaJDK/jdk/bin/../jre/bin/client;D:/JavaJDK/jdk/bin/../jre/bin;D:/JavaJDK/jdk/bin/../jre/lib/i386;D:\JavaJDK\jdk\bin;D:\JavaJDK\jdk\jre\bin;C:\Program Files\Common Files\NetSarang;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;D:\JavaJDK\eclipse-jee-juno-SR1-win32\eclipse;
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.io.tmpdir=C:\Users\ADMINI~1\AppData\Local\Temp\
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:os.name=Windows Vista
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:os.arch=x86
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:os.version=6.1
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:user.name=Administrator
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:user.home=C:\Users\Administrator
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Client environment:user.dir=D:\AndroidWork\Hadoop_5_HBase
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=hconnection
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:54 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d000e, negotiated timeout = 180000
HBase configuration created!
14/07/26 19:44:54 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:54 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:54 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d000f, negotiated timeout = 180000
14/07/26 19:44:55 INFO zookeeper.ZooKeeper: Session: 0x1477414006d000f closed
14/07/26 19:44:55 INFO zookeeper.ClientCnxn: EventThread shut down
14/07/26 19:44:55 INFO client.HBaseAdmin: Started disable of blog
14/07/26 19:44:55 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:44:55 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:55 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:55 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:55 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d0010, negotiated timeout = 180000
14/07/26 19:44:55 INFO zookeeper.ZooKeeper: Session: 0x1477414006d0010 closed
14/07/26 19:44:55 INFO zookeeper.ClientCnxn: EventThread shut down
14/07/26 19:44:56 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:44:56 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:56 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:56 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:56 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d0011, negotiated timeout = 180000
14/07/26 19:44:56 INFO zookeeper.ClientCnxn: EventThread shut down
14/07/26 19:44:56 INFO zookeeper.ZooKeeper: Session: 0x1477414006d0011 closed
14/07/26 19:44:57 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:44:57 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:57 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:57 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:57 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d0012, negotiated timeout = 180000
14/07/26 19:44:57 INFO zookeeper.ZooKeeper: Session: 0x1477414006d0012 closed
14/07/26 19:44:57 INFO zookeeper.ClientCnxn: EventThread shut down
Table blog disabled!
14/07/26 19:44:57 INFO client.HBaseAdmin: Disabled blog
14/07/26 19:44:58 INFO client.HBaseAdmin: Deleted blog
Table deleted successfully!
14/07/26 19:44:58 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:44:58 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:44:58 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:44:58 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:44:58 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d0013, negotiated timeout = 180000
14/07/26 19:44:58 INFO zookeeper.ZooKeeper: Session: 0x1477414006d0013 closed
14/07/26 19:44:58 INFO zookeeper.ClientCnxn: EventThread shut down
14/07/26 19:45:00 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=hadoop:2181 sessionTimeout=180000 watcher=catalogtracker-on-org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation@17b0998
14/07/26 19:45:00 INFO zookeeper.ClientCnxn: Opening socket connection to server hadoop/192.168.1.107:2181. Will not attempt to authenticate using SASL (无法定位登录配置)
14/07/26 19:45:00 INFO zookeeper.RecoverableZooKeeper: The identifier of this process is 6120@hugengyong-PC
14/07/26 19:45:00 INFO zookeeper.ClientCnxn: Socket connection established to hadoop/192.168.1.107:2181, initiating session
14/07/26 19:45:00 INFO zookeeper.ClientCnxn: Session establishment complete on server hadoop/192.168.1.107:2181, sessionid = 0x1477414006d0014, negotiated timeout = 180000
Table blog created successfully
14/07/26 19:45:00 INFO zookeeper.ZooKeeper: Session: 0x1477414006d0014 closed
14/07/26 19:45:00 INFO zookeeper.ClientCnxn: EventThread shut down
Inserted row 1 into blog!
Inserted row 1 into blog!
Inserted row 1 into blog!
1
article
content
Hadoop学习,HBase学习-http://blog.csdn.net/hugengyong
1
article
title
Hadoop学习资料
1
author
name
hugengyong
row key is : 1family is  : articlequalifier is:contenttimestamp is:1406403867951Value  is  : Hadoop学习,HBase学习-http://blog.csdn.net/hugengyong
row key is : 1family is  : articlequalifier is:titletimestamp is:1406403867924Value  is  : Hadoop学习资料
row key is : 1family is  : authorqualifier is:nametimestamp is:1406403867940Value  is  : hugengyong

The HBase database operation code is complete and tested successfully.
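One small follow-up worth knowing: when you only need a single known cell rather than the whole row, Result.getValue(family, qualifier) returns the newest value of that cell directly, avoiding the raw() loop. Below is a minimal sketch reusing the HBaseHelper class above (the class name GetSingleCell is just for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Result;

public class GetSingleCell {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		conf.set("hbase.zookeeper.quorum", "hadoop");
		conf.set("hbase.zookeeper.property.clientPort", "2181");
		HBaseHelper hbase = new HBaseHelper(conf);
		Result rs = hbase.getOneRecord("blog", "1");
		// Latest version of article:title, or null if the cell is absent
		byte[] title = rs.getValue("article".getBytes(), "title".getBytes());
		System.out.println(title == null ? "(no title)" : new String(title));
	}
}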

5. Inspect the HBase directory structure

(screenshot of the HBase directory listing)


That wraps up today's notes.

In the next installment we will look at what HBase is good for, what kinds of requirements suit an HBase database, and some real-world HBase application scenarios.