运行HBase时常会遇到个错误,我就有这样的经历。
ERROR: org.apache.hadoop.hbase.MasterNotRunningException: Retried 7 times
检查日志:org.apache.hadoop.ipc.RPC$VersionMismatch: Protocol org.apache.hadoop.hdfs.protocol.ClientProtocol version mismatch. (client = 42, server = 41)
如果是这个错误,说明是RPC协议版本不一致所造成的,解决方法:将hbase/lib目录下的hadoop-core的jar文件删除,将hadoop目录下的hadoop-0.20.2-core.jar拷贝到hbase/lib下面,然后重新启动hbase即可。第二种错误是:没有启动hadoop,应先启动hadoop,再启动hbase。
在Eclipse开发中,需要加入hadoop所有的jar包以及HBase的两个jar包(hbase、zookeeper)。
HBase基础可见帖子:http://www.cnblogs.com/liqizhou/archive/2012/05/14/2499112.html
建表,通过HBaseAdmin实例的createTable方法来创建表。
HTable类用于操作表,例如,put方法可以插入数据;该类初始化时传入配置和表名;getScanner()方法可以获得某一列族上的所有数据,返回ResultScanner,遍历得到一个个Result;Result的getFamilyMap()方法可以获得以列名为key、单元格值为value的Map,这刚好与hadoop中map的结果是一样的。
package test;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
- public class Htable {
- /**
- * @param args
- */
- public static void main(String[] args) throws IOException {
- // TODO Auto-generated method stub
- Configuration hbaseConf = HBaseConfiguration.create();
- HBaseAdmin admin = new HBaseAdmin(hbaseConf);
- HTableDescriptor htableDescriptor = new HTableDescriptor("table"
- .getBytes()); //set the name of table
- htableDescriptor.addFamily(new HColumnDescriptor("fam1")); //set the name of column clusters
- admin.createTable(htableDescriptor); //create a table
- HTable table = new HTable(hbaseConf, "table"); //get instance of table.
- for (int i = 0; i < 3; i++) { //for is number of rows
- Put putRow = new Put(("row" + i).getBytes()); //the ith row
- putRow.add("fam1".getBytes(), "col1".getBytes(), "vaule1"
- .getBytes()); //set the name of column and value.
- putRow.add("fam1".getBytes(), "col2".getBytes(), "vaule2"
- .getBytes());
- putRow.add("fam1".getBytes(), "col3".getBytes(), "vaule3"
- .getBytes());
- table.put(putRow);
- }
- for(Result result: table.getScanner("fam1".getBytes())){//get data of column clusters
- for(Map.Entry<byte[], byte[]> entry : result.getFamilyMap("fam1".getBytes()).entrySet()){//get collection of result
- String column = new String(entry.getKey());
- String value = new String(entry.getValue());
- System.out.println(column+","+value);
- }
- }
- admin.disableTable("table".getBytes()); //disable the table
- admin.deleteTable("table".getBytes()); //drop the tbale
- }
- }
以