HBase Java CRUD operations (create, read, update, delete)
package hbase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * HBase CRUD examples using the 0.94-era client API
 * (HBaseAdmin, HTablePool, HTable, KeyValue).
 */
public class HbaseCrudTest {

    public static Configuration configuration;
    public static final String tablename = "cn_timchen:tim.test123";

    static {
        System.setProperty("hadoop.home.dir", "D:\\hadoop");
        configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.property.clientPort", "2181");
        configuration.set("hbase.zookeeper.quorum", "master");
    }

    public static void main(String[] args) {
        // createTable(tablename);
        // insertData(tablename);
        // getDataByRowKey(tablename, "rowkey2");
        // getScan(tablename, "columnFamily3", "column1");
        // addColumnFamily(tablename);
        // deleteRow(tablename, "rowkey3");
        getAllData(tablename);
    }

    /**
     * Create an HTable (drops the table first if it already exists).
     */
    @SuppressWarnings("resource")
    public static void createTable(String tableName) {
        System.out.println("start create table ...");
        try {
            HBaseAdmin hBaseAdmin = new HBaseAdmin(configuration);
            if (hBaseAdmin.tableExists(tableName)) {
                System.out.println(tableName + " already exists, deleting it first ...");
                hBaseAdmin.disableTable(tableName);
                hBaseAdmin.deleteTable(tableName);
            }
            HTableDescriptor desc = new HTableDescriptor(tableName);
            desc.addFamily(new HColumnDescriptor("columnFamily1"));
            desc.addFamily(new HColumnDescriptor("columnFamily2"));
            desc.addFamily(new HColumnDescriptor("columnFamily3"));
            hBaseAdmin.createTable(desc);
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Insert data.
     */
    public static void insertData(String tableName) {
        System.out.println("start insert data ...");
        HTablePool pool = new HTablePool(configuration, 1000);
        // One Put represents one row; create another Put for a second row.
        // Each row has a unique row key, which is the value passed to the Put constructor.
        Put put = new Put("rowkey2".getBytes());

        put.add("columnFamily1".getBytes(), null, "avc".getBytes()); // first column of this row
        put.add("columnFamily2".getBytes(), null, "res".getBytes()); // second column of this row

        put.add("columnFamily3".getBytes(), "column1".getBytes(), "dff".getBytes());
        put.add("columnFamily3".getBytes(), "column2".getBytes(), "ddf".getBytes());

        try {
            pool.getTable(tableName).put(put);
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("end insert data ...");
    }

    /**
     * Add a column family to an existing table.
     */
    public static void addColumnFamily(String tableName) {
        System.err.println("start add family column ...");
        try {
            HTable table = new HTable(configuration, tableName);
            HTableDescriptor desc = new HTableDescriptor(table.getTableDescriptor());
            desc.addFamily(new HColumnDescriptor(Bytes.toBytes("columnFamily4")));
            HBaseAdmin admin = new HBaseAdmin(configuration);
            admin.disableTable(tableName);
            admin.modifyTable(Bytes.toBytes(tableName), desc);
            admin.enableTable(tableName);
        } catch (Exception e1) {
            e1.printStackTrace();
        }
        System.err.println("end add family column!");
    }

    /**
     * Drop a table.
     */
    public static void dropTable(String tableName) {
        try {
            HBaseAdmin admin = new HBaseAdmin(configuration);
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        } catch (MasterNotRunningException e) {
            e.printStackTrace();
        } catch (ZooKeeperConnectionException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Delete a single row.
     */
    public static void deleteRow(String tableName, String rowkey) {
        try {
            HTable table = new HTable(configuration, tableName);
            List<Delete> deletes = new ArrayList<Delete>();
            Delete d1 = new Delete(rowkey.getBytes());
            deletes.add(d1);
            table.delete(deletes);
            System.out.println("Delete succeeded!");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Fetch all rows in the table.
     */
    public static void getAllData(String tableName) {
        HTablePool pool = new HTablePool(configuration, 1000);
        ResultScanner rs = null;
        try {
            rs = pool.getTable(tableName).getScanner(new Scan());
            for (Result r : rs) {
                System.out.println("rowKey: " + new String(r.getRow()));
                for (KeyValue keyValue : r.raw()) {
                    System.out.println("column: " + new String(keyValue.getFamily()) + ":"
                            + new String(keyValue.getQualifier())
                            + "====value: " + new String(keyValue.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (rs != null) {
                rs.close();
            }
        }
    }

    /**
     * Fetch a single row by its row key.
     */
    public static void getDataByRowKey(String tableName, String rowKey) {
        HTablePool pool = new HTablePool(configuration, 1000);
        Get get = new Get(rowKey.getBytes()); // query by row key
        try {
            Result result = pool.getTable(tableName).get(get);
            System.err.println("rowkey: " + new String(result.getRow()));
            for (KeyValue keyValue : result.raw()) {
                System.err.println("column: " + new String(keyValue.getFamily())
                        + "===value: " + new String(keyValue.getValue()));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Scan rows filtered by the value of a given columnFamily:column.
     */
    public static void getScan(String tableName, String columnFamily, String column) {
        HTablePool pool = new HTablePool(configuration, 1000);
        Filter filter = new SingleColumnValueFilter(Bytes.toBytes(columnFamily),
                Bytes.toBytes(column), CompareOp.EQUAL, Bytes.toBytes("ggg"));
        Scan scan = new Scan();
        scan.setFilter(filter);
        try {
            ResultScanner rs = pool.getTable(tableName).getScanner(scan);
            for (Result r : rs) {
                System.err.println("matched rowkey: " + new String(r.getRow()));
                for (KeyValue keyValue : r.raw()) {
                    System.err.println("column: " + new String(keyValue.getFamily())
                            + "====value: " + new String(keyValue.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Query with multiple columnFamily:column conditions combined in a FilterList.
     */
    public static void QueryByCondition3(String tableName) {
        HTablePool pool = new HTablePool(configuration, 1000);
        // NOTE: this cast fails at runtime with a ClassCastException; see the note below the listing.
        HTable table = (HTable) pool.getTable(tableName);

        List<Filter> filters = new ArrayList<Filter>();

        // The first argument of SingleColumnValueFilter is the column family, the second the qualifier.
        Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes("column1"),
                null, CompareOp.EQUAL, Bytes.toBytes("aaa"));
        filters.add(filter1);

        Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes("column2"),
                null, CompareOp.EQUAL, Bytes.toBytes("bbb"));
        filters.add(filter2);

        Filter filter3 = new SingleColumnValueFilter(Bytes.toBytes("column3"),
                null, CompareOp.EQUAL, Bytes.toBytes("ccc"));
        filters.add(filter3);

        FilterList filterList1 = new FilterList(filters);

        Scan scan = new Scan();
        scan.setFilter(filterList1);
        ResultScanner rs;
        try {
            rs = table.getScanner(scan);
            for (Result r : rs) {
                System.out.println("rowkey: " + new String(r.getRow()));
                for (KeyValue keyValue : r.raw()) {
                    System.out.println("column: " + new String(keyValue.getFamily())
                            + "====value: " + new String(keyValue.getValue()));
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Note: in the code above, when execution reaches HTable table = (HTable) pool.getTable(tableName); a ClassCastException is thrown:
org.apache.hadoop.hbase.client.HTablePool$PooledHTable cannot be cast to org.apache.hadoop.hbase.client.HTable
This is because pool.getTable(tableName) returns an HTableInterface, which cannot be cast to HTable.
Workaround: change
HTable table = (HTable) pool.getTable(tableName);
table.put(put);
to:
pool.getTable(tableName).put(put);
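Equivalently, you can keep the returned handle as an HTableInterface (the type the pool actually hands out) and close it when you are done, which avoids the cast entirely. A minimal sketch against the same 0.94-era client API; the method name insertWithoutCast is hypothetical, it reuses the class's static configuration, and it needs one extra import, org.apache.hadoop.hbase.client.HTableInterface:

    // Hypothetical helper illustrating the no-cast pattern.
    public static void insertWithoutCast(String tableName) throws IOException {
        HTablePool pool = new HTablePool(configuration, 1000);
        HTableInterface table = pool.getTable(tableName); // HTableInterface, no cast needed
        try {
            Put put = new Put("rowkey2".getBytes());
            put.add("columnFamily1".getBytes(), null, "avc".getBytes());
            table.put(put);
        } finally {
            table.close(); // for a pooled table, close() returns it to the pool
        }
    }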
Also note:
When creating a table, HBase requires at least one column family to be specified, otherwise table creation fails.
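For example, a minimal table with a single column family can be created with the same HBaseAdmin calls used in createTable above (the table and family names here are just placeholders):

    HBaseAdmin admin = new HBaseAdmin(configuration);
    HTableDescriptor desc = new HTableDescriptor("demo_table"); // placeholder table name
    desc.addFamily(new HColumnDescriptor("cf1"));               // per the note above, at least one family is required
    admin.createTable(desc);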
In addition, while running the code above the client may fail to call into Hadoop on Windows. If that happens, two extra steps are needed:
1. Overwrite the contents of your local hadoop/bin directory with the contents of hadoop2.6(x64)V0.2.zip.
2. Copy hadoop.dll from hadoop2.6(x64)V0.2.zip into Windows/System32.
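If you want to verify that setup programmatically before connecting, a quick check might look like the sketch below (the D:\hadoop path is simply the value set in the static block at the top of the class):

    String hadoopHome = System.getProperty("hadoop.home.dir", System.getenv("HADOOP_HOME"));
    java.io.File winutils = new java.io.File(hadoopHome, "bin/winutils.exe");
    if (!winutils.exists()) {
        System.err.println("winutils.exe not found under " + hadoopHome
                + "/bin - Hadoop native calls on Windows will likely fail");
    }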