HBase Java 增删改查操作

  1. package hbase;  
  2. import java.io.IOException;  
  3. import java.util.ArrayList;  
  4. import java.util.List;  
  5. import java.util.Map;  
  6. import org.apache.hadoop.conf.Configuration;  
  7. import org.apache.hadoop.hbase.HBaseConfiguration;  
  8. import org.apache.hadoop.hbase.HColumnDescriptor;  
  9. import org.apache.hadoop.hbase.HTableDescriptor;  
  10. import org.apache.hadoop.hbase.KeyValue;  
  11. import org.apache.hadoop.hbase.MasterNotRunningException;  
  12. import org.apache.hadoop.hbase.ZooKeeperConnectionException;  
  13. import org.apache.hadoop.hbase.client.Delete;  
  14. import org.apache.hadoop.hbase.client.Get;  
  15. import org.apache.hadoop.hbase.client.HBaseAdmin;  
  16. import org.apache.hadoop.hbase.client.HTable;  
  17. import org.apache.hadoop.hbase.client.HTablePool;  
  18. import org.apache.hadoop.hbase.client.Put;  
  19. import org.apache.hadoop.hbase.client.Result;  
  20. import org.apache.hadoop.hbase.client.ResultScanner;  
  21. import org.apache.hadoop.hbase.client.Scan;  
  22. import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;  
  23. import org.apache.hadoop.hbase.filter.Filter;  
  24. import org.apache.hadoop.hbase.filter.FilterList;  
  25. import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;  
  26. import org.apache.hadoop.hbase.util.Bytes;  
  27.   
  28. /**  
  29.  */  
  30. public class HbaseCrudTest {  
  31.   
  32.     public static Configuration configuration;  
  33.     public static final String tablename = "cn_timchen:tim.test123";  
  34.     static{  
  35.         System.setProperty("hadoop.home.dir", "D:\\hadoop");  
  36.         configuration = HBaseConfiguration.create();  
  37.         configuration.set("hbase.zookeeper.property.clientPort", "2181");  
  38.         configuration.set("hbase.zookeeper.quorum", "master");  
  39.     }  
  40.   
  41.     public static void main(String[] args){  
  42. //        createTable(tablename);  
  43. //        insertData(tablename);  
  44. //        getData(tablename,"rowkey2");  
  45. //        getScan(tablename, "family1");  
  46. //        addColumnFamily(tablename);  
  47. //        deleteRow(tablename, "rowkey3");  
  48.         getAllData(tablename);  
  49.   
  50.     }  
  51.   
  52.     /**  
  53.      * 建立HTable  
  54.      */  
  55.     @SuppressWarnings("resource")  
  56.     public static void createTable(String tableName){  
  57.         System.out.println("start create table ...");  
  58.         try {  
  59.             HBaseAdmin hBaseAdmin = new HBaseAdmin(configuration);  
  60.             if(hBaseAdmin.tableExists(tableName)){  
  61.                 hBaseAdmin.disableTable(tableName);  
  62.                 hBaseAdmin.deleteTable(tableName);  
  63.                 System.out.println(tableName + " is exist, delete...");  
  64.             }  
  65.             HTableDescriptor desc = new HTableDescriptor(tableName);  
  66.             desc.addFamily(new HColumnDescriptor("columnFamily1"));  
  67.             desc.addFamily(new HColumnDescriptor("columnFamily2"));  
  68.             desc.addFamily(new HColumnDescriptor("columnFamily3"));  
  69.             hBaseAdmin.createTable(desc);  
  70.         } catch (MasterNotRunningException e) {  
  71.             e.printStackTrace();  
  72.         } catch (ZooKeeperConnectionException e) {  
  73.             e.printStackTrace();  
  74.         } catch (IOException e) {  
  75.             e.printStackTrace();  
  76.         }  
  77.     }  
  78.   
  79.     /**  
  80.      * 插入数据  
  81.      */  
  82.     public static void insertData(String tableName){  
  83.         System.out.println("start insert data ...");  
  84.         HTablePool pool = new HTablePool(configuration, 1000);  
  85.         //一个Put表明一行数据,再new 一个Put表示第二行数据,每行一个惟一的RowKey,此处RowKey为put构造方法中传入的值  
  86.         Put put = new Put("rowkey2".getBytes());  
  87.   
  88.         put.add("columnFamily1".getBytes(), null, "avc".getBytes());//本行数据的第一列  
  89.         put.add("columnFamily2".getBytes(), null, "res".getBytes());//本行数据的第二列  
  90.   
  91.         put.add("columnFamily3".getBytes(), "column1".getBytes(), "dff".getBytes());  
  92.         put.add("columnFamily3".getBytes(), "column2".getBytes(), "ddf".getBytes());  
  93.   
  94.         try {  
  95.             pool.getTable(tableName).put(put);  
  96.         } catch (IOException e) {  
  97.             e.printStackTrace();  
  98.         }  
  99.         System.out.println("end insert data ...");  
  100.     }  
  101.   
  102.     /**  
  103.      * 建立好表后,添加列簇  
  104.      */  
  105.     public static void addColumnFamily(String tableName){  
  106.         System.err.println("start add family column ...");  
  107.         try {  
  108.             HTablePool pool = new HTablePool(configuration, 1000);  
  109.             HTable table = new HTable(configuration,tableName);  
  110.             HTableDescriptor desc = new HTableDescriptor(table.getTableDescriptor());  
  111.             desc.addFamily(new HColumnDescriptor(Bytes.toBytes("columnFamily4")));  
  112.             HBaseAdmin admin = new HBaseAdmin(configuration);  
  113.             admin.disableTable(tableName);  
  114.             admin.modifyTable(Bytes.toBytes(tableName), desc);  
  115.             admin.enableTable(tableName);  
  116.         } catch (Exception e1) {  
  117.         }  
  118.         System.err.println("end add family column!");  
  119.     }  
  120.   
  121.     /**  
  122.      * 删除表  
  123.      */  
  124.     public static void dropTable(String tableName){  
  125.         try {  
  126.             HBaseAdmin admin = new HBaseAdmin(configuration);  
  127.             admin.disableTable(tableName);  
  128.             admin.deleteTable(tableName);  
  129.         } catch (MasterNotRunningException e) {  
  130.             e.printStackTrace();  
  131.         } catch (ZooKeeperConnectionException e) {  
  132.             e.printStackTrace();  
  133.         } catch (IOException e) {  
  134.             e.printStackTrace();  
  135.         }  
  136.     }  
  137.   
  138.     /**  
  139.      * 删除某一行的数据  
  140.      */  
  141.     public static void deleteRow(String tableName, String rowkey){  
  142.         try {  
  143.             HTable table = new HTable(configuration, tableName);  
  144.             List deletes = new ArrayList();  
  145.             Delete d1 = new Delete(rowkey.getBytes());  
  146.             deletes.add(d1);  
  147.             table.delete(deletes);  
  148.             System.out.println("删除成功!");  
  149.         } catch (IOException e) {  
  150.             e.printStackTrace();  
  151.         }  
  152.     }  
  153.   
  154.     /**  
  155.      * 获取表中的全部数据  
  156.      */  
  157.     public static void getAllData(String tableName){  
  158.         HTablePool pool = new HTablePool(configuration, 1000);  
  159.         ResultScanner rs = null;  
  160.         try {  
  161.             rs = pool.getTable(tableName).getScanner(new Scan());  
  162.             for(Result r: rs){  
  163.                 System.out.println("得到rowKey:" + new String(r.getRow()));  
  164.                 for(KeyValue keyValue: r.raw()){  
  165.                     System.out.println("列:" + new String(keyValue.getFamily()) + ":" + new String(keyValue.getRow())  
  166.                             + "====value:" + new String(keyValue.getValue()));  
  167.                 }  
  168.             }  
  169.         } catch (IOException e) {  
  170.             e.printStackTrace();  
  171.         }finally{  
  172.             rs.close();  
  173.   
  174.         }  
  175.     }  
  176.   
  177.     /**  
  178.      * 根据rowKey的值获取该行的数据  
  179.      */  
  180.     public static void getDataByRowKey(String tableName, String rowKey){  
  181.         HTablePool pool = new HTablePool(configuration, 1000);  
  182.         Get get = new Get(rowKey.getBytes()); //根据rowkey查询  
  183.         try {  
  184.             Result result = pool.getTable(tableName).get(get);  
  185.             System.err.println("得到rowkey:" + new String(result.getRow()));  
  186.             for(KeyValue keyValue:result.raw()){  
  187.                 System.err.println("列:" + new String(keyValue.getFamily())  
  188.                         + "===值:" + new String(keyValue.getValue()));  
  189.             }  
  190.         } catch (IOException e) {  
  191.             e.printStackTrace();  
  192.         }  
  193.     }  
  194.   
  195.     /**  
  196.      * 根据指定columnFamily:column 来获取查询的数据  
  197.      */  
  198.     public static void getScan(String tableName, String columnFamily, String column){  
  199.         HTablePool pool = new HTablePool(configuration, 1000);  
  200.         Filter filter = new SingleColumnValueFilter(Bytes.toBytes(columnFamily),  
  201.                 null, CompareOp.EQUAL, Bytes.toBytes("ggg"));  
  202.         Scan scan = new Scan();  
  203.         scan.setFilter(filter);  
  204.         try {  
  205.             ResultScanner rs = pool.getTable(tableName).getScanner(scan);  
  206.             for(Result r:rs){  
  207.                 System.err.println("实际得到到的rowkey:" + new String(r.getRow()));  
  208.                 for(KeyValue keyValue:r.raw()){  
  209.                     System.err.println("列:" + new String(keyValue.getFamily())  
  210.                             + "====值:" + new String(keyValue.getValue()));  
  211.                 }  
  212.             }  
  213.         } catch (IOException e) {  
  214.             // TODO Auto-generated catch block  
  215.             e.printStackTrace();  
  216.         }  
  217.     }  
  218.   
  219.     /**  
  220.      * 多个限制条件查询columnFamily:column来获取查询结果  
  221.      */  
  222.     public static void QueryByCondition3(String tableName){  
  223.         HTablePool pool = new HTablePool(configuration, 1000);  
  224.         HTable table = (HTable) pool.getTable(tableName);  
  225.   
  226.         List<Filter> filters = new ArrayList<Filter>();  
  227.   
  228.         Filter filter1 = new SingleColumnValueFilter(Bytes.toBytes("column1"),  
  229.                     null, CompareOp.EQUAL, Bytes.toBytes("aaa"));  
  230.         filters.add(filter1);  
  231.   
  232.         Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes("column2"),  
  233.                     null, CompareOp.EQUAL, Bytes.toBytes("bbb"));  
  234.         filters.add(filter2);  
  235.   
  236.         Filter filter3 = new SingleColumnValueFilter(Bytes.toBytes("column3"),  
  237.                     null, CompareOp.EQUAL, Bytes.toBytes("ccc"));  
  238.         filters.add(filter3);  
  239.   
  240.         FilterList filterList1 = new FilterList(filters);  
  241.   
  242.         Scan scan = new Scan();  
  243.         scan.setFilter(filterList1);  
  244.         ResultScanner rs;  
  245.         try {  
  246.             rs = table.getScanner(scan);  
  247.             for(Result r: rs){  
  248.                 System.out.println("得到的rowkey:" + new String(r.getRow()));  
  249.                 for(KeyValue keyValue: r.raw()){  
  250.                     System.out.println("列:" + new String(keyValue.getFamily())  
  251.                             + "====值:" + new String(keyValue.getValue()));  
  252.                 }  
  253.             }  
  254.         } catch (IOException e) {  
  255.             // TODO Auto-generated catch block  
  256.             e.printStackTrace();  
  257.         }   
  258.     }   
  259. }  
注意:在上面的语句中,当执行到 HTable table = (HTable) pool.getTable(tableName); 时,
会报类型强转的异常:
org.apache.hadoop.hbase.client.HTablePool$PooledHTable cannot be cast to org.apache.hadoop.hbase.client.HTable
这是由于:pool.getTable(tableName) 返回的是 HTableInterface,该类型不能强制转换为 HTable。
解决办法:
将
HTable table = (HTable) pool.getTable(tableName);
table.put(put);
改为:
pool.getTable(tableName).put(put);
注意:
HBase 建立表时需要指定 Column Family 才能建立成功。
  272.   
另外:在上面运行过程中可能出现不能调用 hadoop 的情况,还需要添加以下两个步骤:
1. 将 hadoop2.6(x64)V0.2.zip 的内容覆盖到本地 hadoop/bin 中即可。
2. 将 hadoop2.6(x64)V0.2.zip 中的 hadoop.dll 添加到 window/system32 中即可。
相关文章
相关标签/搜索