栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 前沿技术 > 大数据 > 大数据系统

Hbase JavaApi 批量获取数据(scan)和插入代码(put) 代码

Hbase JavaApi 批量获取数据(scan)和插入代码(put) 代码

import com.alibaba.fastjson.JSONObject;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HbaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.*;
import java.util.List;

    

public class test2 {
    public static void main(String[] args) {

        //hbase 连接


        try {
            Configuration Hbase_CONF;
            Hbase_CONF = HbaseConfiguration.create();
            Hbase_CONF.set("hbase.zookeeper.property.clientPort", "2181");
            Hbase_CONF.set("hbase.zookeeper.quorum", "192.168.xx.xx");
            Hbase_CONF.set("hbase.master", "192.168.xx.xx:60000");
            Hbase_CONF.set("zookeeper.znode.parent", "/hbase");
            Hbase_CONF.setInt("hbase.hconnection.threads.max", 100);
            Hbase_CONF.setInt("hbase.hconnection.threads.core", 50);
            Hbase_CONF.setLong("hbase.hconnection.threads.keepalivetime", 1000);

            ConnectionFactory.createConnection(Hbase_CONF);
            Connection hbaseConnection = ConnectionFactory.createConnection(Hbase_CONF);
            Table table = hbaseConnection.getTable(TableName.valueOf("0_library_token"));
            System.out.println("Table Name: " + table.getName());
            Scan          scan          = new Scan();


            scan.addColumn(Bytes.toBytes("F"), Bytes.toBytes("F"));
            scan.setMaxVersions(1111111111);//不设置默认,hbase只取row中的1个cell
            ResultScanner resultScanner = table.getScanner(scan);
            for (Result result : resultScanner) {
                List cells = result.listCells();//hbase的一个row对应的所有cell
                byte[] row = result.getRow();
                String rowKey = Bytes.toString(row);//rowkey

                System.out.println("rowKey为"+rowKey);
                    for (Cell cell : cells) {//value
                        String jsonstr = Bytes.toString(CellUtil.clonevalue(cell));
                        //单个value中的cell
                        JSonObject jsonObject = JSONObject.parseObject(jsonstr);
                        System.out.println("path为" + jsonObject.toString );
                    }
                
                System.out.println("************************");

            }


        } catch (IOException e) {
            e.printStackTrace();
        }


    }

插入:

        Configuration Hbase_CONF;
        Hbase_CONF = HbaseConfiguration.create();
        Hbase_CONF.set("hbase.zookeeper.property.clientPort", "2181");
        Hbase_CONF.set("hbase.zookeeper.quorum", "192.168.xx.xx");
        Hbase_CONF.set("hbase.master", "192.168.xx.xx:60000");
        Hbase_CONF.set("zookeeper.znode.parent", "/hbase");
        Hbase_CONF.setInt("hbase.hconnection.threads.max", 100);
        Hbase_CONF.setInt("hbase.hconnection.threads.core", 50);
        Hbase_CONF.setLong("hbase.hconnection.threads.keepalivetime", 1000);        
try {
            ConnectionFactory.createConnection(Hbase_CONF);
            Connection hbaseConnection = ConnectionFactory.createConnection(Hbase_CONF);
            Table table = hbaseConnection.getTable(TableName.valueOf("0_file_pv"));

            


            Put put = new Put("rk0001".getBytes()); //指定rowkey
            put.addColumn("F".getBytes(), "F".getBytes(), "shandong".getBytes());
            
            //插入数据
            table.put(put);

            table.close();
            hbaseConnection.close();
            System.out.println("结束***");
        } catch (IOException e) {
            e.printStackTrace();
        }

包:

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>${hbase.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-server</artifactId>
        <version>${hbase.version}</version>
    </dependency>
</dependencies>

properties:

<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <hadoop.version>2.6.5</hadoop.version>
    <hbase.version>1.2.5</hbase.version>
</properties>

注意hosts文件中的地址与连接地址保持一致

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/629770.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号