Windows7+Eclipse环境下Hbase Java客户端的开发

标签: windows7 eclipse 环境 | 发表时间:2016-04-16 10:29 | 作者:
出处:http://m635674608.iteye.com
  1. 构建Hbase集群,请参考: Centos 下Hbase0.98.10-hadoop2 集群的配置
  2. 在Eclipse中创建Maven的工程
  3. 将集群的hbase-site.xml文件放到工程的classes目录下

  4. 配置操作系统的

    C:\windows\system32\drivers\etc\hosts文件,将Hbase集群的IP以及域名配置到该文件中

    192.168.40.108   hadoop108
    192.168.40.148   hadoop148 
    192.168.40.104   hadoop104 
    192.168.40.107   hadoop107 
    192.168.40.105   hadoop105
  5. 编写Maven的pom.xml文件,依赖内容如下

    <dependencies>
    
    <dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.7.7</version>
    </dependency>
    
    <dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro-tools</artifactId>
    <version>1.7.7</version>
    </dependency>
    
    <dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro-maven-plugin</artifactId>
    <version>1.7.7</version>
    </dependency>
    <dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro-compiler</artifactId>
    <version>1.7.7</version>
    </dependency>
    
    <dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>0.98.8-hadoop2</version>
    </dependency>
    
    <dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase</artifactId>
    <version>0.90.2</version>
    </dependency>
    <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>1.2.1</version>
    </dependency>
    
    <dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>3.8.1</version>
    <scope>test</scope>
    </dependency>
    </dependencies>
  6. 编辑Java源码

    package com.eric.hbase;
    
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.ResultScanner;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.util.Bytes;
    
    /**
     * Minimal HBase client examples: create/delete a table and basic
     * put/get/scan/delete row operations. The cluster location is read
     * from the hbase-site.xml placed on the classpath (see step 3 of the
     * article).
     *
     * NOTE(review): this uses the HBase 0.98-era client API (HBaseAdmin,
     * HTable); in HBase 1.x+ these are replaced by Connection/Admin/Table.
     */
    public class BaseOperation {

        private static final String TABLE_NAME = "demo_table";

        public static Configuration conf = null;
        public HTable table = null;
        public HBaseAdmin admin = null;

        static {
            // Loads hbase-site.xml from the classpath; printing the
            // ZooKeeper quorum confirms the cluster config was picked up.
            conf = HBaseConfiguration.create();
            System.out.println(conf.get("hbase.zookeeper.quorum"));
        }

        /**
         * Creates a table with the given column families, unless it already
         * exists. (Original "creatTable" spelling kept so existing callers
         * keep compiling.)
         *
         * @param tableName name of the table to create
         * @param familys   column family names to add to the table
         * @throws Exception if the master is unreachable or creation fails
         */
        public static void creatTable(String tableName, String[] familys)
                throws Exception {
            HBaseAdmin admin = new HBaseAdmin(conf);
            try {
                if (admin.tableExists(tableName)) {
                    System.out.println("table already exists!");
                } else {
                    HTableDescriptor tableDesc = new HTableDescriptor(tableName);
                    for (String family : familys) {
                        tableDesc.addFamily(new HColumnDescriptor(family));
                    }
                    admin.createTable(tableDesc);
                    System.out.println("create table " + tableName + " ok.");
                }
            } finally {
                // fix: the admin connection was never released before
                admin.close();
            }
        }

        /**
         * Disables and then deletes the given table. Connection-level
         * failures are logged and swallowed, matching the original
         * best-effort behavior.
         *
         * @param tableName name of the table to drop
         */
        public static void deleteTable(String tableName) throws Exception {
            try {
                HBaseAdmin admin = new HBaseAdmin(conf);
                try {
                    // A table must be disabled before it can be deleted.
                    admin.disableTable(tableName);
                    admin.deleteTable(tableName);
                    System.out.println("delete table " + tableName + " ok.");
                } finally {
                    admin.close();  // fix: leaked admin connection
                }
            } catch (MasterNotRunningException e) {
                e.printStackTrace();
            } catch (ZooKeeperConnectionException e) {
                e.printStackTrace();
            }
        }

        /**
         * Inserts (or overwrites) a single cell. IOExceptions are logged
         * and swallowed, matching the original best-effort behavior.
         *
         * @param tableName table to write to
         * @param rowKey    row key of the cell
         * @param family    column family
         * @param qualifier column qualifier (may be empty)
         * @param value     cell value
         */
        public static void addRecord(String tableName, String rowKey,
                String family, String qualifier, String value) throws Exception {
            try {
                HTable table = new HTable(conf, tableName);
                try {
                    Put put = new Put(Bytes.toBytes(rowKey));
                    put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier),
                            Bytes.toBytes(value));
                    table.put(put);
                    System.out.println("insert recored " + rowKey + " to table "
                            + tableName + " ok.");
                } finally {
                    table.close();  // fix: leaked table connection
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        /**
         * Deletes an entire row.
         *
         * @param tableName table to delete from
         * @param rowKey    row key to remove
         * @throws IOException on communication failure
         */
        public static void delRecord(String tableName, String rowKey)
                throws IOException {
            HTable table = new HTable(conf, tableName);
            try {
                // fix: typed list instead of raw List; Bytes.toBytes instead
                // of String.getBytes() so the key encoding matches addRecord
                // regardless of the platform default charset.
                List<Delete> list = new ArrayList<Delete>();
                Delete del = new Delete(Bytes.toBytes(rowKey));
                list.add(del);
                table.delete(list);
                System.out.println("del recored " + rowKey + " ok.");
            } finally {
                table.close();  // fix: leaked table connection
            }
        }

        /**
         * Fetches one row and prints every cell as
         * "row family:qualifier timestamp value".
         *
         * @param tableName table to read from
         * @param rowKey    row key to fetch
         * @throws IOException on communication failure
         */
        public static void getOneRecord(String tableName, String rowKey)
                throws IOException {
            HTable table = new HTable(conf, tableName);
            try {
                // fix: Bytes.toBytes for charset-consistent row keys
                Get get = new Get(Bytes.toBytes(rowKey));
                Result rs = table.get(get);
                for (KeyValue kv : rs.raw()) {
                    System.out.print(new String(kv.getRow()) + " ");
                    System.out.print(new String(kv.getFamily()) + ":");
                    System.out.print(new String(kv.getQualifier()) + " ");
                    System.out.print(kv.getTimestamp() + " ");
                    System.out.println(new String(kv.getValue()));
                }
            } finally {
                table.close();  // fix: leaked table connection
            }
        }

        /**
         * Scans the whole table and prints every cell, one line per cell.
         * IOExceptions are logged and swallowed, matching the original
         * best-effort behavior.
         *
         * @param tableName table to scan
         */
        public static void getAllRecord(String tableName) {
            try {
                HTable table = new HTable(conf, tableName);
                try {
                    Scan s = new Scan();
                    ResultScanner ss = table.getScanner(s);
                    try {
                        for (Result r : ss) {
                            for (KeyValue kv : r.raw()) {
                                System.out.print(new String(kv.getRow()) + " ");
                                System.out.print(new String(kv.getFamily()) + ":");
                                System.out.print(new String(kv.getQualifier()) + " ");
                                System.out.print(kv.getTimestamp() + " ");
                                System.out.println(new String(kv.getValue()));
                            }
                        }
                    } finally {
                        ss.close();  // fix: scanner held server-side resources open
                    }
                } finally {
                    table.close();  // fix: leaked table connection
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        /**
         * Demo driver: creates the "scores" table, inserts sample rows,
         * then exercises get/scan/delete.
         */
        public static void main(String[] agrs) {
            try {
                String tablename = "scores";
                String[] familys = { "grade", "course" };
                BaseOperation.creatTable(tablename, familys);

                // add record zkb
                BaseOperation.addRecord(tablename, "zkb", "grade", "", "5");
                BaseOperation.addRecord(tablename, "zkb", "course", "", "90");
                BaseOperation.addRecord(tablename, "zkb", "course", "math", "97");
                BaseOperation.addRecord(tablename, "zkb", "course", "art", "87");
                // add record baoniu
                BaseOperation.addRecord(tablename, "baoniu", "grade", "", "4");
                BaseOperation
                        .addRecord(tablename, "baoniu", "course", "math", "89");

                System.out.println("===========get one record========");
                BaseOperation.getOneRecord(tablename, "zkb");

                System.out.println("===========show all record========");
                BaseOperation.getAllRecord(tablename);

                System.out.println("===========del one record========");
                BaseOperation.delRecord(tablename, "baoniu");
                BaseOperation.getAllRecord(tablename);

                System.out.println("===========show all record========");
                BaseOperation.getAllRecord(tablename);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

    }
  7. 运行程序,输出如下:

    hadoop107,hadoop108,hadoop104
    log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
    log4j:WARN Please initialize the log4j system properly.
    log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
    table already exists!
    insert recored zkb to table scores ok.
    insert recored zkb to table scores ok.
    insert recored zkb to table scores ok.
    insert recored zkb to table scores ok.
    insert recored baoniu to table scores ok.
    insert recored baoniu to table scores ok.
    ===========get one record========
    zkb course: 1425258910718 90
    zkb course:art 1425258910727 87
    zkb course:math 1425258910722 97
    zkb grade: 1425258910705 5
    ===========show all record========
    baoniu course:math 1425258910734 89
    baoniu grade: 1425258910730 4
    zkb course: 1425258910718 90
    zkb course:art 1425258910727 87
    zkb course:math 1425258910722 97
    zkb grade: 1425258910705 5
    ===========del one record========
    del recored baoniu ok.
    zkb course: 1425258910718 90
    zkb course:art 1425258910727 87
    zkb course:math 1425258910722 97
    zkb grade: 1425258910705 5
    ===========show all record========
    zkb course: 1425258910718 90
    zkb course:art 1425258910727 87
    zkb course:math 1425258910722 97
    zkb grade: 1425258910705 5

 

http://www.tuicool.com/articles/r6ZZBjU

http://xpenxpen.iteye.com/blog/2158922

http://www.cnblogs.com/ggjucheng/p/3381328.html



已有 0 人发表留言,猛击->> 这里<<-参与讨论


ITeye推荐



相关 [windows7 eclipse 环境] 推荐:

Windows7+Eclipse环境下Hbase Java客户端的开发

- - zzm
Centos 下Hbase0.98.10-hadoop2 集群的配置. 在Eclipse中创建Maven的工程. 将集群的hbase-site.xml文件放到工程的classes目录下. C:\windows\system32\drivers\etc文件,将Hbase集群的IP以及域名配置到该文件中.

Android + Eclipse + PhoneGap 环境配置

- - CSDN博客推荐文章
用了3天的时间,终于把环境搭建完毕,郁闷了N天,终于完成了. 这里我只是讲述我安装的过程,仅供大家参考. 环境搭建首先要去下载一些安装包:. (下载前注意一下,电脑是32位还是64位的请注意选择安装包). java环境的JDK:http://www.oracle.com/technetwork/java/javase/downloads/index.html.

eclipse配置nodejs开发环境

- - CSDN博客云计算推荐文章
首先说明一下本人的开发环境,个人兴趣爱好问题,这边使用的很多环境都是基于Linux系统下做的,这次也不例外. 前提条件:NodeJs已经在系统中正确安装. 1、下载安装eclipse,地址: http://www.eclipse.org/downloads/. Eclipse Standard 版本即可.

使用windows7的virtual PC打造原装IE6、IE7、IE8测试环境

- LiuWeifeng - 断桥残雪部落格
公司使用的是windows7操作系统,自己之前一直使用IETester来测试页面在IE6~IE8下的兼容性,可是在测试js的时候还是会出现实际使用的还是windows7的IE8内核问题. 所以自己想通过virtual PC来搭建个虚拟的测试环境,为什么选择virtual PC呢. 1、windows 7系统自带,并且xp mode就可以直接安装上xp系统.

基于Eclipse的Hadoop开发环境配置方法

- - CSDN博客推荐文章
(1)启动hadoop守护进程. 在Terminal中输入如下命令:. (2)在Eclipse上安装Hadoop插件. 找到hadoop的安装路径,我的是hadoop-0.20.2,将/home/wenqisun/hadoop-0.20.2/contrib/eclipse-plugin/下的hadoop-0.20.2- eclipse-plugin.jar拷贝到eclipse安装目录下的plugins里,我的是在/home/wenqisun/eclipse /plugins/下.

windows 7 with eclipse 下hadoop应用开发环境搭建

- - CSDN博客云计算推荐文章
最近开始着手高校云平台的搭建,前些天做了hadoop集群测试环境的安装与配置的经验分享, 这篇文章主要介绍win7 64位下 eclipse4.2 连接远程Redhat Linux 5下hadoop-1.2.0集群开发环境搭建. 1、window 7 64位. 三、安装配置hadoop集群. 四、在Eclipse下安装配置hadoop插件.

Eclipse for php + Xdebug搭建PHP的调试环境

- - CSDN博客Web前端推荐文章
Eclipse for php + Xdebug搭建PHP的调试环境.     第一步:到Eclipse的官网去下载PHP-Eclipse: http://www.eclipse.org/downloads/packages/eclipse-php-developers/heliossr1. 第二步:下载Xdebug (下载地址位于: http://xdebug.org/download.php,我的PHP版本为 PHP 5.3.28 Thread Safety, Apach为2.2.25,使用的端口号8080),我下载的Xdebug版本为php_xdebug-2.2.3-5.3-vc9.dll,并且放入到"PHP/ext目录中.

用Python+Django在Eclipse环境下开发web网站 - Ruthless

- - 博客园_Ruthless
如果这是你第一次使用Django,那么你必须进行一些初始设置. 也就是通过自动生成代码来建立一个Django项目--一个Django项目的设置集,包含了数据库配置、Django详细选项设置和应用特性配置,具体操作步骤如下所示. 1.新建Django项目. 3.测试新建的模块是否正常. 服务器启动起来后,去浏览器输入网址:http://127.0.0.1:8000,你会看到一个令人愉悦的,柔和的淡蓝色 “Welcome to Django” 页面.

windows7 设计趋势

- jary - 平凡的世界:看看,闹闹,混混,写写
1.强化信息分层,改变以前比较扁平的信息架构:. IE,文件夹,即使窗口众多,也会有条不紊. 2.不拘泥于windows本身的开发架构,而是基于用户常见行为去进行信息架构:. 新建文件夹,共享,播放,刻录,均是用户针对文件夹常见的操作行为. 3.在信息架构中,更加注重关联性引导:. 比以往更加强大的右键菜单,但炫耀的并不是强大的功能,只是展示用户的普通需求.

从Windows 1.0升级到Windows7

- Sheriff - Solidot
一位勇敢的操作系统研究者发布了一则YouTube视频(YouTube,Youku),展现了从Windows 1.0一路升级到Windows7的经过. 作者是在一个VMWar虚拟机上安装微软的操作系统,视频展示了DOS和Windows的安装,通过安装一些DOS游戏来观察较新的操作系统是如何处理向后兼容性的,以及Windows中偏好设置是如何在升级中保留下来的.