为了账号安全,请及时绑定邮箱和手机立即绑定

HBase与HDFS 数据之间的相互转换

标签:
Hbase


一、从Hbase表1中读取数据再把统计结果存到表2

在Hbase中建立相应的表1:

create 'hello','cf'

put 'hello','1','cf:hui','hello world'

put 'hello','2','cf:hui','hello hadoop'

put 'hello','3','cf:hui','hello hive'

put 'hello','4','cf:hui','hello hadoop'

put 'hello','5','cf:hui','hello world'

put 'hello','6','cf:hui','hello world'

java代码:

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

publicclassHBaseToHbase{

publicstaticvoidmain(String[] args)throwsIOException, ClassNotFoundException, InterruptedException{

String hbaseTableName1 ="hello";

String hbaseTableName2 ="mytb2";


prepareTB2(hbaseTableName2); 


Configuration conf =newConfiguration();


Job job = Job.getInstance(conf); 

job.setJarByClass(HBaseToHbase.class); 

job.setJobName("mrreadwritehbase");


Scan scan =newScan();

scan.setCaching(500);

scan.setCacheBlocks(false);


TableMapReduceUtil.initTableMapperJob(hbaseTableName1, scan, doMapper.class, Text.class, IntWritable.class, job); 

TableMapReduceUtil.initTableReducerJob(hbaseTableName2, doReducer.class, job); 

System.exit(job.waitForCompletion(true) ?1:0);


publicstaticclassdoMapperextendsTableMapper{

privatefinalstaticIntWritable one =newIntWritable(1);

@Override

protectedvoidmap(ImmutableBytesWritable key, Result value, Context context)throwsIOException, InterruptedException{

String rowValue = Bytes.toString(value.list().get(0).getValue());

context.write(newText(rowValue), one);


publicstaticclassdoReducerextendsTableReducer{

@Override

protectedvoidreduce(Text key, Iterable values, Context context)throwsIOException, InterruptedException{

System.out.println(key.toString()); 

intsum =0;

Iterator haha = values.iterator(); 

while(haha.hasNext()) {

sum += haha.next().get(); 

Put put =newPut(Bytes.toBytes(key.toString()));

put.add(Bytes.toBytes("mycolumnfamily"), Bytes.toBytes("count"), Bytes.toBytes(String.valueOf(sum)));

context.write(NullWritable.get(), put); 


publicstaticvoidprepareTB2(String hbaseTableName)throwsIOException{

HTableDescriptor tableDesc =newHTableDescriptor(hbaseTableName);

HColumnDescriptor columnDesc =newHColumnDescriptor("mycolumnfamily");

tableDesc.addFamily(columnDesc); 

Configuration  cfg = HBaseConfiguration.create(); 

HBaseAdmin admin =newHBaseAdmin(cfg);

if(admin.tableExists(hbaseTableName)) {

System.out.println("Table exists,trying drop and create!");

admin.disableTable(hbaseTableName); 

admin.deleteTable(hbaseTableName); 

admin.createTable(tableDesc); 

}else{

System.out.println("create table: "+ hbaseTableName);

admin.createTable(tableDesc); 

在Linux中执行该代码:

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/javac HBaseToHbase.java

[hadoop@h71 q1]$ /usr/jdk1.7.0_25/bin/jar cvf xx.jar HBaseToHbase*class

[hadoop@h71 q1]$ hadoop jar xx.jar HBaseToHbase

查看mytb2表:

hbase(main):009:0> scan 'mytb2'

ROW                                                          COLUMN+CELL                                                                                                                                                                   

 hello hadoop                                                column=mycolumnfamily:count, timestamp=1489817182454, value=2

 hello hive                                                  column=mycolumnfamily:count, timestamp=1489817182454, value=1

 hello world                                                 column=mycolumnfamily:count, timestamp=1489817182454, value=3

3 row(s) in 0.0260 seconds



作者:清风_d587
链接:https://www.jianshu.com/p/01411078c449


点击查看更多内容
TA 点赞

若觉得本文不错,就分享一下吧!

评论

作者其他优质文章

正在加载中
  • 推荐
  • 评论
  • 收藏
  • 共同学习,写下你的评论
感谢您的支持,我会继续努力的~
扫码打赏,你说多少就多少
赞赏金额会直接到老师账户
支付方式
打开微信扫一扫,即可进行扫码打赏哦
今天注册有机会得

100积分直接送

付费专栏免费学

大额优惠券免费领

立即参与 放弃机会
意见反馈 帮助中心 APP下载
官方微信

举报

0/150
提交
取消