《educoder平台HBase开发:表的扫描与扫描的缓存和批量处理》由会员分享,可在线阅读,更多相关《educoder平台HBase开发:表的扫描与扫描的缓存和批量处理(5页珍藏版)》请在金锄头文库上搜索。
package step1;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    /**
     * Runs a mixed batch of operations (two Deletes, two Gets) against the
     * given table in a single round trip.
     *
     * @param tablename name of the HBase table to operate on
     * @return an {@code Object[]} with one slot per submitted operation, in
     *         submission order; Delete (and Put) slots hold an empty-result
     *         marker, Get slots hold the fetched {@link Result}
     * @throws Exception on connection or batch-execution failure
     */
    public Object batchOp(String tablename) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // try-with-resources so the connection and table are always closed,
        // even when table.batch(...) throws (original leaked both).
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf(tablename))) {

            // Row is the common supertype of Get/Delete/Put, so one list can
            // carry the whole heterogeneous batch.
            List<Row> rows = new ArrayList<>();

            // Delete operations
            rows.add(new Delete(Bytes.toBytes("row1")));
            rows.add(new Delete(Bytes.toBytes("row2")));

            // Get operations
            rows.add(new Get(Bytes.toBytes("row3")));
            rows.add(new Get(Bytes.toBytes("row10")));

            // One result slot per operation; batch(...) fills them in place.
            Object[] results = new Object[rows.size()];

            // Synchronous call: all results are populated before it returns.
            // Delete/Put entries come back as NONE (no meaningful result).
            table.batch(rows, results);

            return results;
        }
    }
}
package step2;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

public class Task {

    /**
     * Scans every row of the given table and prints each cell's value,
     * decoded as UTF-8, one per line on standard output.
     *
     * @param tablename name of the HBase table to scan
     * @throws Exception on connection or scan failure
     */
    public void scanTable(String tablename) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Close scanner, table and connection in all cases (original leaked
        // all three). An unbounded Scan with no start/stop row reads the
        // entire table.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf(tablename));
             ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result result : scanner) {
                for (Cell cell : result.listCells()) {
                    // StandardCharsets.UTF_8 avoids the checked
                    // UnsupportedEncodingException of the "utf-8" string form.
                    System.out.println(
                            new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8));
                }
            }
        }
    }
}
package step3;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class Task {

    /**
     * Scans a bounded row range of the given table with a scanner cache of
     * 200 rows per RPC, printing each cell's value decoded as UTF-8.
     *
     * @param tablename name of the HBase table to scan
     * @throws Exception on connection or scan failure
     */
    public void scanTable(String tablename) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf(tablename))) {

            Scan scan = new Scan();
            // Fetch up to 200 rows per server round trip instead of the
            // default, cutting RPC count on large scans.
            scan.setCaching(200);
            // NOTE(review): the source text is garbled here — the literal was
            // truncated and its comment says "start from row3"; "row1" is the
            // best reconstruction. Confirm the intended start row.
            scan.setStartRow(Bytes.toBytes("row1"));
            // Stop row is exclusive: rows >= "row199" are not returned.
            scan.setStopRow(Bytes.toBytes("row199"));

            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    for (Cell cell : result.listCells()) {
                        System.out.println(
                                new String(CellUtil.cloneValue(cell), StandardCharsets.UTF_8));
                    }
                }
            }
        }
    }
}