Browse Source

迅捷开发环境,hbase测试用例提交。

lilb3 6 years ago
parent
commit
cc39ee2f82

+ 61 - 0
ipu-hbase-example/pom.xml

@@ -0,0 +1,61 @@
1
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<parent>
		<groupId>com.ai.ipu</groupId>
		<artifactId>ipu-aggregator</artifactId>
		<version>3.1-SNAPSHOT</version>
	</parent>

	<groupId>com.ai.ipu.example</groupId>
	<artifactId>ipu-hbase-example</artifactId>
	<version>1.0</version>
	<packaging>jar</packaging>

	<name>ipu-hbase-example</name>
	<url>http://maven.apache.org</url>

	<properties>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<ipu>3.1-SNAPSHOT</ipu>
		<jdk>1.8</jdk>
		<junit>4.12</junit>
		<hbase>1.2.6</hbase>
	</properties>

	<dependencies>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>${junit}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hbase</groupId>
			<artifactId>hbase-client</artifactId>
			<version>${hbase}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.hbase</groupId>
			<artifactId>hbase-server</artifactId>
			<version>${hbase}</version>
		</dependency>

		<dependency>
			<groupId>com.ai.ipu</groupId>
			<artifactId>ipu-basic</artifactId>
		</dependency>
	</dependencies>

	<build>
		<plugins>
			<!-- Surefire skips all tests during a normal build; presumably
			     because they need a live HBase/ZooKeeper cluster (see
			     src/test/resources/hbase.properties) - confirm before
			     re-enabling. Run them manually from the IDE instead. -->
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-surefire-plugin</artifactId>
				<configuration>
					<skip>true</skip>
				</configuration>
			</plugin>
		</plugins>
	</build>
</project>

+ 76 - 0
ipu-hbase-example/src/test/java/com/ai/ipu/example/hbase/HbaseConfig.java

@@ -0,0 +1,76 @@
1
package com.ai.ipu.example.hbase;
2
3
import com.ai.ipu.basic.file.ResourceBundleUtil;
4
import com.ai.ipu.basic.log.ILogger;
5
import com.ai.ipu.basic.log.IpuLoggerFactory;
6
7
/**
 * Static configuration holder for the HBase example tests.
 * <p>
 * The static initializer at the bottom hands this class to
 * {@code ResourceBundleUtil} together with the bundle name "hbase" -
 * presumably mapping hbase.properties keys (e.g. {@code table.cloumns})
 * onto the static fields by name; NOTE(review): confirm against
 * ResourceBundleUtil. Field names must therefore stay in sync with the
 * property keys, including the deliberate "Cloumns" spelling.
 *
 * @Author: lilb3@asiainfo.com
 * @Date: 2019-06-18 17:05
 **/
public class HbaseConfig {
    private static final ILogger LOGGER = IpuLoggerFactory.createLogger(HbaseConfig.class);
    /* Resource bundle base name: hbase.properties on the test classpath. */
    private static final String CONFIG_FILE_PATH = "hbase";
    /* Hadoop installation directory (property hadoop.home.dir). */
    private static String hadoopHomeDir;
    /* HBase storage path on HDFS (property hbase.rootdir). */
    private static String hbaseRootdir;
    /* ZooKeeper quorum address list the HBase client connects to. */
    private static String hbaseZookeeperQuorum;
    /* HBase table name used by the examples. */
    private static String tableName;
    /* Column families of the table, "###"-separated. */
    private static String tableCloumns;
    /* Layout of a student record: the column qualifiers, "###"-separated. */
    private static String key;
    /* Sample rows: "###"-separated values aligned with the qualifiers above. */
    private static String zhangShan;
    private static String liSi;
    private static String wangWu;

    public static String getHadoopHomeDir() {
        return hadoopHomeDir;
    }

    public static String getHbaseRootdir() {
        return hbaseRootdir;
    }

    public static String getHbaseZookeeperQuorum() {
        return hbaseZookeeperQuorum;
    }

    public static String getTableName() {
        return tableName;
    }

    public static String getTableCloumns() {
        return tableCloumns;
    }

    public static String getKey() {
        return key;
    }

    public static String getZhangShan() {
        return zhangShan;
    }

    public static String getLiSi() {
        return liSi;
    }

    public static String getWangWu() {
        return wangWu;
    }

    /* Load the configuration file once; on failure, log and leave the
     * fields null (callers such as HbaseExample will then fail fast). */
    static {
        try {
            ResourceBundleUtil.initialize(CONFIG_FILE_PATH, HbaseConfig.class);
        } catch (Exception e) {
            LOGGER.error(CONFIG_FILE_PATH + "配置文件读取失败", e);
        }
    }
}

+ 375 - 0
ipu-hbase-example/src/test/java/com/ai/ipu/example/hbase/HbaseExample.java

@@ -0,0 +1,375 @@
1
package com.ai.ipu.example.hbase;
2
3
import org.apache.hadoop.conf.Configuration;
4
import org.apache.hadoop.hbase.Cell;
5
import org.apache.hadoop.hbase.CellUtil;
6
import org.apache.hadoop.hbase.HColumnDescriptor;
7
import org.apache.hadoop.hbase.HTableDescriptor;
8
import org.apache.hadoop.hbase.TableName;
9
import org.apache.hadoop.hbase.client.Admin;
10
import org.apache.hadoop.hbase.client.Connection;
11
import org.apache.hadoop.hbase.client.ConnectionFactory;
12
import org.apache.hadoop.hbase.client.Delete;
13
import org.apache.hadoop.hbase.client.Get;
14
import org.apache.hadoop.hbase.client.Put;
15
import org.apache.hadoop.hbase.client.Result;
16
import org.apache.hadoop.hbase.client.ResultScanner;
17
import org.apache.hadoop.hbase.client.Scan;
18
import org.apache.hadoop.hbase.client.Table;
19
import org.apache.hadoop.hbase.filter.CompareFilter;
20
import org.apache.hadoop.hbase.filter.FilterList;
21
import org.apache.hadoop.hbase.filter.PrefixFilter;
22
import org.apache.hadoop.hbase.filter.RegexStringComparator;
23
import org.apache.hadoop.hbase.filter.RowFilter;
24
import org.apache.hadoop.hbase.util.Bytes;
25
import org.junit.After;
26
import org.junit.Before;
27
import org.junit.Test;
28
29
import java.util.ArrayList;
30
import java.util.List;
31
32
public class HbaseExample {
33
    private static final String SPLIT_STRING = "###";
34
    private static final String[] TABLE_COLUMNS = HbaseConfig.getTableCloumns().split(SPLIT_STRING);
35
    private static final String[] KEYS = HbaseConfig.getKey().split(SPLIT_STRING);
36
    private static final String TABLE_NAME = HbaseConfig.getTableName();
37
    private Connection connection = null;
38
39
    private Table table = null;
40
41
    private Admin admin = null;
42
43
    /**
44
     * 初始化配置
45
     *
46
     * @throws Exception 异常
47
     */
48
    @Before
49
    public void setBefore() throws Exception {
50
        Configuration configuration = new Configuration();
51
        System.setProperty("hadoop.home.dir", HbaseConfig.getHadoopHomeDir());
52
        configuration.set("hbase.rootdir", HbaseConfig.getHbaseRootdir());
53
        configuration.set("hbase.zookeeper.quorum", HbaseConfig.getHbaseZookeeperQuorum());
54
        connection = ConnectionFactory.createConnection(configuration);
55
        admin = connection.getAdmin();
56
    }
57
58
    /**
59
     * 关闭连接
60
     *
61
     * @throws Exception 异常
62
     */
63
    @After
64
    public void setAfter() throws Exception {
65
        if (connection != null) connection.close();
66
    }
67
68
    /**
69
     * 创建表
70
     *
71
     * @throws Exception 异常
72
     */
73
    @Test
74
    public void createTable() throws Exception {
75
        if (admin.tableExists(TableName.valueOf(TABLE_NAME))) {
76
            System.out.println("表已经存在");
77
        } else {
78
            HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
79
            //添加列族columnFamily ,不必指定列
80
            String[] tableColumns = HbaseConfig.getTableCloumns().split(SPLIT_STRING);
81
            for (String column : tableColumns) {
82
                hTableDescriptor.addFamily(new HColumnDescriptor(column));
83
            }
84
            admin.createTable(hTableDescriptor);
85
            System.out.println("表" + TABLE_NAME + "创建成功");
86
        }
87
    }
88
89
    /**
90
     * 查看表和表结构
91
     *
92
     * @throws Exception 异常
93
     */
94
    @Test
95
    public void listTable() throws Exception {
96
        HTableDescriptor[] tableList = admin.listTables();
97
        if (tableList.length > 0) {
98
            for (HTableDescriptor ta : tableList) {
99
                System.out.println(ta.getNameAsString());
100
                for (HColumnDescriptor column :
101
                        ta.getColumnFamilies()) {
102
                    System.out.println("\t" + column.getNameAsString());
103
                }
104
            }
105
        }
106
    }
107
108
    /**
109
     * 添加单条记录
110
     *
111
     * @throws Exception 异常
112
     */
113
    @Test
114
    public void putOne() throws Exception {
115
116
        Table table = connection.getTable(TableName.valueOf(TABLE_NAME));
117
        //创建 put,并制定 put 的Rowkey
118
        Put put = new Put(Bytes.toBytes("wangwu"));
119
        String[] values = HbaseConfig.getWangWu().split(SPLIT_STRING);
120
        for (int i = 0; i < TABLE_COLUMNS.length; i++) {
121
            put.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i]), Bytes.toBytes(values[2 * i]));
122
            put.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i + 1]), Bytes.toBytes(values[2 * i + 1]));
123
        }
124
        table.put(put);
125
        System.out.println("添加成功。");
126
    }
127
128
    /**
129
     * 添加多条记录
130
     *
131
     * @throws Exception 异常
132
     */
133
    @Test
134
    public void putList() throws Exception {
135
136
        Table table = connection.getTable(TableName.valueOf(TABLE_NAME));
137
        //创建 put,并制定 put 的Rowkey
138
        Put put = new Put(Bytes.toBytes("zhangsan"));
139
        String[] values = HbaseConfig.getZhangShan().split(SPLIT_STRING);
140
        for (int i = 0; i < TABLE_COLUMNS.length; i++) {
141
            put.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i]), Bytes.toBytes(values[2 * i]));
142
            put.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i + 1]), Bytes.toBytes(values[2 * i + 1]));
143
        }
144
145
        Put put1 = new Put(Bytes.toBytes("lisi"));
146
        String[] values1 = HbaseConfig.getLiSi().split(SPLIT_STRING);
147
        for (int i = 0; i < TABLE_COLUMNS.length; i++) {
148
            put1.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i]), Bytes.toBytes(values1[2 * i]));
149
            put1.addColumn(Bytes.toBytes(TABLE_COLUMNS[i]), Bytes.toBytes(KEYS[2 * i + 1]), Bytes.toBytes(values1[2 * i + 1]));
150
        }
151
152
        List<Put> list = new ArrayList<>();
153
        list.add(put);
154
        list.add(put1);
155
156
        table.put(list);
157
        System.out.println("添加多条记录成功。");
158
    }
159
160
    /**
161
     * 根据Rowkey进行查询
162
     *
163
     * @throws Exception 异常
164
     */
165
    @Test
166
    public void scanByRowKey() throws Exception {
167
        String rowKey = "zhangsan";
168
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
169
        Get get = new Get(rowKey.getBytes());
170
171
        Result result = table.get(get);
172
        System.out.println("列族\t列名\t内容");
173
        for (Cell cell : result.rawCells()) {
174
            System.out.println(
175
                    Bytes.toString(CellUtil.cloneRow(cell)) + "\t" +  //Rowkey
176
                            Bytes.toString(CellUtil.cloneFamily(cell)) + "\t" + //CF
177
                            Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t" +//qualifier
178
                            Bytes.toString(CellUtil.cloneValue(cell)) //value
179
            );
180
        }
181
    }
182
183
    /**
184
     * 根据Rowkey和列族和列进行查询内容
185
     *
186
     * @throws Exception 异常
187
     */
188
    @Test
189
    public void scanByRowKey1() throws Exception {
190
        String rowKey = "zhangsan";
191
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
192
        Get get = new Get(rowKey.getBytes());
193
        get.addColumn(Bytes.toBytes(TABLE_COLUMNS[2]), Bytes.toBytes(KEYS[5]));
194
195
        Result result = table.get(get);
196
        System.out.println("列族rowKey = \"zhangsan\"列族为" + TABLE_COLUMNS[2] + ",列为" + KEYS[5] + "的值是:");
197
        String value = Bytes.toString(result.getValue(Bytes.toBytes(TABLE_COLUMNS[2]), Bytes.toBytes(KEYS[5])));
198
        System.out.println(value);
199
    }
200
201
    /**
202
     * 全表查询
203
     *
204
     * @throws Exception 异常
205
     */
206
    @Test
207
    public void scanTable() throws Exception {
208
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
209
        Scan scan = new Scan();
210
        ResultScanner rs = table.getScanner(scan);
211
        for (Result result : rs) {
212
            printResult(result);
213
        }
214
    }
215
216
    /**
217
     * Scan 查询 添加个条件,Scan 对象参数是  Rowkey
218
     *
219
     * @throws Exception 异常
220
     */
221
    @Test
222
    public void scanTable1() throws Exception {
223
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
224
        Scan scan = new Scan(Bytes.toBytes("wangwu"), Bytes.toBytes("wangwu"));
225
226
        ResultScanner rs = table.getScanner(scan);
227
        for (Result result : rs) {
228
            printResult(result);
229
        }
230
    }
231
232
    /**
233
     * 定义私有方法 用于打印表的信息
234
     *
235
     * @param result 需要打印的结果
236
     */
237
    private void printResult(Result result) {
238
        for (Cell cell :
239
                result.rawCells()) {
240
            System.out.println(
241
                    //Rowkey
242
                    Bytes.toString(CellUtil.cloneRow(cell)) + "\t" +
243
                            //CF
244
                            Bytes.toString(CellUtil.cloneFamily(cell)) + "\t" +
245
                            //qualifier
246
                            Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t" +
247
                            //value
248
                            Bytes.toString(CellUtil.cloneValue(cell))
249
            );
250
        }
251
        System.out.println("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~");
252
    }
253
254
255
    /**
256
     * 条件过滤  Filter
257
     * 查询后最名 为  wu的
258
     * Hbase 中数据是按照字典顺序进行排列的
259
     * 不会因为插入的时间不同而不同
260
     *
261
     * @throws Exception 异常
262
     */
263
    @Test
264
    public void rowFilter() throws Exception {
265
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
266
        String reg = "^*wu";
267
        org.apache.hadoop.hbase.filter.Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator(reg));
268
        Scan scan = new Scan();
269
        scan.setFilter(filter);
270
        ResultScanner results = table.getScanner(scan);
271
        for (Result result : results) {
272
            printResult(result);
273
        }
274
    }
275
276
    /**
277
     * 前缀名过滤
278
     *
279
     * @throws Exception 异常
280
     */
281
    @Test
282
    public void prefixFilter() throws Exception {
283
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
284
        org.apache.hadoop.hbase.filter.Filter filter = new PrefixFilter(Bytes.toBytes("li"));
285
        Scan scan = new Scan();
286
        scan.setFilter(filter);
287
        ResultScanner results = table.getScanner(scan);
288
        for (Result result : results) {
289
            printResult(result);
290
        }
291
    }
292
293
    /**
294
     * 多条件过滤  FilterList
295
     * <p>
296
     * MUST_PASS_ALL 全都满足条件
297
     * <p>
298
     * MUST_PASS_ONE  满足一个就可以
299
     *
300
     * @throws Exception 异常
301
     */
302
    @Test
303
    public void filterList() throws Exception {
304
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
305
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
306
        org.apache.hadoop.hbase.filter.Filter filter = new PrefixFilter(Bytes.toBytes("li"));
307
        org.apache.hadoop.hbase.filter.Filter filter2 = new PrefixFilter(Bytes.toBytes("zh"));
308
        filterList.addFilter(filter);
309
        filterList.addFilter(filter2);
310
        Scan scan = new Scan();
311
        scan.setFilter(filterList);
312
        ResultScanner results = table.getScanner(scan);
313
        for (Result result : results) {
314
            printResult(result);
315
        }
316
    }
317
318
    /**
319
     * 删除数据
320
     *
321
     * @throws Exception 异常
322
     */
323
    @Test
324
    public void delete() throws Exception {
325
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
326
        Delete delete = new Delete(Bytes.toBytes("wangwu"));
327
        table.delete(delete);
328
        System.out.println("删除成功。");
329
    }
330
331
    /**
332
     * 删除数据
333
     *
334
     * @throws Exception 异常
335
     */
336
    @Test
337
    public void multiDelete() throws Exception {
338
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
339
        Delete delete = new Delete(Bytes.toBytes("zhangsan"));
340
        //删除当前版本的内容
341
        // delete.addColumn(Bytes.toBytes("more"),Bytes.toBytes("good"));
342
        //删除所有版本
343
        //delete.addColumns(Bytes.toBytes("more"),Bytes.toBytes("good"));
344
345
        /*
346
         * addColumn addColumns 是不一样的 ,记住了,
347
         *
348
         * addColumn 是删除单元格当前最近版本的 内容,
349
         *
350
         * addColumns 是删除单元格所有版本的内容
351
         * */
352
        //删除列族
353
        delete.addFamily(Bytes.toBytes("info"));
354
355
        table.delete(delete);
356
        System.out.println("删除列成功。");
357
    }
358
359
    /**
360
     * 删除表
361
     *
362
     * @throws Exception 异常
363
     */
364
    @Test
365
    public void dropTable() throws Exception {
366
        table = connection.getTable(TableName.valueOf(TABLE_NAME));
367
        if (admin.tableExists(TableName.valueOf(TABLE_NAME))) {
368
            admin.disableTable(TableName.valueOf(TABLE_NAME));
369
            admin.deleteTable(TableName.valueOf(TABLE_NAME));
370
            System.out.println("删除成功");
371
        } else {
372
            System.out.println("表不存在");
373
        }
374
    }
375
}

+ 15 - 0
ipu-hbase-example/src/test/resources/hbase.properties

@@ -0,0 +1,15 @@
1
# Hadoop installation directory (on host "mysql")
hadoop.home.dir=/home/mysql/software/hadoop273
# HBase storage path on HDFS (namenode host iZm5e5xe1w25avi0io1f5aZ)
hbase.rootdir=hdfs://iZm5e5xe1w25avi0io1f5aZ:9000/hbase
# ZooKeeper quorum the HBase client connects to
hbase.zookeeper.quorum=iZm5e5xe1w25avi0io1f5aZ:2101,iZm5e5xe1w25avi0io1f5aZ:2102,iZm5e5xe1w25avi0io1f5aZ:2103
# HBase table name
table.name=school
# Column families of the table <do not modify>
# NOTE(review): the key is spelled "cloumns" deliberately - it must match the
# HbaseConfig field name used by the reflective loader; do not fix it alone.
table.cloumns=info###address###more
# Layout of a student record: column qualifiers, "###"-separated,
# two per column family (info, address, more)
key=age###sex###city###address###hobby###good
# Sample rows; values aligned with the qualifiers above
zhang.shan=18###男###beijing###tianjing###girl###basketball
li.si=22###女###nanjing###fuzhou###boy###ball
wang.wu=20###男###changsha###yueyang###girl###walk

+ 8 - 0
ipu-hbase-example/src/test/resources/log4j.properties

@@ -0,0 +1,8 @@
1
# Root logger: ERROR level, console appender only
log4j.rootLogger=error, console
# Silence ZooKeeper client chatter
log4j.logger.org.apache.zookeeper=error
# Verbose output for the example code itself
log4j.logger.com.ai.ipu.example=debug

log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.out
log4j.appender.console.layout=org.apache.log4j.PatternLayout
# e.g. "19/06/18 17:05:00 DEBUG [main] (HbaseExample.java:42) - message"
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %5p [%t] (%F:%L) - %m%n