Writing Kafka Data into Iceberg
[toc]
```sql
-- Kafka source table: the raw payload is exposed as a single STRING column
create table hadoop_catalog4.iceberg_db.kafka_test_log3 (
    data STRING
) WITH (
    'connector' = 'kafka',
    'topic' = 'userChange',
    'properties.bootstrap.servers' = '10.16.0.2:9092',
    'properties.group.id' = 'test02221011-2',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'raw'
);
```
```sql
-- Another Kafka source table example, reading a behavior-log topic as raw strings
create table kafka_behavior_log_raw (
    log STRING
) WITH (
    'connector' = 'kafka',
    'topic' = 'behavior_log',
    'properties.bootstrap.servers' = 'hadoop101:9092,hadoop102:9092,hadoop103:9092',
    'properties.group.id' = 'rickGroup7',
    'scan.startup.mode' = 'earliest-offset',
    'format' = 'raw'
);
```
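A minimal sketch, assuming the Flink SQL client is used interactively: either source table can be sanity-checked before wiring up the Iceberg sink by streaming its raw payload to the console.

```sql
-- Preview the raw Kafka payload; cancel the query once rows appear
SELECT data FROM hadoop_catalog4.iceberg_db.kafka_test_log3;
```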
```sql
-- Iceberg catalog backed by HDFS (hadoop catalog type)
CREATE CATALOG hadoop_catalog4 WITH (
    'type' = 'iceberg',
    'catalog-type' = 'hadoop',
    'warehouse' = 'hdfs:///user/hive/warehouse/iceberg_hadoop_catalog4',
    'property-version' = '1'
);

use catalog hadoop_catalog4;

create database iceberg_db;

-- Iceberg sink table
create table `hadoop_catalog4`.`iceberg_db`.`ib_hadoop_test_log` (
    data STRING
);

-- Streaming insert from the Kafka source table into the Iceberg table
insert into hadoop_catalog4.iceberg_db.ib_hadoop_test_log
select data from hadoop_catalog4.iceberg_db.kafka_test_log3;
```
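To check whether any rows have been committed to the Iceberg table, a simple query can be issued from the SQL client (a minimal sketch; it reuses the catalog and table names from the DDL above):

```sql
-- Count the rows visible in the Iceberg table's current snapshot
SELECT COUNT(*) FROM hadoop_catalog4.iceberg_db.ib_hadoop_test_log;
```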
The Iceberg table is created successfully, but querying it returns no data.

This is not because the community does not support writing Kafka data into Iceberg. Flink relies on checkpoints/savepoints to commit data to the Iceberg table; checkpointing was not enabled here, so no data is visible.
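A minimal sketch of the fix, assuming the job is submitted from the Flink SQL client (the 60 s interval is an example value, not taken from the original setup): enable periodic checkpointing before resubmitting the insert, so the Iceberg sink commits a snapshot on each checkpoint.

```sql
-- Enable periodic checkpoints; without them the insert job writes data files
-- but never commits an Iceberg snapshot, so queries see nothing
SET 'execution.checkpointing.interval' = '60s';

-- Resubmit the streaming insert after checkpointing is enabled
insert into hadoop_catalog4.iceberg_db.ib_hadoop_test_log
select data from hadoop_catalog4.iceberg_db.kafka_test_log3;
```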