Paimon 部署
使用的组件版本

| 组件 | 版本 |
|-----------|--------|
| StarRocks | 3.3.0 |
| Flink | 1.19.1 |
| Paimon | 0.8.2 |
| Kafka | 3.7.2 |
创建安装目录
cd /usr/share/
mkdir -p streaminglake
cd streaminglake/
mkdir -p flink
mkdir -p starrocks
mkdir -p paimon
mkdir -p kafka
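上述目录也可以用一条命令一次建齐(等价写法示例):

```bash
# 一次性创建四个组件目录
mkdir -p /usr/share/streaminglake/{flink,starrocks,paimon,kafka}
```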
下载安装包和依赖
下载 Flink 及相关依赖包
cd flink
wget "https://mirrors.aliyun.com/apache/flink/flink-1.19.1/flink-1.19.1-bin-scala_2.12.tgz"
赋予执行权限
chmod u+x flink-1.19.1-bin-scala_2.12.tgz
解压
tar -xf flink-1.19.1-bin-scala_2.12.tgz
下载 flink-hadoop 依赖包
wget "https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.7.5-10.0/flink-shaded-hadoop-2-uber-2.7.5-10.0.jar"
下载 flink-sql-connector-kafka 依赖包
wget "https://repo1.maven.org/maven2/org/apache/flink/flink-sql-connector-kafka/3.2.0-1.18/flink-sql-connector-kafka-3.2.0-1.18.jar"
chmod u+x flink-sql-connector-kafka-3.2.0-1.18.jar
下载 flink-connector-starrocks 依赖包
wget "https://github.com/StarRocks/starrocks-connector-for-apache-flink/releases/download/v1.2.9/flink-connector-starrocks-1.2.9_flink-1.18.jar"
chmod u+x flink-connector-starrocks-1.2.9_flink-1.18.jar
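下载完成后,可以先核对当前 flink 目录下的三个依赖包是否齐全(检查示例):

```bash
# 确认依赖包都已下载且大小正常
ls -lh flink-shaded-hadoop-2-uber-2.7.5-10.0.jar \
       flink-sql-connector-kafka-3.2.0-1.18.jar \
       flink-connector-starrocks-1.2.9_flink-1.18.jar
```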
下载 Paimon 和相关依赖包
cd ../paimon
wget "https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-flink-1.19/0.8.2/paimon-flink-1.19-0.8.2.jar"
#如果使用对象存储,需要下载下面的包
wget "https://repo.maven.apache.org/maven2/org/apache/paimon/paimon-oss/0.8.2/paimon-oss-0.8.2.jar"
授予执行权限
chmod u+x *
将所有依赖包拷贝到 flink-1.19.1/lib 下(当前位于 paimon 目录,flink 相关 jar 位于 ../flink)
cp paimon-flink-1.19-0.8.2.jar paimon-oss-0.8.2.jar ../flink/flink-shaded-hadoop-2-uber-2.7.5-10.0.jar ../flink/flink-connector-starrocks-1.2.9_flink-1.18.jar ../flink/flink-sql-connector-kafka-3.2.0-1.18.jar ../flink/flink-1.19.1/lib/
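拷贝完成后,可以确认这些依赖包已经进入 Flink 的 lib 目录(检查示例):

```bash
# 检查 paimon、starrocks、kafka、hadoop 相关 jar 是否都已就位
ls ../flink/flink-1.19.1/lib/ | grep -E 'paimon|starrocks|kafka|hadoop'
```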
启动 Flink 集群
cd /usr/share/streaminglake/flink/flink-1.19.1
# 修改 conf/config.yaml,将 taskmanager 下的 numberOfTaskSlots 改为 10,以允许同时执行更多任务
taskmanager:
  numberOfTaskSlots: 10
./bin/start-cluster.sh
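集群启动后可以简单验证一下(示例,8081 为 Flink Web UI / REST 的默认端口):

```bash
# 查看 JobManager / TaskManager 进程是否已拉起
jps | grep -E 'StandaloneSessionClusterEntrypoint|TaskManagerRunner'

# 通过 REST API 查看集群概览,slots-total 应为 10
curl -s http://localhost:8081/overview
```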
Flink 创建 Paimon 表
启动 Flink SQL 客户端:
./bin/sql-client.sh embedded
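如果希望每次启动客户端时自动建好 Catalog,也可以把下文的建表 SQL 保存为初始化脚本,通过 -i 参数加载(示例,文件名 init.sql 为假设):

```bash
# 假设已把 CREATE CATALOG / CREATE TABLE 语句保存到 init.sql
./bin/sql-client.sh embedded -i init.sql
```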
创建 Paimon Catalog 和表
```sql
-- if you're trying out Paimon in a distributed environment,
-- the warehouse path should be set to a shared file system, e.g. S3 or MinIO
CREATE CATALOG my_catalog WITH (
    'type' = 'paimon',
    'warehouse' = 's3://warehouse/wh',
    's3.endpoint' = 'http://192.168.116.130:9000',
    's3.access-key' = 'admin',
    's3.secret-key' = 'password',
    's3.region' = 'us-east-1'
);
USE CATALOG my_catalog;
-- create a word count table
CREATE TABLE word_count (
    word STRING PRIMARY KEY NOT ENFORCED,
    cnt BIGINT
);
```
创建完表后,可以登录 MinIO 控制台(minio:9001)查看生成的数据文件。
```sql
USE CATALOG my_catalog;
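-- 提示:流模式下 Paimon 在 checkpoint 时才提交数据;
-- 若集群未统一开启 checkpoint,可在当前会话中设置(间隔为示例值,可按需调整)
SET 'execution.checkpointing.interval' = '10 s';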
-- insert data into word count table
INSERT INTO word_count VALUES ('hello', 2);
```
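写入之后,除了登录 MinIO 控制台,也可以用 MinIO 客户端确认 Paimon 已生成数据文件(示例,假设本机已安装 mc,alias 名 local 为假设):

```bash
# 端点与账号密码和上文 Catalog 配置保持一致
mc alias set local http://192.168.116.130:9000 admin password

# 递归列出 warehouse 桶下 Paimon 写出的文件
mc ls --recursive local/warehouse/wh/
```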