Flink 使用 Paimon（Using Paimon from the Flink SQL client）
# Launch the Flink SQL client shipped with this distribution;
# the SQL statements below are entered at its interactive prompt.
/usr/dif/7.1.0-0/flink/bin/sql-client.sh
-- Create a Paimon catalog backed by the Hive Metastore, so tables created
-- here are registered in Hive and visible to Spark/Hive as well as Flink.
CREATE CATALOG paimon_hive_catalog
WITH ( 'type'='paimon',
'metastore' = 'hive',
'hive-conf-dir'='/etc/hive/conf/',
-- NOTE(review): hadoop-conf-dir points at the Hive conf dir — confirm that
-- core-site.xml/hdfs-site.xml are actually present under /etc/hive/conf.
'hadoop-conf-dir' = '/etc/hive/conf');
-- Switch the session to the new catalog, then to the target database.
-- ${DB} is a placeholder substituted by the caller before these run.
use catalog paimon_hive_catalog;
use ${DB};
XXXXXXXXXXXXXXXXXXXXXXXXXX
Spark on Hive (Kyuubi) 使用 Paimon（Using Paimon via Kyuubi's Spark engine）
# Connect through Kyuubi (Spark engine) with beeline. The JDBC URL does all
# the work: ZooKeeper service discovery + Kerberos principal, then (after '#')
# per-session Spark confs registering the Paimon catalogs, the Paimon SQL
# extensions, and the executor/driver resources.
# XXX placeholders (hosts, realm, warehouse NN, queue) must be replaced
# with real values for the target environment.
/usr/dif/7.1.0-0/kyuubi/bin/beeline -u 'jdbc:hive2://XXX:2181,XXX:2181,XXX:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=kyuubi;principal=ocdp/_HOST@XXX.COM;#spark.sql.catalog.paimon=org.apache.paimon.spark.SparkCatalog;spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog;spark.sql.catalog.paimon.warehouse=hdfs://XXX/apps/hive/warehouse;spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions;spark.yarn.queue=XXX;spark.executor.cores=4;spark.driver.cores=2;spark.executor.instances=10;spark.executor.memory=8g'
XXXXXXXXXXXXXXXXXXXXXXXXXX
Spark Jar 使用 Paimon（Using Paimon from a spark-submit jar job）
# Submit the Spark ODS job (OdsGMesHiveMain) on YARN in client mode, with the
# Paimon catalogs registered and Kerberos auth via keytab.
# FIX: added backslash line continuations — without them the shell executes
# each line as a separate command and the submit fails.
# NOTE(review): spark.yarn.principal/keytab are deprecated aliases in Spark 3
# (spark.kerberos.principal/keytab is current) — they still work, confirm the
# target Spark version before changing.
# NOTE(review): 'spark.driverEnv.' is not a standard conf prefix (executors use
# spark.executorEnv.*) — verify this actually reaches the driver environment.
# Trailing args passed to the main class: mode, start id, end id, batch number
# (${batchNo} is substituted by the calling script).
/usr/dif/7.1.0-0/spark/bin/spark-submit \
  --master yarn \
  --deploy-mode client \
  --driver-memory 1G \
  --num-executors 2 \
  --executor-cores 2 \
  --executor-memory 4G \
  --name "SparkODSJob" \
  --class com.goertek.it.main.gmes.OdsGMesHiveMain \
  --conf spark.sql.catalog.spark_catalog=org.apache.paimon.spark.SparkGenericCatalog \
  --conf spark.sql.catalog.paimon=org.apache.paimon.spark.SparkCatalog \
  --conf spark.sql.catalog.paimon.warehouse=hdfs://goertekwf/apps/hive/warehouse \
  --conf spark.sql.catalog.paimon.metastore=hive \
  --conf spark.sql.extensions=org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions \
  --conf spark.yarn.principal=wf_bpit_mes@XXX.COM \
  --conf spark.yarn.keytab=/data01/dataos/apps/keytabs/wf_bpit_mes.keytab \
  --conf 'spark.driver.extraJavaOptions=-Djava.security.krb5.conf=/etc/krb5.conf' \
  --conf 'spark.driverEnv.KRB5_CONFIG=/etc/krb5.conf' \
  hdfs:///spark_jar/spark3-1.0-SNAPSHOT.jar \
  "test" "152" "165" "${batchNo}"