Python sink to Kafka

This example reads text lines from a socket with PyFlink and writes them to a Kafka topic through FlinkKafkaProducer.

import json
import logging
import re
import sys
from datetime import datetime

from pyflink.common.serialization import SimpleStringSchema
from pyflink.common.typeinfo import Types
from pyflink.datastream import DataStream, StreamExecutionEnvironment
from pyflink.datastream.connectors import DeliveryGuarantee
from pyflink.datastream.connectors.elasticsearch import Elasticsearch7SinkBuilder, ElasticsearchEmitter, FlushBackoffType
from pyflink.datastream.connectors.kafka import FlinkKafkaConsumer, FlinkKafkaProducer
from pyflink.datastream.functions import RuntimeContext, FlatMapFunction, MapFunction
from pyflink.datastream.state import ValueStateDescriptor, MapStateDescriptor

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s-%(levelname)s-%(message)s")
logger = logging.getLogger(__name__)

# Create the StreamExecutionEnvironment
env = StreamExecutionEnvironment.get_execution_environment()
env.set_parallelism(1)
# The Kafka connector jar must exist at this path and match your Flink/Scala version
env.add_jars("file:///root/flink-sql-connector-kafka_2.11-1.14.4.jar")


# PyFlink exposes no public socketTextStream wrapper, so call it on the underlying Java environment
data = DataStream(env._j_stream_execution_environment.socketTextStream('192.168.137.201', 8899))
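To feed this source during a test, start a TCP server on that host first, for example with nc -lk 8899, and type lines into it; each line becomes one record in the stream.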

TEST_KAFKA_SERVERS = "192.168.137.201:9092"
TEST_KAFKA_TOPIC = "test_topic_elink"
TEST_GROUP_ID = "pyflink_elink_midsys"

def get_kafka_customer_properties(kafka_servers: str, group_id: str):
    properties = {
        "bootstrap.servers": kafka_servers,
        "fetch.max.bytes": "67108864",
        "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "enable.auto.commit": "false",  # disable Kafka auto-commit; values must be strings here, passing a bool raises an error
        "group.id": group_id,
    }
    return properties

properties = get_kafka_customer_properties(TEST_KAFKA_SERVERS, TEST_GROUP_ID)
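FlinkKafkaConsumer is imported above but never used. For completeness, a minimal sketch of how the same properties dict would feed a source reading this topic back (not part of the original job; assumes the same connector version):

# Hypothetical companion source using the consumer properties built above
kafka_consumer = FlinkKafkaConsumer(
    topics=TEST_KAFKA_TOPIC,
    deserialization_schema=SimpleStringSchema(),
    properties=properties
)
kafka_consumer.set_start_from_earliest()  # replay the topic from the beginning
# source_stream = env.add_source(kafka_consumer)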

producer_properties = {
    'bootstrap.servers': '192.168.137.201:9092'
}

producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    producer_config=producer_properties,  # producer config; the consumer deserializer properties above are not needed here
    serialization_schema=SimpleStringSchema()
)

data.add_sink(producer)
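FlinkKafkaProducer defaults to at-least-once delivery. If exactly-once is required, the connector exposes a Semantic enum; a sketch under the assumption that Semantic is importable from the same connector module as FlinkKafkaProducer in this PyFlink version:

# Assumed import path; on older PyFlink releases Semantic lives in pyflink.datastream.connectors
from pyflink.datastream.connectors.kafka import Semantic

exactly_once_producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    serialization_schema=SimpleStringSchema(),
    producer_config=producer_properties,
    semantic=Semantic.EXACTLY_ONCE  # requires checkpointing enabled and Kafka transaction support
)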

data.print()
env.execute()
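To verify that records arrive, a standard Kafka installation can tail the topic from the broker host:

bin/kafka-console-consumer.sh --bootstrap-server 192.168.137.201:9092 --topic test_topic_elink --from-beginning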
