Python sink to Kafka

A minimal PyFlink job: read text lines from a socket source and write them to a Kafka topic with FlinkKafkaProducer.

from pyflink.datastream import DataStream, StreamExecutionEnvironment

from pyflink.datastream.functions import RuntimeContext, FlatMapFunction, MapFunction

import json

import re

import logging

import sys

from pyflink.datastream.state import ValueStateDescriptor, MapStateDescriptor

from pyflink.datastream.connectors.kafka import FlinkKafkaConsumer, FlinkKafkaProducer

from pyflink.common.typeinfo import Types

from pyflink.datastream.connectors.elasticsearch import Elasticsearch7SinkBuilder, ElasticsearchEmitter, FlushBackoffType

from pyflink.datastream.connectors import DeliveryGuarantee

from pyflink.common.serialization import SimpleStringSchema

from datetime import datetime

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s-%(levelname)s-%(message)s")

logger = logging.getLogger(__name__)

# Create the StreamExecutionEnvironment

env = StreamExecutionEnvironment.get_execution_environment()

env.set_parallelism(1)

env.add_jars("file:///root/flink-sql-connector-kafka_2.11-1.14.4.jar")  # connector jar must match your Flink/Scala version

# socketTextStream has no Python wrapper, so wrap the underlying Java stream
data = DataStream(env._j_stream_execution_environment.socketTextStream('192.168.137.201', 8899))

TEST_KAFKA_SERVERS = "192.168.137.201:9092"

TEST_KAFKA_TOPIC = "test_topic_elink"

TEST_GROUP_ID = "pyflink_elink_midsys"

def get_kafka_customer_properties(kafka_servers: str, group_id: str):
    properties = {
        "bootstrap.servers": kafka_servers,
        "fetch.max.bytes": "67108864",
        "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "enable.auto.commit": "false",  # disable Kafka auto-commit; must be a string, passing a bool here raises an error
        "group.id": group_id,
    }
    return properties

properties = get_kafka_customer_properties(TEST_KAFKA_SERVERS, TEST_GROUP_ID)
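These consumer properties are never attached to a source in this snippet; purely as an illustration, here is a sketch of how they could feed the imported FlinkKafkaConsumer. The topic reuse and the start position are assumptions, not part of the original job:

# Hypothetical consumer wiring, reusing TEST_KAFKA_TOPIC and the properties above
consumer = FlinkKafkaConsumer(
    topics=TEST_KAFKA_TOPIC,
    deserialization_schema=SimpleStringSchema(),
    properties=properties
)
consumer.set_start_from_latest()  # assumption: read from the latest offsets
# kafka_stream = env.add_source(consumer)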

producer_properties = {
    'bootstrap.servers': '192.168.137.201:9092'
}

producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    producer_config=producer_properties,  # producer-side config; the consumer properties above carry deserializers the producer does not need
    serialization_schema=SimpleStringSchema()
)
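The json, datetime and Types imports are otherwise unused; as a hedged sketch, a map step could wrap each raw line in a JSON envelope before it reaches the sink. The field names here are invented for illustration:

# Hypothetical enrichment: wrap each socket line in a JSON envelope
json_stream = data.map(
    lambda line: json.dumps({"raw": line, "ts": datetime.now().isoformat()}),
    output_type=Types.STRING()
)
# json_stream.add_sink(producer)  # sink the enriched stream instead of the raw lines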

data.add_sink(producer)  # write every line to the Kafka topic
data.print()             # also echo records to stdout for debugging

env.execute()
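Before submitting the job, start a socket server on the source host so the connection succeeds, for example nc -lk 8899 on 192.168.137.201; every line typed there should then show up both on stdout and in the test_topic_elink topic.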
