PyFlink: sink a DataStream to Kafka
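This walkthrough builds a small PyFlink job that reads text lines from a TCP socket and writes them to a Kafka topic through the FlinkKafkaProducer connector.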

from pyflink.datastream import DataStream, StreamExecutionEnvironment
from pyflink.datastream.functions import RuntimeContext, FlatMapFunction, MapFunction
from pyflink.datastream.state import ValueStateDescriptor, MapStateDescriptor
from pyflink.datastream.connectors.kafka import FlinkKafkaConsumer, FlinkKafkaProducer
from pyflink.datastream.connectors.elasticsearch import Elasticsearch7SinkBuilder, ElasticsearchEmitter, FlushBackoffType
from pyflink.datastream.connectors import DeliveryGuarantee
from pyflink.common.typeinfo import Types
from pyflink.common.serialization import SimpleStringSchema
import json
import re
import logging
import sys
from datetime import datetime

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s-%(levelname)s-%(message)s")
logger = logging.getLogger(__name__)

# Create the StreamExecutionEnvironment
env = StreamExecutionEnvironment.get_execution_environment()
env.set_parallelism(1)
# The connector jar must match the Flink/Scala build (Flink 1.14.4, Scala 2.11 here)
env.add_jars("file:///root/flink-sql-connector-kafka_2.11-1.14.4.jar")


# Read lines from a TCP socket; PyFlink exposes no public socket source,
# so we reach through to the underlying Java execution environment.
data = DataStream(env._j_stream_execution_environment.socketTextStream('192.168.137.201', 8899))
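To feed this source while testing, you can run a netcat listener on the source host, e.g. nc -lk 8899, and type lines into that session.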

TEST_KAFKA_SERVERS = "192.168.137.201:9092"

TEST_KAFKA_TOPIC = "test_topic_elink"

TEST_GROUP_ID = "pyflink_elink_midsys"

def get_kafka_customer_properties(kafka_servers: str, group_id: str):
    properties = {
        "bootstrap.servers": kafka_servers,
        "fetch.max.bytes": "67108864",
        "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "enable.auto.commit": "false",  # disable Kafka auto-commit; must be a string here, passing a bool raises an error
        "group.id": group_id,
    }
    return properties

properties = get_kafka_customer_properties(TEST_KAFKA_SERVERS, TEST_GROUP_ID)
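The consumer properties built above (and the FlinkKafkaConsumer import) are never used by this sink job itself. As a minimal sketch, they could feed a Kafka source in place of the socket source, assuming the same topic and SimpleStringSchema (the kafka_stream name is illustrative):

consumer = FlinkKafkaConsumer(
    topics=TEST_KAFKA_TOPIC,
    deserialization_schema=SimpleStringSchema(),
    properties=properties
)
consumer.set_start_from_latest()  # skip any backlog already in the topic
kafka_stream = env.add_source(consumer)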

producer_properties = {
    'bootstrap.servers': '192.168.137.201:9092'
}

producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    producer_config=producer_properties,  # use the producer config, not the consumer properties above
    serialization_schema=SimpleStringSchema()
)

data.add_sink(producer)  # write every line from the socket to the Kafka topic
data.print()  # also echo records to stdout for debugging

env.execute("python_sink_to_kafka")
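FlinkKafkaProducer is the legacy connector; the DeliveryGuarantee import above only becomes useful with the newer KafkaSink API, available in PyFlink 1.16+. A minimal sketch of the equivalent sink, assuming a matching flink-sql-connector-kafka jar for that version:

from pyflink.datastream.connectors.kafka import KafkaSink, KafkaRecordSerializationSchema

sink = KafkaSink.builder() \
    .set_bootstrap_servers(TEST_KAFKA_SERVERS) \
    .set_record_serializer(
        KafkaRecordSerializationSchema.builder()
        .set_topic(TEST_KAFKA_TOPIC)
        .set_value_serialization_schema(SimpleStringSchema())
        .build()
    ) \
    .set_delivery_guarantee(DeliveryGuarantee.AT_LEAST_ONCE) \
    .build()

data.sink_to(sink)

Either way, you can verify the pipeline end to end by pointing a console consumer at the topic (kafka-console-consumer.sh --bootstrap-server 192.168.137.201:9092 --topic test_topic_elink) while typing lines into the netcat session.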
