PyFlink: sinking a DataStream to Kafka (Python)

import logging
import sys

from pyflink.common.serialization import SimpleStringSchema
from pyflink.datastream import DataStream, StreamExecutionEnvironment
from pyflink.datastream.connectors.kafka import FlinkKafkaConsumer, FlinkKafkaProducer

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s-%(levelname)s-%(message)s")

logger = logging.getLogger(__name__)

# Create the StreamExecutionEnvironment

env = StreamExecutionEnvironment.get_execution_environment()

env.set_parallelism(1)

# The Kafka connector jar must be reachable from the job; adjust the path/version to your install.
env.add_jars("file:///root/flink-sql-connector-kafka_2.11-1.14.4.jar")

# PyFlink exposes no public socket source, so wrap the Java socketTextStream in a Python DataStream.
data = DataStream(env._j_stream_execution_environment.socketTextStream('192.168.137.201', 8899))

TEST_KAFKA_SERVERS = "192.168.137.201:9092"

TEST_KAFKA_TOPIC = "test_topic_elink"

TEST_GROUP_ID = "pyflink_elink_midsys"

def get_kafka_customer_properties(kafka_servers: str, group_id: str):
    properties = {
        "bootstrap.servers": kafka_servers,
        "fetch.max.bytes": "67108864",
        "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        # Disable Kafka auto-commit; the value must be a string here, passing a bool raises an error.
        "enable.auto.commit": "false",
        "group.id": group_id,
    }
    return properties

properties = get_kafka_customer_properties(TEST_KAFKA_SERVERS, TEST_GROUP_ID)
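
These are consumer settings, yet the job below only ever writes to Kafka. A minimal sketch of wiring them into a FlinkKafkaConsumer source instead of the socket stream, reusing the imports above (the names consumer and kafka_stream are hypothetical, and string messages on the same topic are assumed):

consumer = FlinkKafkaConsumer(
    topics=TEST_KAFKA_TOPIC,
    deserialization_schema=SimpleStringSchema(),
    properties=properties,
)
consumer.set_start_from_latest()  # or set_start_from_earliest() to replay history
kafka_stream = env.add_source(consumer)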

producer_properties = {
    "bootstrap.servers": TEST_KAFKA_SERVERS,
}

producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    serialization_schema=SimpleStringSchema(),
    producer_config=producer_properties,  # broker config only; the consumer deserializers are not needed here
)

data.add_sink(producer)  # write every socket line to the Kafka topic
data.print()             # also echo records to stdout for debugging

env.execute("python_sink_to_kafka")
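
FlinkKafkaProducer defaults to at-least-once delivery. A sketch of requesting exactly-once instead, assuming your PyFlink version exposes Semantic alongside the other Kafka connector classes; exactly-once relies on Kafka transactions, so the broker's transaction.max.timeout.ms must cover the producer's transaction timeout:

from pyflink.datastream.connectors.kafka import Semantic

# Hypothetical variant of the producer above with transactional, exactly-once writes.
exactly_once_producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    serialization_schema=SimpleStringSchema(),
    producer_config=producer_properties,
    semantic=Semantic.EXACTLY_ONCE,
)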
