Python sink to Kafka with PyFlink

import json
import logging
import sys

from pyflink.datastream import DataStream, StreamExecutionEnvironment
from pyflink.datastream.functions import MapFunction
from pyflink.common.typeinfo import Types
from pyflink.common.serialization import SimpleStringSchema
# The Kafka connector classes ship in the jar added below; on Flink 1.14 they
# are importable from pyflink.datastream.connectors (newer releases also expose
# them under pyflink.datastream.connectors.kafka).
from pyflink.datastream.connectors import FlinkKafkaConsumer, FlinkKafkaProducer

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s-%(levelname)s-%(message)s")

logger = logging.getLogger(__name__)

# Create the StreamExecutionEnvironment

env = StreamExecutionEnvironment.get_execution_environment()
env.set_parallelism(1)
# Ship the Kafka connector jar with the job; the path must point at a jar that
# matches the local Flink version.
env.add_jars("file:///root/flink-sql-connector-kafka_2.11-1.14.4.jar")

# PyFlink does not expose socketTextStream directly, so reach through the
# wrapped Java environment and re-wrap the result as a Python DataStream.
data = DataStream(env._j_stream_execution_environment.socketTextStream('192.168.137.201', 8899))

TEST_KAFKA_SERVERS = "192.168.137.201:9092"

TEST_KAFKA_TOPIC = "test_topic_elink"

TEST_GROUP_ID = "pyflink_elink_midsys"

def get_kafka_customer_properties(kafka_servers: str, group_id: str):
    properties = {
        "bootstrap.servers": kafka_servers,
        "fetch.max.bytes": "67108864",
        "key.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        "value.deserializer": "org.apache.kafka.common.serialization.StringDeserializer",
        # Disable Kafka auto-commit. The value must be a string here; passing a
        # bool raises an error.
        "enable.auto.commit": "false",
        "group.id": group_id,
    }
    return properties

properties = get_kafka_customer_properties(TEST_KAFKA_SERVERS, TEST_GROUP_ID)
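The consumer properties built above are never wired into a source in this post; FlinkKafkaConsumer is imported but unused. A minimal sketch of how they would feed a Kafka source, reusing the constants above (swapping it in for the socket source is an assumption, not part of the original):

source = FlinkKafkaConsumer(
    topics=TEST_KAFKA_TOPIC,
    deserialization_schema=SimpleStringSchema(),
    properties=properties
)
# kafka_stream = env.add_source(source)  # would read from Kafka instead of the socket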

producer_properties = {
    'bootstrap.servers': TEST_KAFKA_SERVERS
}

producer = FlinkKafkaProducer(
    topic=TEST_KAFKA_TOPIC,
    # The producer takes its own config; bootstrap.servers is all it needs here.
    producer_config=producer_properties,
    serialization_schema=SimpleStringSchema()
)
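json, MapFunction and Types are imported but never exercised. If each socket line should be wrapped as a JSON payload before it reaches Kafka, a minimal sketch (the "message" field name is illustrative):

class ToJson(MapFunction):
    def map(self, value):
        # Wrap the raw text line in a small JSON envelope.
        return json.dumps({"message": value})

json_stream = data.map(ToJson(), output_type=Types.STRING())
# json_stream.add_sink(producer)  # sink JSON payloads instead of raw lines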

data.add_sink(producer)
data.print()  # also echo each record to stdout for debugging
env.execute()
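socketTextStream connects as a client, so something must already be listening on 192.168.137.201:8899 when the job starts. A minimal stand-in feeder, assuming that port is free on the host:

import socket
import time

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("0.0.0.0", 8899))
server.listen(1)
conn, _ = server.accept()  # the Flink job connects here
for i in range(10):
    conn.sendall(f"hello {i}\n".encode())  # newline-delimited records
    time.sleep(1)
conn.close()
server.close()

Records that make it through the job should then show up on test_topic_elink, for example when watching the topic with kafka-console-consumer.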
