Integrating ClickHouse with Spring Boot 2 for High-Performance Data Analysis

1. The First Driver Approach

Spring Boot 2 integrated with MyBatis-Plus and ClickHouse to implement basic create, read, update, and delete operations.

1.1 pom.xml Dependencies

xml
        <!-- MyBatis-Plus dependency -->
        <dependency>
            <groupId>com.baomidou</groupId>
            <artifactId>mybatis-plus-boot-starter</artifactId>
            <version>3.5.3.1</version>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid</artifactId>
            <version>1.1.9</version>
        </dependency>

        <!-- ClickHouse legacy JDBC driver -->
        <dependency>
            <groupId>ru.yandex.clickhouse</groupId>
            <artifactId>clickhouse-jdbc</artifactId>
            <version>0.3.2</version>
        </dependency>
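
Note: the ru.yandex.clickhouse artifact is the legacy driver; it has since been superseded by the com.clickhouse artifact used in the second approach below, but it still works for this setup.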

1.2 properties Configuration

properties
# MyBatis-Plus configuration
mybatis-plus.mapper-locations=classpath*:mapper/**/*Mapper.xml
mybatis-plus.global-config.db-config.id-type=auto
mybatis-plus.global-config.db-config.logic-delete-value=-1
mybatis-plus.global-config.db-config.logic-not-delete-value=0
mybatis-plus.configuration.auto-mapping-behavior=partial
mybatis-plus.configuration.map-underscore-to-camel-case=true
mybatis-plus.configuration.cache-enabled=false
mybatis-plus.configuration.call-setters-on-nulls=true
mybatis-plus.configuration.jdbc-type-for-null=null

# ClickHouse configuration
spring.datasource.click.driverClassName=ru.yandex.clickhouse.ClickHouseDriver
spring.datasource.click.url=jdbc:clickhouse://localhost:8123/test
spring.datasource.click.initialSize=10
spring.datasource.click.maxActive=100
spring.datasource.click.minIdle=10
spring.datasource.click.maxWait=6000
spring.datasource.click.password=12345678
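
Note: the spring.datasource.click.* keys are not standard Spring Boot datasource properties; they are custom keys bound by the JdbcParamConfig class in 1.3 via @ConfigurationProperties.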

1.3 Implementation Code

1) Config

java
package com.modules.common.config;

import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.annotation.EnableTransactionManagement;

/**
 * @ClassName MybatisPlusConfig
 * @Description MyBatis-Plus configuration class
 * @Author li'chao
 * @Date 2023-6-13 14:25
 * @Version 1.0
 **/
@Configuration
@EnableTransactionManagement
@MapperScan({"com.modules.mapper"})
public class MybatisPlusConfig {

    @Bean
    public MybatisPlusInterceptor mybatisPlusInterceptor() {
        MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
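        // ClickHouse paginates with MySQL-style LIMIT/OFFSET, so DbType.MYSQL works here;
        // recent MyBatis-Plus versions also provide DbType.CLICK_HOUSE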
        interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.MYSQL));
        return interceptor;
    }
}
java
package com.modules.common.config;

import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;


@Component
@ConfigurationProperties(prefix = "spring.datasource.click")
@Data
public class JdbcParamConfig {
    // @Data generates the getters and setters for all of these fields
    private String driverClassName;
    private String url;
    private Integer initialSize;
    private Integer maxActive;
    private Integer minIdle;
    private Integer maxWait;
    private String password;
}
java
package com.modules.common.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.annotation.Resource;
import javax.sql.DataSource;


@Configuration
public class DruidConfig {
    @Resource
    private JdbcParamConfig jdbcParamConfig ;
    @Bean
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(jdbcParamConfig.getUrl());
        datasource.setDriverClassName(jdbcParamConfig.getDriverClassName());
        datasource.setInitialSize(jdbcParamConfig.getInitialSize());
        datasource.setMinIdle(jdbcParamConfig.getMinIdle());
        datasource.setMaxActive(jdbcParamConfig.getMaxActive());
        datasource.setMaxWait(jdbcParamConfig.getMaxWait());
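        // no username is set here; the driver then authenticates as ClickHouse's built-in "default" user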
        datasource.setPassword(jdbcParamConfig.getPassword());
        return datasource;
    }
}

2) XML

xml
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="com.modules.mapper.LinkInfoMapper">

    <select id="countByAll" resultType="string">
        select count() from t_link_info
    </select>

    <select id="sumByAll" resultType="string">
        select count() from t_link_info
    </select>

    <select id="sumByAll" resultType="string">
        select count() from t_link_info
    </select>

    <select id="selectList10" resultType="map">
        select * from t_link_info limit 10
    </select>

</mapper>

3) DAO

java
package com.modules.mapper;

import java.util.List;
import java.util.Map;


public interface LinkInfoMapper {

    String countByAll();

    String sumByAll();

    List<Map<String, Object>> selectList10();

}

4) Service

java
package com.modules.service;

import com.modules.mapper.LinkInfoMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * @ClassName LinkInfoService
 * @Description LinkInfo query service
 * @Author li'chao
 * @Date 2023-9-29 20:30
 * @Version 1.0
 **/
@Service
public class LinkInfoService {

    @Autowired
    private LinkInfoMapper linkInfoMapper;

    public String countByAll(){
        return linkInfoMapper.countByAll();
    }
}

5) Controller

java
package com.modules.controller;

import com.modules.common.web.BaseController;
import com.modules.common.web.Result;
import com.modules.service.LinkInfoService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@Slf4j
@Api(tags = "ClickHouse management")
@RestController
@RequestMapping("/clickhouse")
public class LinkInfoController extends BaseController {

    @Autowired
    private LinkInfoService linkInfoService;

    @ApiOperation(value = "Count rows", notes = "Count rows")
    @GetMapping("/countByAll")
    public Result countByAll() {
        Long startTime = System.currentTimeMillis();
        String count = linkInfoService.countByAll();
        Long endTime = System.currentTimeMillis();
        Long tempTime = (endTime - startTime);
        // format the elapsed time as d/h/m/s/ms, omitting leading units that are zero
        String str = "Total: " + count + " rows, elapsed: " +
                (((tempTime/86400000)>0)?((tempTime/86400000)+"d"):"")+
                ((((tempTime/86400000)>0)||((tempTime%86400000/3600000)>0))?((tempTime%86400000/3600000)+"h"):(""))+
                ((((tempTime/3600000)>0)||((tempTime%3600000/60000)>0))?((tempTime%3600000/60000)+"m"):(""))+
                ((((tempTime/60000)>0)||((tempTime%60000/1000)>0))?((tempTime%60000/1000)+"s"):(""))+
                ((tempTime%1000)+"ms");
        return success(str);
    }
}
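
For example, an elapsed time of 3,725,000 ms renders as "1h2m5s0ms": the day term is suppressed because it is zero, while every unit after the first non-zero one is always printed.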

2. The Second Driver Approach

The first approach works well when data volumes are modest, especially when inserts are small. The second approach is recommended for pulling data from Kafka and inserting it in large batches: Spring Boot 2 integrated with Kafka and ClickHouse for high-volume batch inserts.

2.1 pom.xml Dependencies

xml
        <!-- kafka -->
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>

        <!-- ClickHouse JDBC driver (new com.clickhouse artifact) -->
        <dependency>
            <groupId>com.clickhouse</groupId>
            <artifactId>clickhouse-jdbc</artifactId>
            <version>0.5.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents.client5</groupId>
            <artifactId>httpclient5</artifactId>
            <version>5.2.3</version>
        </dependency>

2.2 application.yml Configuration

yaml
server:
  port: 9877
  servlet:
    context-path: /ck
  # enable graceful shutdown
  shutdown: graceful
spring:
  # allow up to 10 seconds for in-flight work to finish on shutdown
  lifecycle:
    timeout-per-shutdown-phase: 10s
  # ClickHouse configuration
  clickhouse:
    username: default
    url: jdbc:clickhouse://localhost:8123/default
    password: 12345678
    session-timeout: 0
    socket-timeout: 600000
  # Jackson date handling
  jackson:
    # global date format
    date-format: yyyy-MM-dd HH:mm:ss
    # time zone
    time-zone: GMT+8
  # Kafka configuration
  kafka:
    # broker address
    bootstrap-servers: localhost:9092
    # consumer settings
    consumer:
      # where to start consuming: earliest / latest
      auto-offset-reset: earliest
      # if true, offsets are committed periodically by Kafka in the background (the default); false hands commits over to Spring
      enable-auto-commit: false
      # consumer group id
      group-id: group-id-lc
      # maximum records fetched in one poll
      max-poll-records: 1500
      # topic name (custom key, read via @Value in the config class below)
      topic-name: screen_link
      # deserializers
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      # session timeout in ms (custom key, read via @Value)
      session:
        timeout: 6000
    listener:
      # number of threads running in the listener container
      concurrency: 5
      # enable batch listening
      type: batch

2.3 Implementation Code

1) Config

java
package com.modules.common.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties.AckMode;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka consumer configuration
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

	@Value("${spring.kafka.bootstrap-servers}")
	private String server;
	@Value("${spring.kafka.consumer.group-id}")
	private String groupId;
	@Value("${spring.kafka.consumer.max-poll-records}")
	private String maxPollRecords;
	@Value("${spring.kafka.listener.concurrency}")
	private Integer concurrency;
	@Value("${spring.kafka.consumer.enable-auto-commit}")
	private String enableAutoCommit;
	@Value("${spring.kafka.consumer.auto-offset-reset}")
	private String offsetReset;
	@Value("${spring.kafka.consumer.session.timeout}")
	private String sessionTimeout;

	@Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> consumerProps = new HashMap<>();
        // Kerberos authentication (uncomment and supply the JAAS config if the cluster requires it)
//        if ("true".equals(iskerberos)) {
//            consumerProps.put("security.protocol", "SASL_PLAINTEXT");
//            consumerProps.put("sasl.kerberos.service.name", "kafka");
//            System.setProperty("java.security.auth.login.config", lconfig);
//            System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");
//        }
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        consumerProps.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords);
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, offsetReset);
        consumerProps.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return consumerProps;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> batchFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(concurrency);    // number of listener threads
        factory.setBatchListener(true);         // enable batch listening
        factory.getContainerProperties().setAckMode(AckMode.MANUAL);
        return factory;
    }
}
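
Because enable-auto-commit is false and the container's ack mode is MANUAL, offsets are committed only when the listener explicitly calls Acknowledgment.acknowledge(), which the consumer below does once per batch.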
java
package com.modules.common.config;

import com.clickhouse.jdbc.ClickHouseDataSource;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

import javax.annotation.PostConstruct;
import java.sql.Connection;
import java.util.Properties;

/**
 * @ClassName ClickHouseConfig
 * @Description ClickHouse connection configuration
 * @Author li'chao
 * @Date 2023-12-13 11:26
 * @Version 1.0
 **/
@Log4j2
@Configuration
public class ClickHouseConfig {

    @Value("${spring.clickhouse.url}")
    private String server;

    @Value("${spring.clickhouse.username}")
    private String username;

    @Value("${spring.clickhouse.password}")
    private String password;

    @Value("${spring.clickhouse.socket-timeout}")
    private String socketTimeout;

    @Value("${spring.clickhouse.session-timeout}")
    private String sessionTimeout;

    private static Connection connection;

    /**
     * Initialize the connection at startup
     */
    @PostConstruct
    public void init() {
        try {
            log.info("Initializing the ClickHouse connection");
            Properties properties = new Properties();
            properties.setProperty("socket_timeout", socketTimeout);    // socket timeout in milliseconds
            properties.setProperty("session_timeout", sessionTimeout);  // session timeout in seconds; 0 = unlimited (default)
            ClickHouseDataSource dataSource = new ClickHouseDataSource(server, properties);
            // authenticate with the configured username and password
            connection = dataSource.getConnection(username, password);
        } catch (Exception e) {
            log.error("ClickHouse connection initialization failed", e);
        }
    }

    /**
     * Get the shared connection
     * @return the cached Connection
     */
    public Connection getConnection(){
        return connection;
    }
}
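
Note that this caches a single Connection for the life of the application. That works here because the scheduled task below is the only writer, but a JDBC Connection is generally not safe for concurrent use, so multiple writers should each obtain their own connection from the DataSource.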

2) Kafka

java
package com.modules.kafka;

import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.modules.service.ResolveService;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * @ClassName KafkaConsumer
 * @Description Batch consumer listener
 * @Author li'chao
 * @Date 2023-7-13 9:52
 * @Version 1.0
 **/
@Log4j2
@Component
public class KafkaConsumer {

    @Autowired
    private ResolveService resolveService;

    @KafkaListener(id = "${spring.kafka.consumer.group-id}", topicPattern = "${spring.kafka.consumer.topic-name}", containerFactory = "batchFactory")
    public void batchMessage(List<ConsumerRecord<String, String>> recordList, Acknowledgment acknowledgment) {
//        log.info("records consumed in this batch: {}", recordList.size());
        recordList.forEach(str -> {
//            log.info("===========consumed record: {}", str);
            JSONObject json = JSONUtil.parseObj(str.value());
            resolveService.resolveKafkaJsonData(JSONUtil.parseObj(json.get("Link").toString()));
        });
//        resolveService.resolveKafkaJsonData2(recordList);
        acknowledgment.acknowledge();
    }
}
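
The listener subscribes via topicPattern; since spring.kafka.consumer.topic-name holds a literal topic name rather than a regular expression, the topics attribute of @KafkaListener would work just as well.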
java
package com.modules.kafka;

import cn.hutool.json.JSONObject;
import org.springframework.stereotype.Component;

import java.io.Serializable;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Shared buffer: a concurrent queue of parsed records awaiting batch insert
 */
@Component
public class KafkaConcurrentDO implements Serializable {

	public static ConcurrentLinkedQueue<JSONObject> concurrentList = new ConcurrentLinkedQueue<JSONObject>();

}

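ResolveService is referenced by the listener but never shown in the original post. Given that the scheduled task below drains KafkaConcurrentDO.concurrentList, a minimal sketch is easy to infer; the class name and method signature come from the calls above, while the body is an assumption:

java
package com.modules.service;

import cn.hutool.json.JSONObject;
import com.modules.kafka.KafkaConcurrentDO;
import org.springframework.stereotype.Service;

/**
 * Hypothetical sketch: buffers parsed Kafka payloads until the
 * scheduled task drains the queue and batch-inserts them.
 */
@Service
public class ResolveService {

    /**
     * Called once per Kafka record by the listener; enqueue the parsed
     * JSON so the 10-second scheduled task can pick it up.
     */
    public void resolveKafkaJsonData(JSONObject json) {
        KafkaConcurrentDO.concurrentList.add(json);
    }
}
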
3) Scheduled Task

java
package com.modules.scheduled;


import cn.hutool.json.JSONObject;
import com.modules.common.config.ClickHouseConfig;
import com.modules.common.utils.DateUtils;
import com.modules.kafka.KafkaConcurrentDO;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Date;

/**
 * Scheduled task: batch-inserts buffered records into ClickHouse every 10 seconds
 */
@Log4j2
@Component
@Configuration
@EnableScheduling
public class ScheduledTask {

	@Autowired
	private ClickHouseConfig clickHouseConfig;

	@Async  // note: @Async only takes effect if @EnableAsync is declared on a configuration class
	@Scheduled(cron = "0/10 * * * * ?")
	public void kafkaConsumerStart() {
		try {
			Long startTime = System.currentTimeMillis();
			// INSERT through ClickHouse's input() table function, which declares the incoming column types inline
			String sql = "insert into my_link select id, create_time, beg_time, com_dur, data_len, data_pkt_num, down_data_len, down_data_pkt_num, down_pay_len, down_pkt_num, dst_addr, dst_mac, dst_port, end_time, ip_ver, pay_len, pkt_num, prot_info, prot_name, prot_num, src_addr, src_mac, src_port, uniqid, up_data_len, up_data_pkt_num, up_pay_len, up_pkt_num, trans_type, tcp_syn_cnt, tcp_rst_cnt, tcp_retry_cnt, pcap_file_name from input('id Int64, create_time DateTime64(6), beg_time Int64, com_dur Int32, data_len Int32, data_pkt_num Int32, down_data_len Int32, down_data_pkt_num Int32, down_pay_len Int32, down_pkt_num Int32, dst_addr String, dst_mac String, dst_port Int32, end_time Int64, ip_ver Int32, pay_len Int32, pkt_num Int32, prot_info String, prot_name String, prot_num Int32, src_addr String, src_mac String, src_port Int32, uniqid String, up_data_len Int32, up_data_pkt_num Int32, up_pay_len Int32, up_pkt_num Int32, trans_type String, tcp_syn_cnt Int32, tcp_rst_cnt Int32, tcp_retry_cnt Int32, pcap_file_name String')";
			// get the shared connection prepared at startup
			Connection conn = clickHouseConfig.getConnection();
			PreparedStatement ps = conn.prepareStatement(sql);
			log.info("Records queued for this batch: {}", KafkaConcurrentDO.concurrentList.size());
			// drain with poll() so records that arrive during the insert are not lost by a bulk clear()
			JSONObject jsonObject;
			while ((jsonObject = KafkaConcurrentDO.concurrentList.poll()) != null) {
				ps.setLong(1, new Date().getTime());
				ps.setObject(2, DateUtils.getTime());
				ps.setLong(3, Long.parseLong(jsonObject.get("begTime").toString()));
				ps.setInt(4, Integer.parseInt(jsonObject.get("comDur").toString()));
				ps.setInt(5, Integer.parseInt(jsonObject.get("dataLen").toString()));
				ps.setInt(6, Integer.parseInt(jsonObject.get("dataPktNum").toString()));
				ps.setInt(7, Integer.parseInt(jsonObject.get("downDataLen").toString()));
				ps.setInt(8, Integer.parseInt(jsonObject.get("downDataPktNum").toString()));
				ps.setInt(9, Integer.parseInt(jsonObject.get("downPayLen").toString()));
				ps.setInt(10, Integer.parseInt(jsonObject.get("downPktNum").toString()));
				ps.setString(11, jsonObject.get("dstAddr").toString());
				ps.setString(12, jsonObject.get("dstMac").toString());
				ps.setInt(13, Integer.parseInt(jsonObject.get("dstPort").toString()));
				ps.setLong(14, Long.parseLong(jsonObject.get("endTime").toString()));
				ps.setInt(15, Integer.parseInt(jsonObject.get("ipVer").toString()));
				ps.setInt(16, Integer.parseInt(jsonObject.get("payLen").toString()));
				ps.setInt(17, Integer.parseInt(jsonObject.get("pktNum").toString()));
				ps.setString(18, jsonObject.get("protInfo").toString());
				ps.setString(19, jsonObject.get("protName").toString());
				ps.setInt(20, Integer.parseInt(jsonObject.get("protNum").toString()));
				ps.setString(21, jsonObject.get("srcAddr").toString());
				ps.setString(22, jsonObject.get("srcMac").toString());
				ps.setInt(23, Integer.parseInt(jsonObject.get("srcPort").toString()));
				ps.setString(24, jsonObject.get("uniqID").toString());
				ps.setInt(25, Integer.parseInt(jsonObject.get("upDataLen").toString()));
				ps.setInt(26, Integer.parseInt(jsonObject.get("upDataPktNum").toString()));
				ps.setInt(27, Integer.parseInt(jsonObject.get("upPayLen").toString()));
				ps.setInt(28, Integer.parseInt(jsonObject.get("upPktNum").toString()));
				ps.setString(29, jsonObject.get("transType").toString());
				ps.setInt(30, jsonObject.get("tcpSynCnt")==null?0:Integer.parseInt(jsonObject.get("tcpSynCnt").toString()));
				ps.setInt(31, jsonObject.get("tcpRstCnt")==null?0:Integer.parseInt(jsonObject.get("tcpRstCnt").toString()));
				ps.setInt(32, jsonObject.get("tcpRetryCnt")==null?0:Integer.parseInt(jsonObject.get("tcpRetryCnt").toString()));
				ps.setString(33, jsonObject.get("pcapFileName").toString());
				ps.addBatch();
			}
			// submit the batch and release the statement
			int[] count = ps.executeBatch();
			ps.close();
			Long endTime = System.currentTimeMillis();
			Long tempTime = (endTime - startTime);
			log.info("Batch committed: {} rows, elapsed: {} s", count.length, tempTime / 1000);
		} catch (Exception e) {
			log.error("kafkaConsumerStart batch insert failed", e);
		}
	}
}
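
The target table itself is not defined in the post. A sketch of a compatible my_link table, with the column names and types copied from the input() declaration above (the MergeTree engine and ordering key are assumptions), could be:

sql
CREATE TABLE my_link
(
    id Int64, create_time DateTime64(6), beg_time Int64, com_dur Int32,
    data_len Int32, data_pkt_num Int32, down_data_len Int32, down_data_pkt_num Int32,
    down_pay_len Int32, down_pkt_num Int32, dst_addr String, dst_mac String,
    dst_port Int32, end_time Int64, ip_ver Int32, pay_len Int32, pkt_num Int32,
    prot_info String, prot_name String, prot_num Int32, src_addr String,
    src_mac String, src_port Int32, uniqid String, up_data_len Int32,
    up_data_pkt_num Int32, up_pay_len Int32, up_pkt_num Int32, trans_type String,
    tcp_syn_cnt Int32, tcp_rst_cnt Int32, tcp_retry_cnt Int32, pcap_file_name String
)
ENGINE = MergeTree
ORDER BY (create_time, id);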

2.4 Other Notes

Different drivers expose somewhat different ways of operating ClickHouse; follow the implementation recommended by the official documentation: Java Language Client Options for ClickHouse, which also links to examples.
