nacos2.5.1版本基于docker的自定义部署(适配人大金仓)

nacos2.5.1版本基于docker的自定义部署(适配人大金仓)

因项目信创需求,要将nacos转换为人大金仓数据库进行生产环境部署,本文记录操作过程

适配人大金仓数据库

  1. 下载源码

    GitHub下载地址

    Gitee下载地址

国内下载速度慢,可选择gitee下载,版本为2.5.1,本地切换为2.5.1分支,下载maven依赖包

  1. 本地启动

    项目启动在console模块,数据源操作在plugin在datasource模块,打包在distribution模块

    尝试点击启动

    报错找不到符号com.alibaba.nacos.consistency.entity

    解决:安装idea插件protobuf,此插件会自动编译类文件到target,或者直接编译也行。

  2. 数据库脚本

nacos可以选择内嵌数据库H2,也可以使用外源数据库mysql,但是咱们这次需要创建人大金仓数据库,所以使用官方的mysql语句进行替换。官方的mysql语句可以直接从源码distribution-conf中取,还有对应的derby数据库语句。替换后的数据库语句如下:

sql 复制代码
SET CLIENT_ENCODING TO 'UTF8';

-- Kingbase 不支持 FOREIGN_KEY_CHECKS,可忽略或注释

-- SET FOREIGN_KEY_CHECKS = 0; -- 不支持,无需设置

-- ----------------------------

-- Table structure for config_info

-- ----------------------------

-- 删除已存在的表(如果存在)

DROP TABLE IF EXISTS config_info;

-- 创建 config_info 表

CREATE TABLE config_info (

id BIGSERIAL NOT NULL PRIMARY KEY,

data_id VARCHAR(255) NOT NULL,

group_id VARCHAR(128),

content TEXT NOT NULL,

md5 VARCHAR(32),

gmt_create TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

gmt_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

src_user TEXT,

src_ip VARCHAR(50),

app_name VARCHAR(128),

tenant_id VARCHAR(128) DEFAULT '',

c_desc VARCHAR(256),

c_use VARCHAR(64),

effect VARCHAR(64),

type VARCHAR(64),

c_schema TEXT,

encrypted_data_key VARCHAR(1024) NOT NULL DEFAULT ''

);

-- Unique index; name fixed to match the comment and the official Nacos schema (uk_configinfo_datagrouptenant).

CREATE UNIQUE INDEX uk_configinfo_datagrouptenant ON config_info (data_id, group_id, tenant_id);

-- ----------------------------

-- Records of config_info

-- ----------------------------

-- MySQL backticks removed; columns listed explicitly. The content uses E'' (C-style
-- escape string, PostgreSQL-compatible) so the \n sequences become real newlines —
-- a plain '...' literal would store literal backslash-n and corrupt the YAML.
INSERT INTO config_info (id, data_id, group_id, content, md5, gmt_create, gmt_modified, src_user, src_ip, app_name, tenant_id, c_desc, c_use, effect, type, c_schema, encrypted_data_key) VALUES (1, 'gateway.yaml', 'DEFAULT_GROUP', E'server:\n port: 6666\nspring:\n application:\n name: gateway-service\n cloud:\n gateway:\n default-filters:\n - name: Retry\n args:\n retries: 3\n methods: GET\n series: SERVER_ERROR\n exceptions: java.io.IOException, java.util.concurrent.TimeoutException\n backoff:\n firstBackoff: 5000ms\n maxBackoff: 5000ms\n factor: 2\n basedOnPreviousValue: false\n routes:\n - id: auth-service\n uri: lb://auth-service\n predicates:\n - Path=/auth/**\n - id: hrm-service\n uri: lb://hrm\n predicates:\n - Path=/hrm/**\n - id: user-service\n uri: lb://user-service\n predicates:\n - Path=/system/**\n - id: oa-service\n uri: lb://oa-service\n predicates:\n - Path=/oa/**\n - id: hrm-hou\n uri: lb://hrm-hou\n predicates:\n - Path=/hrm-hou/**\n - id: hrm-shi\n uri: lb://hrm-shi\n predicates:\n - Path=/hrm-shi/**\n nacos:\n discovery:\n server-addr: 172.18.6.3:8848,172.18.6.4:8848,172.18.6.5:8848\n prot: 6666\n ip: ${HOST_IP}\n data:\n redis:\n host: 172.18.6.5\n port: 6379\n password: abc!123\n database: 12\n connect-timeout: 2s\n timeout: 1s', '44fba84c3f95c4d1b1432bf60eef4af6', '2025-04-11 09:03:56', '2025-04-23 13:51:58', NULL, '10.21.224.22', 'gateway-service', '', '', '', '', 'yaml', '', '');

-- id was inserted explicitly into a BIGSERIAL column, so advance the sequence to
-- avoid duplicate-key errors on later inserts. pg_get_serial_sequence is the
-- PostgreSQL-compatible helper; verify it on the target Kingbase version.
SELECT setval(pg_get_serial_sequence('config_info', 'id'), (SELECT MAX(id) FROM config_info));

-- ----------------------------

-- Table structure for config_info_gray

-- ----------------------------

-- 删除已存在的表(如果存在)

DROP TABLE IF EXISTS config_info_gray;


-- 创建 config_info_gray 表

CREATE TABLE config_info_gray (

id BIGSERIAL NOT NULL PRIMARY KEY,

data_id VARCHAR(255) NOT NULL,

group_id VARCHAR(128) NOT NULL,

content TEXT NOT NULL,

md5 VARCHAR(32),

src_user TEXT,

src_ip VARCHAR(100),

gmt_create TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

gmt_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

app_name VARCHAR(128),

tenant_id VARCHAR(128) DEFAULT '',

gray_name VARCHAR(128) NOT NULL,

gray_rule TEXT NOT NULL,

encrypted_data_key VARCHAR(256) NOT NULL DEFAULT ''

);

-- Unique index; name fixed to match the comment and the official schema.

CREATE UNIQUE INDEX uk_configinfogray_datagrouptenantgray

ON config_info_gray (data_id, group_id, tenant_id, gray_name);

-- Secondary indexes. In Kingbase/PostgreSQL index names are unique per schema
-- (unlike MySQL, where they are per-table), so they are prefixed with the table
-- name to avoid clashing with his_config_info's gmt_modified index.

CREATE INDEX idx_configinfogray_dataid_gmt_modified

ON config_info_gray (data_id, gmt_modified);

CREATE INDEX idx_configinfogray_gmt_modified

ON config_info_gray (gmt_modified);

-- ----------------------------

-- Table structure for config_tags_relation

-- ----------------------------

-- config_tags_relation

DROP TABLE IF EXISTS config_tags_relation;

CREATE TABLE config_tags_relation (

id BIGINT NOT NULL,

tag_name VARCHAR(128) NOT NULL,

tag_type VARCHAR(64),

data_id VARCHAR(255) NOT NULL,

group_id VARCHAR(128) NOT NULL,

tenant_id VARCHAR(128) DEFAULT '',

nid BIGSERIAL NOT NULL PRIMARY KEY

);

-- Unique index

CREATE UNIQUE INDEX idx_configtagrelation_configidtag ON config_tags_relation (id, tag_name, tag_type);

-- Secondary index, prefixed with the table name: index names are unique per
-- schema in Kingbase/PostgreSQL and tenant_info also indexes tenant_id.

CREATE INDEX idx_config_tags_tenant_id ON config_tags_relation (tenant_id);

-- group_capacity

DROP TABLE IF EXISTS group_capacity;

CREATE TABLE group_capacity (

id BIGSERIAL NOT NULL PRIMARY KEY,

group_id VARCHAR(128) NOT NULL DEFAULT '',

quota INT DEFAULT 0,

usage INT DEFAULT 0,

max_size INT DEFAULT 0,

max_aggr_count INT DEFAULT 0,

max_aggr_size INT DEFAULT 0,

max_history_count INT DEFAULT 0,

gmt_create TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

gmt_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP

);

-- Seed data. Columns are listed explicitly (the original relied on positional
-- VALUES), and because the ids are explicit the BIGSERIAL sequence must be
-- advanced afterwards or the next insert will hit a duplicate key.

INSERT INTO group_capacity (id, group_id, quota, usage, max_size, max_aggr_count, max_aggr_size, max_history_count, gmt_create, gmt_modified) VALUES (1, '', 0, 1, 0, 0, 0, 0, '2025-04-11 09:03:56', '2025-05-27 15:53:04');

INSERT INTO group_capacity (id, group_id, quota, usage, max_size, max_aggr_count, max_aggr_size, max_history_count, gmt_create, gmt_modified) VALUES (2, 'DEFAULT_GROUP', 0, 1, 0, 0, 0, 0, '2025-04-11 09:03:56', '2025-05-27 15:53:04');

-- pg_get_serial_sequence is PostgreSQL-compatible; verify on the target Kingbase version.
SELECT setval(pg_get_serial_sequence('group_capacity', 'id'), (SELECT MAX(id) FROM group_capacity));

-- his_config_info

DROP TABLE IF EXISTS his_config_info;

CREATE TABLE his_config_info (

id BIGINT NOT NULL,

nid BIGSERIAL NOT NULL PRIMARY KEY,

data_id VARCHAR(255) NOT NULL,

group_id VARCHAR(128) NOT NULL,

app_name VARCHAR(128),

content TEXT NOT NULL,

md5 VARCHAR(32),

gmt_create TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

gmt_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

src_user TEXT,

src_ip VARCHAR(50),

op_type VARCHAR(10),

tenant_id VARCHAR(128) DEFAULT '',

encrypted_data_key VARCHAR(1024) NOT NULL DEFAULT '',

publish_type VARCHAR(50) DEFAULT 'formal',

gray_name VARCHAR(50),

ext_info TEXT

);

-- Indexes, prefixed with the table name: index names are unique per schema in
-- Kingbase/PostgreSQL, and config_info_gray also defines a gmt_modified index,
-- so a bare idx_gmt_modified would fail with "relation already exists".

CREATE INDEX idx_hisconfiginfo_gmt_create ON his_config_info (gmt_create);

CREATE INDEX idx_hisconfiginfo_gmt_modified ON his_config_info (gmt_modified);

CREATE INDEX idx_hisconfiginfo_did ON his_config_info (data_id);

-- permissions

-- Role-based access control: one row grants a role an action on a resource.
DROP TABLE IF EXISTS permissions;

CREATE TABLE permissions (

role VARCHAR(50) NOT NULL,

resource VARCHAR(128) NOT NULL,

action VARCHAR(8) NOT NULL

);

-- Unique index: at most one entry per (role, resource, action) combination.

CREATE UNIQUE INDEX uk_role_permission ON permissions (role, resource, action);

-- roles

-- Maps console usernames to role names (many-to-many; no FK constraints,
-- matching the upstream Nacos schema).
DROP TABLE IF EXISTS roles;

CREATE TABLE roles (

username VARCHAR(50) NOT NULL,

role VARCHAR(50) NOT NULL

);

-- Unique index: a user holds each role at most once.

CREATE UNIQUE INDEX idx_user_role ON roles (username, role);

-- tenant_capacity

DROP TABLE IF EXISTS tenant_capacity;

CREATE TABLE tenant_capacity (

id BIGSERIAL NOT NULL PRIMARY KEY,

tenant_id VARCHAR(128) NOT NULL DEFAULT '',

quota INT DEFAULT 0,

usage INT DEFAULT 0,

max_size INT DEFAULT 0,

max_aggr_count INT DEFAULT 0,

max_aggr_size INT DEFAULT 0,

max_history_count INT DEFAULT 0,

gmt_create TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

gmt_modified TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP

);

-- 唯一索引

CREATE UNIQUE INDEX uk_tenant_id ON tenant_capacity (tenant_id);

-- tenant_info

DROP TABLE IF EXISTS tenant_info;

CREATE TABLE tenant_info (

id BIGSERIAL NOT NULL PRIMARY KEY,

kp VARCHAR(128) NOT NULL,

tenant_id VARCHAR(128) DEFAULT '',

tenant_name VARCHAR(128) DEFAULT '',

tenant_desc VARCHAR(256),

create_source VARCHAR(32),

gmt_create BIGINT NOT NULL,

gmt_modified BIGINT NOT NULL

);

-- Unique index + secondary index

CREATE UNIQUE INDEX uk_tenant_info_kptenantid ON tenant_info (kp, tenant_id);

-- Prefixed with the table name: index names are unique per schema in
-- Kingbase/PostgreSQL and config_tags_relation also indexes tenant_id.
CREATE INDEX idx_tenant_info_tenant_id ON tenant_info (tenant_id);

-- users

-- Console login accounts.
-- NOTE(review): password length 500 suggests an encoded hash is stored here —
-- verify against the Nacos auth module before relying on this.
DROP TABLE IF EXISTS users;

CREATE TABLE users (

username VARCHAR(50) NOT NULL PRIMARY KEY,

password VARCHAR(500) NOT NULL,

enabled SMALLINT NOT NULL

);

建好数据库后,咱们可以单独给nacos库建一个用户管理nacos库,然后授予权限处理

sql 复制代码
CREATE USER nacos WITH LOGIN PASSWORD 'nacos'; -- create the application user

-- The nacos schema must exist before it can be granted or used as search_path;
-- the original script skipped this step. AUTHORIZATION makes the user its owner.
CREATE SCHEMA IF NOT EXISTS nacos AUTHORIZATION nacos;

ALTER USER nacos LOGIN; -- redundant (CREATE USER ... WITH LOGIN already grants it), kept from the original

GRANT USAGE ON SCHEMA nacos TO nacos;

ALTER USER nacos SET search_path TO nacos;

-- Grant DML on all existing tables in the schema

GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA nacos TO nacos;

-- Default privileges for tables created later (optional)

ALTER DEFAULT PRIVILEGES IN SCHEMA nacos GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO nacos;

这样数据库就建好了,应用端也可以使用建好的nacos用户来登录操作数据库。

  1. 修改源码

    更改完数据库之后,对应的nacos操作数据的实现类也都需要新增和改正,这样才能适配人大金仓数据库的查询。

    • 添加依赖
    xml 复制代码
    <!-- 版本控制-->
    <kingbase-connector-java.version>8.6.0</kingbase-connector-java.version>
    
    <!-- 人大金仓数据库驱动-->
    <dependency>
        <groupId>cn.com.kingbase</groupId>
        <artifactId>kingbase8</artifactId>
        <version>${kingbase-connector-java.version}</version>
    </dependency>
    • 更改数据库常量类
    java 复制代码
        package com.alibaba.nacos.plugin.datasource.constants;
    
        /**
         * The data source name.
         *
         * @author hyx
        **/
    
        public class DataSourceConstant {
    
        public static final String MYSQL = "mysql";
    
        public static final String KINGBASE = "kingbase";
    
        public static final String DERBY = "derby";
        
        }
    • 创建数据枚举类
    java 复制代码
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.enums.kingbase;
    
    import java.util.HashMap;
    import java.util.Map;
    
    /**
     * The kingbase implementation of ConfigInfoMapper.
     *
     * @author nacos team
     */
    public enum TrustedKingbaseFunctionEnum {
    
        /**
         * NOW().
         */
        NOW("NOW", "CURRENT_TIMESTAMP"),
    
        /**
         * LENGTH().
         */
        LENGTH("LENGTH", "LENGTH");
    
        private static final Map<String, String> FUNCTION_MAP = new HashMap<>();
    
        static {
            for (TrustedKingbaseFunctionEnum function : TrustedKingbaseFunctionEnum.values()) {
                FUNCTION_MAP.put(function.functionName, function.expression);
            }
        }
    
        private final String functionName;
        private final String expression;
    
        TrustedKingbaseFunctionEnum(String functionName, String expression) {
            this.functionName = functionName;
            this.expression = expression;
        }
    
        public static String getFunctionByName(String name) {
            return FUNCTION_MAP.getOrDefault(name, null);
        }
    }
    • 创建 AbstractMapperByKingbase
    java 复制代码
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.enums.kingbase.TrustedKingbaseFunctionEnum;
    import com.alibaba.nacos.plugin.datasource.mapper.AbstractMapper;
    
    /**
     * Base mapper for Kingbase: resolves SQL function names through the trusted
     * Kingbase function whitelist instead of emitting them verbatim.
     *
     * @author nacos team
     */
    public abstract class AbstractMapperByKingbase extends AbstractMapper {
    
        @Override
        public String getFunction(String functionName) {
            final String trustedExpression = TrustedKingbaseFunctionEnum.getFunctionByName(functionName);
            return trustedExpression;
        }
    }
    • 修改 ExternalDataSourceProperties的JDBC驱动为人大金仓数据库驱动
    java 复制代码
    package com.alibaba.nacos.persistence.datasource;
    
    import com.alibaba.nacos.common.utils.CollectionUtils;
    import com.alibaba.nacos.common.utils.Preconditions;
    import com.alibaba.nacos.common.utils.StringUtils;
    import com.zaxxer.hikari.HikariDataSource;
    import org.springframework.boot.context.properties.bind.Bindable;
    import org.springframework.boot.context.properties.bind.Binder;
    import org.springframework.core.env.Environment;
    
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Objects;
    
    import static com.alibaba.nacos.common.utils.CollectionUtils.getOrDefault;
    
    /**
     * Properties of external DataSource.
     *
     * <p>Bound from the {@code db.*} configuration keys (see {@link #build}).
     * For the Kingbase adaptation the default JDBC driver constant below was
     * switched from the MySQL driver to the Kingbase driver; it is only used
     * when no explicit driver-class-name is configured for the pool.
     *
     * @author Nacos
     */
    public class ExternalDataSourceProperties {
    
        //private static final String JDBC_DRIVER_NAME = "com.mysql.cj.jdbc.Driver";
    
        // Kingbase driver used as the fallback when the pool config sets no driver (see build()).
        private static final String JDBC_DRIVER_NAME = "com.kingbase8.Driver";
    
        // Connection validation query applied when the pool has none configured.
        private static final String TEST_QUERY = "SELECT 1";
    
        // Number of data sources to build (bound from db.num).
        private Integer num;
    
        // Per-index JDBC URLs (bound from db.url.*); one entry required per data source.
        private List<String> url = new ArrayList<>();
    
        // Per-index usernames (bound from db.user.*); index 0 is reused when an index is missing.
        private List<String> user = new ArrayList<>();
    
        // Per-index passwords (bound from db.password.*); index 0 is reused when an index is missing.
        private List<String> password = new ArrayList<>();
    
        public void setNum(Integer num) {
            this.num = num;
        }
    
        public void setUrl(List<String> url) {
            this.url = url;
        }
    
        public void setUser(List<String> user) {
            this.user = user;
        }
    
        public void setPassword(List<String> password) {
            this.password = password;
        }
    
    
        /**
         * Build several HikariDataSource.
         *
         * @param environment {@link Environment}
         * @param callback    Callback function when constructing data source
         * @return List of {@link HikariDataSource}
         */
        List<HikariDataSource> build(Environment environment, Callback<HikariDataSource> callback) {
            List<HikariDataSource> dataSources = new ArrayList<>();
            // Populate num/url/user/password from the "db.*" properties of this environment.
            Binder.get(environment).bind("db", Bindable.ofInstance(this));
            Preconditions.checkArgument(Objects.nonNull(num), "db.num is null");
            Preconditions.checkArgument(CollectionUtils.isNotEmpty(user), "db.user or db.user.[index] is null");
            Preconditions.checkArgument(CollectionUtils.isNotEmpty(password), "db.password or db.password.[index] is null");
            for (int index = 0; index < num; index++) {
                int currentSize = index + 1;
                Preconditions.checkArgument(url.size() >= currentSize, "db.url.%s is null", index);
                DataSourcePoolProperties poolProperties = DataSourcePoolProperties.build(environment);
                // Fall back to the Kingbase driver only when none was configured explicitly.
                if (StringUtils.isEmpty(poolProperties.getDataSource().getDriverClassName())) {
                    poolProperties.setDriverClassName(JDBC_DRIVER_NAME);
                }
                poolProperties.setJdbcUrl(url.get(index).trim());
                // Reuse the first credentials when no per-index value is configured.
                poolProperties.setUsername(getOrDefault(user, index, user.get(0)).trim());
                poolProperties.setPassword(getOrDefault(password, index, password.get(0)).trim());
                HikariDataSource ds = poolProperties.getDataSource();
                if (StringUtils.isEmpty(ds.getConnectionTestQuery())) {
                    ds.setConnectionTestQuery(TEST_QUERY);
                }
    
                dataSources.add(ds);
                callback.accept(ds);
            }
            Preconditions.checkArgument(CollectionUtils.isNotEmpty(dataSources), "no datasource available");
            return dataSources;
        }
    
        /**
         * Perform custom logic.
         *
         * @param <D> Type of data source
         */
        interface Callback<D> {
    
            /**
             * Accept the built data source and perform custom operations.
             *
             * @param datasource The data source instance
             */
            void accept(D datasource);
        }
    
    }
    • 增加ExternalDataSourceServiceImpl数据源获取
    java 复制代码
    package com.alibaba.nacos.persistence.datasource;
    
    // Initializes the JDBC templates, health-check state and transaction support;
    // the trailing block is the addition made for the Kingbase adaptation.
    @Override
    public void init() {
        // Query timeout in seconds, overridable via the QUERYTIMEOUT system property (default 3).
        queryTimeout = ConvertUtils.toInt(System.getProperty("QUERYTIMEOUT"), 3);
        jt = new JdbcTemplate();
        // Set the maximum number of records to prevent memory expansion
        jt.setMaxRows(50000);
        jt.setQueryTimeout(queryTimeout);
    
        testMasterJT = new JdbcTemplate();
        testMasterJT.setQueryTimeout(queryTimeout);
    
        testMasterWritableJT = new JdbcTemplate();
        // Prevent the login interface from being too long because the main library is not available
        testMasterWritableJT.setQueryTimeout(1);
    
        //  Database health check
    
        testJtList = new ArrayList<>();
        isHealthList = new ArrayList<>();
    
        tm = new DataSourceTransactionManager();
        tjt = new TransactionTemplate(tm);
    
        // Transaction timeout needs to be distinguished from ordinary operations.
        tjt.setTimeout(TRANSACTION_QUERY_TIMEOUT);
    
        dataSourceType = DatasourcePlatformUtil.getDatasourcePlatform(defaultDataSourceType);
    
        if (DatasourceConfiguration.isUseExternalDB()) {
            try {
                reload();
            } catch (IOException e) {
                LOGGER.error("[ExternalDataSourceService] datasource reload error", e);
                throw new RuntimeException(DB_LOAD_ERROR_MSG, e);
            }
    
            if (this.dataSourceList.size() > DB_MASTER_SELECT_THRESHOLD) {
                PersistenceExecutor.scheduleTask(new SelectMasterTask(), 10, 10, TimeUnit.SECONDS);
            }
            PersistenceExecutor.scheduleTask(new CheckDbHealthTask(), 10, 10, TimeUnit.SECONDS);
        }
        // Kingbase adaptation: ensure the main JdbcTemplate has a DataSource attached.
        // NOTE(review): assumes dataSourceList is non-empty here — reload() runs only on the
        // external-DB path, so verify this does not throw on the embedded-DB path.
        if (Objects.isNull(jt.getDataSource())) {
            jt.setDataSource(dataSourceList.get(0));
        }
    }
    • impl层增加人大金仓支持
    AbstractMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.enums.kingbase.TrustedKingbaseFunctionEnum;
    import com.alibaba.nacos.plugin.datasource.mapper.AbstractMapper;
    
    /**
     * Common base class for the Kingbase mappers. SQL function names are looked
     * up in the trusted Kingbase function whitelist rather than passed through.
     *
     * @author nacos team
     */
    public abstract class AbstractMapperByKingbase extends AbstractMapper {
    
        @Override
        public String getFunction(String functionName) {
            // Yields null for any function that is not on the whitelist.
            final String mapped = TrustedKingbaseFunctionEnum.getFunctionByName(functionName);
            return mapped;
        }
    }
    ConfigInfoBetaMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.ConfigInfoBetaMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.ArrayList;
    import java.util.List;
    
    /**
     * The kingbase implementation of ConfigInfoBetaMapper.
     *
     * @author nacos team
     */
    public class ConfigInfoBetaMapperByKingbase extends AbstractMapperByKingbase implements ConfigInfoBetaMapper {
    
        /**
         * Builds the paged dump-all query over config_info_beta, paging the id
         * subquery first and then joining back for the full rows.
         *
         * @param context supplies the page size and start row
         * @return SQL with (pageSize, startRow) bound to LIMIT ? OFFSET ?
         */
        @Override
        public MapperResult findAllConfigInfoBetaForDumpAllFetchRows(MapperContext context) {
            int startRow = context.getStartRow();
            int pageSize = context.getPageSize();
    
            // Standard LIMIT ? OFFSET ? pagination for Kingbase/PostgreSQL.
            // (This differs from MySQL's "LIMIT offset, size" form; the parameter
            // order below is pageSize first, then startRow.)
            String sql = " SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5,gmt_modified,beta_ips,encrypted_data_key "
                    + " FROM ( SELECT id FROM config_info_beta ORDER BY id LIMIT ? OFFSET ? ) g, config_info_beta t WHERE g.id = t.id ";
    
            List<Object> paramList = new ArrayList<>();
            paramList.add(pageSize);
            paramList.add(startRow);
    
            return new MapperResult(sql, paramList);
        }
    
        @Override
        public String getDataSource() {
            // Registers this mapper under the "kingbase" platform name.
            return DataSourceConstant.KINGBASE;
        }
    }
    ConfigInfoGrayMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.ConfigInfoGrayMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.Arrays;
    
    /**
     * The kingbase implementation of ConfigInfoGrayMapper.
     *
     * @author nacos team
     */
    public class ConfigInfoGrayMapperByKingbase extends AbstractMapperByKingbase implements ConfigInfoGrayMapper {
    
        /**
         * Builds the paged dump-all query over config_info_gray.
         *
         * @param context supplies the page size and start row
         * @return SQL with (pageSize, startRow) bound to LIMIT ? OFFSET ?
         */
        @Override
        public MapperResult findAllConfigInfoGrayForDumpAllFetchRows(MapperContext context) {
            int startRow = context.getStartRow();
            int pageSize = context.getPageSize();
    
            // Standard LIMIT ? OFFSET ? pagination, supported by Kingbase/PostgreSQL.
            String sql = " SELECT id,data_id,group_id,tenant_id,gray_name,gray_rule,app_name,content,md5,gmt_modified "
                    + " FROM config_info_gray ORDER BY id LIMIT ? OFFSET ? ";
    
            return new MapperResult(sql, Arrays.asList(pageSize, startRow));
        }
    
        @Override
        public String getDataSource() {
            // Registers this mapper under the "kingbase" platform name.
            return DataSourceConstant.KINGBASE;
        }
    }
    ConfigInfoMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.common.utils.ArrayUtils;
    import com.alibaba.nacos.common.utils.CollectionUtils;
    import com.alibaba.nacos.common.utils.NamespaceUtil;
    import com.alibaba.nacos.common.utils.StringUtils;
    import com.alibaba.nacos.plugin.datasource.constants.ContextConstant;
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.constants.FieldConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.ConfigInfoMapper;
    import com.alibaba.nacos.plugin.datasource.mapper.ext.WhereBuilder;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.sql.Timestamp;
    import java.util.ArrayList;
    import java.util.List;
    
    /**
     * The kingbase implementation of ConfigInfoMapper.
     *
     * @author nacos team
     */
    public class ConfigInfoMapperByKingbase extends AbstractMapperByKingbase implements ConfigInfoMapper {
    
        private static final String DATA_ID = "dataId";
    
        private static final String GROUP = "group";
    
        private static final String APP_NAME = "appName";
    
        private static final String CONTENT = "content";
    
        private static final String TENANT = "tenant";
    
        @Override
        public MapperResult findConfigInfoByAppFetchRows(MapperContext context) {
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final String tenantId = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            String sql = "SELECT id,data_id,group_id,tenant_id,app_name,content FROM config_info"
                    + " WHERE tenant_id LIKE ? AND app_name = ? LIMIT ? OFFSET ?";
            List<Object> params = CollectionUtils.list(tenantId, appName, context.getPageSize(), context.getStartRow());
            return new MapperResult(sql, params);
        }
    
        @Override
        public MapperResult getTenantIdList(MapperContext context) {
            String sql = "SELECT tenant_id FROM config_info WHERE tenant_id != '" + NamespaceUtil.getNamespaceDefaultId()
                    + "' GROUP BY tenant_id LIMIT ? OFFSET ?";
            return new MapperResult(sql, CollectionUtils.list(context.getPageSize(), context.getStartRow()));
        }
    
        @Override
        public MapperResult getGroupIdList(MapperContext context) {
            String sql = "SELECT group_id FROM config_info WHERE tenant_id ='" + NamespaceUtil.getNamespaceDefaultId()
                    + "' GROUP BY group_id LIMIT ? OFFSET ?";
            return new MapperResult(sql, CollectionUtils.list(context.getPageSize(), context.getStartRow()));
        }
    
        @Override
        public MapperResult findAllConfigKey(MapperContext context) {
            String sql = " SELECT data_id,group_id,app_name FROM ( "
                    + " SELECT id FROM config_info WHERE tenant_id LIKE ? ORDER BY id LIMIT ? OFFSET ? ) g, config_info t WHERE g.id = t.id ";
            return new MapperResult(sql,
                    CollectionUtils.list(context.getWhereParameter(FieldConstant.TENANT_ID), context.getPageSize(),
                            context.getStartRow()));
        }
    
        @Override
        public MapperResult findAllConfigInfoBaseFetchRows(MapperContext context) {
            String sql = "SELECT t.id,data_id,group_id,content,md5 FROM ( SELECT id FROM config_info ORDER BY id LIMIT ? OFFSET ? ) "
                    + "g, config_info t WHERE g.id = t.id";
            return new MapperResult(sql, CollectionUtils.list(context.getPageSize(), context.getStartRow()));
        }
    
        @Override
        public MapperResult findAllConfigInfoFragment(MapperContext context) {
            boolean needContent = Boolean.parseBoolean(
                    (String) context.getContextParameter(ContextConstant.NEED_CONTENT));
            String sql = "SELECT id,data_id,group_id,tenant_id,app_name," + (needContent ? "content," : "")
                    + "md5,gmt_modified,type,encrypted_data_key FROM config_info WHERE id > ? ORDER BY id ASC LIMIT ? OFFSET ?";
            return new MapperResult(sql, CollectionUtils.list(context.getWhereParameter(FieldConstant.ID),
                    context.getPageSize(), context.getStartRow()));
        }
    
        @Override
        public MapperResult findChangeConfigFetchRows(MapperContext context) {
            final String tenant = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final Timestamp startTime = (Timestamp) context.getWhereParameter(FieldConstant.START_TIME);
            final Timestamp endTime = (Timestamp) context.getWhereParameter(FieldConstant.END_TIME);
    
            List<Object> paramList = new ArrayList<>();
            StringBuilder where = new StringBuilder(" WHERE 1=1 ");
            if (!StringUtils.isBlank(dataId)) {
                where.append(" AND data_id LIKE ? ");
                paramList.add(dataId);
            }
            if (!StringUtils.isBlank(group)) {
                where.append(" AND group_id LIKE ? ");
                paramList.add(group);
            }
            if (!StringUtils.isBlank(tenant)) {
                where.append(" AND tenant_id = ? ");
                paramList.add(tenant);
            }
            if (!StringUtils.isBlank(appName)) {
                where.append(" AND app_name = ? ");
                paramList.add(appName);
            }
            if (startTime != null) {
                where.append(" AND gmt_modified >= ? ");
                paramList.add(startTime);
            }
            if (endTime != null) {
                where.append(" AND gmt_modified <= ? ");
                paramList.add(endTime);
            }
    
            String lastMaxId = (String) context.getWhereParameter(FieldConstant.LAST_MAX_ID);
            paramList.add(lastMaxId);
            paramList.add(context.getPageSize());
            paramList.add(context.getStartRow());
            String sql = "SELECT id,data_id,group_id,tenant_id,app_name,type,md5,gmt_modified FROM config_info"
                    + where.toString() + " AND id > ? ORDER BY id ASC LIMIT ? OFFSET ?";
            return new MapperResult(sql, paramList);
        }
    
        /**
         * Builds the paged query fetching group-key/MD5 rows for all configurations.
         *
         * <p>Pagination happens on an id-only sub-select which is then joined back to
         * the full table, keeping the LIMIT/OFFSET scan narrow.
         *
         * @param context mapper context carrying page size and start row
         * @return the SQL plus its positional parameters (pageSize, startRow)
         */
        @Override
        public MapperResult listGroupKeyMd5ByPageFetchRows(MapperContext context) {
            final int pageSize = context.getPageSize();
            final int startRow = context.getStartRow();
            final String querySql =
                    "SELECT t.id,data_id,group_id,tenant_id,app_name,md5,type,gmt_modified,encrypted_data_key FROM "
                            + "( SELECT id FROM config_info ORDER BY id LIMIT ? OFFSET ? ) g, config_info t WHERE g.id = t.id";
            return new MapperResult(querySql, CollectionUtils.list(pageSize, startRow));
        }
    
        /**
         * Builds the fuzzy-match (LIKE) paged query for base config info in the
         * default namespace.
         *
         * <p>Only non-blank criteria contribute a predicate; each adds exactly one
         * positional parameter in the order it appears in the WHERE clause.
         *
         * @param context mapper context carrying dataId/group/content filters and paging info
         * @return the SQL plus its positional parameters
         */
        @Override
        public MapperResult findConfigInfoBaseLikeFetchRows(MapperContext context) {
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);

            List<Object> params = new ArrayList<>();
            // tenant_id is pinned to the default namespace id supplied by NamespaceUtil.
            StringBuilder whereClause = new StringBuilder(" WHERE tenant_id='" + NamespaceUtil.getNamespaceDefaultId() + "'");

            if (StringUtils.isNotBlank(dataId)) {
                whereClause.append(" AND data_id LIKE ? ");
                params.add(dataId);
            }
            if (StringUtils.isNotBlank(group)) {
                whereClause.append(" AND group_id LIKE ? ");
                params.add(group);
            }
            if (StringUtils.isNotBlank(content)) {
                whereClause.append(" AND content LIKE ? ");
                params.add(content);
            }

            params.add(context.getPageSize());
            params.add(context.getStartRow());

            return new MapperResult(
                    "SELECT id,data_id,group_id,tenant_id,content FROM config_info" + whereClause + " LIMIT ? OFFSET ?",
                    params);
        }
    
        /**
         * Builds the exact-match paged query for config info within one tenant.
         *
         * <p>tenant_id is always bound; dataId/group/appName match exactly while
         * content uses LIKE. Paging parameters are appended last.
         *
         * @param context mapper context carrying the filter values and paging info
         * @return the SQL plus its positional parameters
         */
        @Override
        public MapperResult findConfigInfo4PageFetchRows(MapperContext context) {
            final String tenant = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);

            List<Object> params = new ArrayList<>();
            StringBuilder conditions = new StringBuilder(" WHERE tenant_id = ? ");
            params.add(tenant);

            if (StringUtils.isNotBlank(dataId)) {
                conditions.append(" AND data_id = ? ");
                params.add(dataId);
            }
            if (StringUtils.isNotBlank(group)) {
                conditions.append(" AND group_id = ? ");
                params.add(group);
            }
            if (StringUtils.isNotBlank(appName)) {
                conditions.append(" AND app_name = ? ");
                params.add(appName);
            }
            if (StringUtils.isNotBlank(content)) {
                conditions.append(" AND content LIKE ? ");
                params.add(content);
            }

            params.add(context.getPageSize());
            params.add(context.getStartRow());

            return new MapperResult(
                    "SELECT id,data_id,group_id,tenant_id,app_name,content,type,encrypted_data_key FROM config_info"
                            + conditions + " LIMIT ? OFFSET ?", params);
        }
    
        /**
         * Builds the paged query for base config info of a single group and tenant.
         *
         * @param context mapper context carrying group/tenant filters and paging info
         * @return the SQL plus its positional parameters (group, tenant, pageSize, startRow)
         */
        @Override
        public MapperResult findConfigInfoBaseByGroupFetchRows(MapperContext context) {
            final Object groupId = context.getWhereParameter(FieldConstant.GROUP_ID);
            final Object tenantId = context.getWhereParameter(FieldConstant.TENANT_ID);
            final String querySql =
                    "SELECT id,data_id,group_id,content FROM config_info WHERE group_id = ? AND tenant_id = ? LIMIT ? OFFSET ?";
            return new MapperResult(querySql,
                    CollectionUtils.list(groupId, tenantId, context.getPageSize(), context.getStartRow()));
        }
    
        /**
         * Builds the fuzzy-match (LIKE) paged query for config info, delegating
         * clause assembly to {@link WhereBuilder}.
         *
         * <p>tenant_id always matches with LIKE; the other criteria are added only
         * when non-blank, and type filters expand to an IN list.
         *
         * @param context mapper context carrying the filter values and paging info
         * @return the assembled SQL plus its positional parameters
         */
        @Override
        public MapperResult findConfigInfoLike4PageFetchRows(MapperContext context) {
            final String tenant = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);
            final String[] types = (String[]) context.getWhereParameter(FieldConstant.TYPE);

            WhereBuilder builder = new WhereBuilder(
                    "SELECT id,data_id,group_id,tenant_id,app_name,content,encrypted_data_key,type FROM config_info");
            builder.like("tenant_id", tenant);

            if (StringUtils.isNotBlank(dataId)) {
                builder.and().like("data_id", dataId);
            }
            if (StringUtils.isNotBlank(group)) {
                builder.and().like("group_id", group);
            }
            if (StringUtils.isNotBlank(appName)) {
                builder.and().eq("app_name", appName);
            }
            if (StringUtils.isNotBlank(content)) {
                builder.and().like("content", content);
            }
            if (!ArrayUtils.isEmpty(types)) {
                builder.and().in("type", types);
            }

            builder.limit(context.getStartRow(), context.getPageSize());
            return builder.build();
        }
    
        /**
         * Builds the paged query fetching all config info rows for a tenant pattern.
         *
         * <p>Pagination happens on an id-only sub-select which is then joined back
         * to the full table.
         *
         * @param context mapper context carrying the tenant pattern and paging info
         * @return the SQL plus its positional parameters (tenant, pageSize, startRow)
         */
        @Override
        public MapperResult findAllConfigInfoFetchRows(MapperContext context) {
            final Object tenantPattern = context.getWhereParameter(FieldConstant.TENANT_ID);
            final String querySql =
                    "SELECT t.id,data_id,group_id,tenant_id,app_name,content,md5 FROM ( SELECT id FROM config_info "
                            + "WHERE tenant_id LIKE ? ORDER BY id LIMIT ? OFFSET ? ) g, config_info t WHERE g.id = t.id";
            return new MapperResult(querySql,
                    CollectionUtils.list(tenantPattern, context.getPageSize(), context.getStartRow()));
        }
    
        /**
         * @return the data source key this mapper serves (Kingbase).
         */
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    }
    ConfigInfoTagMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.ConfigInfoTagMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.Arrays;
    
    /**
     * Kingbase-flavoured {@link ConfigInfoTagMapper} implementation.
     *
     * @author nacos team
     */
    public class ConfigInfoTagMapperByKingbase extends AbstractMapperByKingbase implements ConfigInfoTagMapper {

        /**
         * Pages through every tagged configuration row for a full dump.
         *
         * <p>Paginates on an id-only sub-select and joins the complete rows back,
         * using the LIMIT ? OFFSET ? paging syntax accepted by Kingbase.
         *
         * @param context mapper context carrying page size and start row
         * @return the SQL plus its positional parameters (pageSize, startRow)
         */
        @Override
        public MapperResult findAllConfigInfoTagForDumpAllFetchRows(MapperContext context) {
            String sql = " SELECT t.id,data_id,group_id,tenant_id,tag_id,app_name,content,md5,gmt_modified "
                    + " FROM (  SELECT id FROM config_info_tag ORDER BY id LIMIT ? OFFSET ? ) g, config_info_tag t "
                    + " WHERE g.id = t.id ";
            return new MapperResult(sql, Arrays.asList(context.getPageSize(), context.getStartRow()));
        }

        /**
         * @return the data source key this mapper serves (Kingbase).
         */
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    }
    ConfigTagsRelationMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.common.utils.ArrayUtils;
    import com.alibaba.nacos.common.utils.StringUtils;
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.constants.FieldConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.ConfigTagsRelationMapper;
    import com.alibaba.nacos.plugin.datasource.mapper.ext.WhereBuilder;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.ArrayList;
    import java.util.List;
    
    /**
     * Kingbase-flavoured {@link ConfigTagsRelationMapper} implementation.
     *
     * @author nacos team
     */
    public class ConfigTagsRelationMapperByKingbase extends AbstractMapperByKingbase implements ConfigTagsRelationMapper {

        /**
         * Exact-match paged query over config info joined with its tag relations.
         *
         * <p>Only non-blank criteria add predicates; the tag array expands to an
         * {@code IN (?, ...)} list with one placeholder per tag, and paging
         * parameters come last.
         *
         * @param context mapper context carrying the filter values and paging info
         * @return the SQL plus its positional parameters
         */
        @Override
        public MapperResult findConfigInfo4PageFetchRows(MapperContext context) {
            final String tenant = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);
            final String[] tagArr = (String[]) context.getWhereParameter(FieldConstant.TAG_ARR);

            final String selectSql =
                    "SELECT a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content FROM config_info a LEFT JOIN "
                            + "config_tags_relation b ON a.id=b.id";

            List<Object> params = new ArrayList<>();
            StringBuilder conditions = new StringBuilder(" WHERE ");
            conditions.append(" a.tenant_id = ? ");
            params.add(tenant);

            if (StringUtils.isNotBlank(dataId)) {
                conditions.append(" AND a.data_id = ? ");
                params.add(dataId);
            }
            if (StringUtils.isNotBlank(group)) {
                conditions.append(" AND a.group_id = ? ");
                params.add(group);
            }
            if (StringUtils.isNotBlank(appName)) {
                conditions.append(" AND a.app_name = ? ");
                params.add(appName);
            }
            if (StringUtils.isNotBlank(content)) {
                conditions.append(" AND a.content LIKE ? ");
                params.add(content);
            }

            if (tagArr != null && tagArr.length > 0) {
                conditions.append(" AND b.tag_name IN (");
                String delimiter = "";
                for (String tag : tagArr) {
                    conditions.append(delimiter).append('?');
                    delimiter = ", ";
                    params.add(tag);
                }
                conditions.append(") ");
            }

            conditions.append(" LIMIT ? OFFSET ? ");
            params.add(context.getPageSize());
            params.add(context.getStartRow());

            return new MapperResult(selectSql + conditions, params);
        }

        /**
         * Fuzzy-match (LIKE) paged query over config info joined with its tag
         * relations, delegating clause assembly to {@link WhereBuilder}.
         *
         * @param context mapper context carrying the filter values and paging info
         * @return the assembled SQL plus its positional parameters
         */
        @Override
        public MapperResult findConfigInfoLike4PageFetchRows(MapperContext context) {
            final String tenant = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
            final String dataId = (String) context.getWhereParameter(FieldConstant.DATA_ID);
            final String group = (String) context.getWhereParameter(FieldConstant.GROUP_ID);
            final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
            final String content = (String) context.getWhereParameter(FieldConstant.CONTENT);
            final String[] tagArr = (String[]) context.getWhereParameter(FieldConstant.TAG_ARR);
            final String[] types = (String[]) context.getWhereParameter(FieldConstant.TYPE);

            WhereBuilder builder = new WhereBuilder(
                    "SELECT a.id,a.data_id,a.group_id,a.tenant_id,a.app_name,a.content,a.type "
                            + "FROM config_info a LEFT JOIN config_tags_relation b ON a.id=b.id");

            builder.like("a.tenant_id", tenant);
            if (StringUtils.isNotBlank(dataId)) {
                builder.and().like("a.data_id", dataId);
            }
            if (StringUtils.isNotBlank(group)) {
                builder.and().like("a.group_id", group);
            }
            if (StringUtils.isNotBlank(appName)) {
                builder.and().eq("a.app_name", appName);
            }
            if (StringUtils.isNotBlank(content)) {
                builder.and().like("a.content", content);
            }
            if (!ArrayUtils.isEmpty(tagArr)) {
                builder.and().in("b.tag_name", tagArr);
            }
            if (!ArrayUtils.isEmpty(types)) {
                builder.and().in("a.type", types);
            }

            builder.limit(context.getStartRow(), context.getPageSize());
            return builder.build();
        }

        /**
         * @return the data source key this mapper serves (Kingbase).
         */
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    }
    GroupCapacityMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.common.utils.CollectionUtils;
    import com.alibaba.nacos.common.utils.NamespaceUtil;
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.constants.FieldConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.GroupCapacityMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    
    /**
     * The kingbase implementation of GroupCapacityMapper.
     *
     * @author nacos team
     */
    public class GroupCapacityMapperByKingbase extends AbstractMapperByKingbase implements GroupCapacityMapper {
    
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    
        @Override
        public MapperResult selectGroupInfoBySize(MapperContext context) {
            String sql = "SELECT id, group_id FROM group_capacity WHERE id > ? LIMIT ? OFFSET ?";
            return new MapperResult(sql, CollectionUtils.list(context.getWhereParameter(FieldConstant.ID),
                    context.getPageSize(), context.getStartRow()));
        }
    
        @Override
        public MapperResult select(MapperContext context) {
            String sql = "SELECT id, quota, "usage", max_size, max_aggr_count, max_aggr_size, group_id FROM group_capacity "
                    + "WHERE group_id = ?";
            return new MapperResult(sql, Collections.singletonList(context.getWhereParameter(FieldConstant.GROUP_ID)));
        }
    
        @Override
        public MapperResult insertIntoSelect(MapperContext context) {
            List<Object> paramList = Arrays.asList(
                    context.getUpdateParameter(FieldConstant.GROUP_ID),
                    context.getUpdateParameter(FieldConstant.QUOTA),
                    context.getUpdateParameter(FieldConstant.MAX_SIZE),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_COUNT),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_SIZE),
                    context.getUpdateParameter(FieldConstant.GMT_CREATE),
                    context.getUpdateParameter(FieldConstant.GMT_MODIFIED)
            );
    
            String sql =
                    "INSERT INTO group_capacity (group_id, quota, "usage", max_size, max_aggr_count, max_aggr_size,gmt_create,"
                            + " gmt_modified) SELECT ?, ?, count(*), ?, ?, ?, ?, ? FROM config_info";
            return new MapperResult(sql, paramList);
        }
    
        @Override
        public MapperResult insertIntoSelectByWhere(MapperContext context) {
            String sql =
                    "INSERT INTO group_capacity (group_id, quota, "usage", max_size, max_aggr_count, max_aggr_size, gmt_create,"
                            + " gmt_modified) SELECT ?, ?, count(*), ?, ?, ?, ?, ? FROM config_info WHERE group_id=? AND tenant_id = '"
                            + NamespaceUtil.getNamespaceDefaultId() + "'";
            List<Object> paramList = Arrays.asList(
                    context.getUpdateParameter(FieldConstant.GROUP_ID),
                    context.getUpdateParameter(FieldConstant.QUOTA),
                    context.getUpdateParameter(FieldConstant.MAX_SIZE),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_COUNT),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_SIZE),
                    context.getUpdateParameter(FieldConstant.GMT_CREATE),
                    context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                    context.getWhereParameter(FieldConstant.GROUP_ID)
            );
            return new MapperResult(sql, paramList);
        }
    
        @Override
        public MapperResult incrementUsageByWhereQuotaEqualZero(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE group_id = ? AND "usage" < ? AND quota = 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID),
                            context.getWhereParameter(FieldConstant.USAGE)
                    ));
        }
    
        @Override
        public MapperResult incrementUsageByWhereQuotaNotEqualZero(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE group_id = ? AND "usage" < quota AND quota != 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID)
                    ));
        }
    
        @Override
        public MapperResult incrementUsageByWhere(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE group_id = ?",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID)
                    ));
        }
    
        @Override
        public MapperResult decrementUsageByWhere(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = "usage" - 1, gmt_modified = ? WHERE group_id = ? AND "usage" > 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID)
                    ));
        }
    
        @Override
        public MapperResult updateUsage(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = (SELECT count(*) FROM config_info), gmt_modified = ? WHERE group_id = ?",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID)
                    ));
        }
    
        @Override
        public MapperResult updateUsageByWhere(MapperContext context) {
            return new MapperResult(
                    "UPDATE group_capacity SET "usage" = (SELECT count(*) FROM config_info WHERE group_id=? AND tenant_id = '"
                            + NamespaceUtil.getNamespaceDefaultId() + "')," + " gmt_modified = ? WHERE group_id= ?",
                    CollectionUtils.list(
                            context.getWhereParameter(FieldConstant.GROUP_ID),
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.GROUP_ID)
                    ));
        }
    }
    HistoryConfigInfoMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.common.utils.CollectionUtils;
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.constants.FieldConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.HistoryConfigInfoMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.List;
    
    /**
     * The Kingbase implementation of {@link HistoryConfigInfoMapper}.
     *
     * @author nacos team
     */
    public class HistoryConfigInfoMapperByKingbase extends AbstractMapperByKingbase implements HistoryConfigInfoMapper {

        /**
         * Deletes at most LIMIT_SIZE history rows older than START_TIME.
         *
         * <p>Kingbase (PostgreSQL-compatible) does not accept {@code DELETE ... LIMIT}
         * (a MySQL extension), so the batch is bounded through a sub-select on the
         * primary key {@code nid}.
         */
        @Override
        public MapperResult removeConfigHistory(MapperContext context) {
            String sql = "DELETE FROM his_config_info WHERE nid IN "
                    + "(SELECT nid FROM his_config_info WHERE gmt_modified < ? LIMIT ?)";
            return new MapperResult(sql, CollectionUtils.list(
                    context.getWhereParameter(FieldConstant.START_TIME),
                    context.getWhereParameter(FieldConstant.LIMIT_SIZE)
            ));
        }

        /**
         * Pages one configuration's history, newest first, using LIMIT ? OFFSET ?.
         */
        @Override
        public MapperResult pageFindConfigHistoryFetchRows(MapperContext context) {
            int startRow = context.getStartRow();
            int pageSize = context.getPageSize();

            String sql = "SELECT nid,data_id,group_id,tenant_id,app_name,src_ip,src_user,op_type,ext_info,publish_type,gray_name,gmt_create,gmt_modified "
                    + "FROM his_config_info "
                    + "WHERE data_id = ? AND group_id = ? AND tenant_id = ? ORDER BY nid DESC LIMIT ? OFFSET ?";
            List<Object> params = CollectionUtils.list(
                    context.getWhereParameter(FieldConstant.DATA_ID),
                    context.getWhereParameter(FieldConstant.GROUP_ID),
                    context.getWhereParameter(FieldConstant.TENANT_ID),
                    pageSize,
                    startRow
            );
            return new MapperResult(sql, params);
        }

        /**
         * @return the data source key this mapper serves (Kingbase).
         */
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    }
    TenantCapacityMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.common.utils.CollectionUtils;
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.constants.FieldConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.TenantCapacityMapper;
    import com.alibaba.nacos.plugin.datasource.model.MapperContext;
    import com.alibaba.nacos.plugin.datasource.model.MapperResult;
    
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    
    /**
     * The kingbase implementation of TenantCapacityMapper.
     *
     * @author nacos team
     */
    public class TenantCapacityMapperByKingbase extends AbstractMapperByKingbase implements TenantCapacityMapper {
    
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }
    
        @Override
        public MapperResult select(MapperContext context) {
            String sql = "SELECT id, quota, "usage", max_size, max_aggr_count, max_aggr_size, tenant_id FROM tenant_capacity "
                    + "WHERE tenant_id = ?";
            return new MapperResult(sql, Collections.singletonList(context.getWhereParameter(FieldConstant.TENANT_ID)));
        }
    
        @Override
        public MapperResult getCapacityList4CorrectUsage(MapperContext context) {
            String sql = "SELECT id, tenant_id FROM tenant_capacity WHERE id > ? LIMIT ?";
            return new MapperResult(sql, CollectionUtils.list(
                    context.getWhereParameter(FieldConstant.ID),
                    context.getWhereParameter(FieldConstant.LIMIT_SIZE)
            ));
        }
    
        @Override
        public MapperResult incrementUsageWithDefaultQuotaLimit(MapperContext context) {
            return new MapperResult(
                    "UPDATE tenant_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE tenant_id = ? AND "usage" < ? AND quota = 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.TENANT_ID),
                            context.getWhereParameter(FieldConstant.USAGE)
                    ));
        }
    
        @Override
        public MapperResult incrementUsageWithQuotaLimit(MapperContext context) {
            return new MapperResult(
                    "UPDATE tenant_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE tenant_id = ? AND "usage" < quota AND quota != 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.TENANT_ID)
                    ));
        }
    
        @Override
        public MapperResult incrementUsage(MapperContext context) {
            return new MapperResult(
                    "UPDATE tenant_capacity SET "usage" = "usage" + 1, gmt_modified = ? WHERE tenant_id = ?",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.TENANT_ID)
                    ));
        }
    
        @Override
        public MapperResult decrementUsage(MapperContext context) {
            return new MapperResult(
                    "UPDATE tenant_capacity SET "usage" = "usage" - 1, gmt_modified = ? WHERE tenant_id = ? AND "usage" > 0",
                    CollectionUtils.list(
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.TENANT_ID)
                    ));
        }
    
        @Override
        public MapperResult correctUsage(MapperContext context) {
            return new MapperResult(
                    "UPDATE tenant_capacity SET "usage" = (SELECT count(*) FROM config_info WHERE tenant_id = ?), "
                            + "gmt_modified = ? WHERE tenant_id = ?",
                    CollectionUtils.list(
                            context.getWhereParameter(FieldConstant.TENANT_ID),
                            context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                            context.getWhereParameter(FieldConstant.TENANT_ID)
                    ));
        }
    
        @Override
        public MapperResult insertTenantCapacity(MapperContext context) {
            List<Object> paramList = Arrays.asList(
                    context.getUpdateParameter(FieldConstant.TENANT_ID),
                    context.getUpdateParameter(FieldConstant.QUOTA),
                    context.getUpdateParameter(FieldConstant.MAX_SIZE),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_COUNT),
                    context.getUpdateParameter(FieldConstant.MAX_AGGR_SIZE),
                    context.getUpdateParameter(FieldConstant.GMT_CREATE),
                    context.getUpdateParameter(FieldConstant.GMT_MODIFIED),
                    context.getWhereParameter(FieldConstant.TENANT_ID)
            );
    
            String sql =
                    "INSERT INTO tenant_capacity (tenant_id, quota, "usage", max_size, max_aggr_count, max_aggr_size, gmt_create, gmt_modified) "
                            + "SELECT ?, ?, count(*), ?, ?, ?, ?, ? FROM config_info WHERE tenant_id=?";
            return new MapperResult(sql, paramList);
        }
    }
    TenantInfoMapperByKingbase
    java 复制代码
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    
    package com.alibaba.nacos.plugin.datasource.impl.kingbase;
    
    import com.alibaba.nacos.plugin.datasource.constants.DataSourceConstant;
    import com.alibaba.nacos.plugin.datasource.mapper.TenantInfoMapper;
    
    /**
     * The kingbase implementation of TenantInfoMapper.
     *
     * @author nacos team
     */
    public class TenantInfoMapperByKingbase extends AbstractMapperByKingbase implements TenantInfoMapper {

        /**
         * Identify the datasource dialect this mapper serves, so the plugin
         * framework can route tenant-info SQL to the Kingbase implementation.
         *
         * @return the Kingbase datasource constant
         */
        @Override
        public String getDataSource() {
            return DataSourceConstant.KINGBASE;
        }

        // No tenant-info specific SQL overrides are needed yet; the inherited
        // behavior from AbstractMapperByKingbase is sufficient. Add overrides
        // here if pagination or other Kingbase-specific queries become necessary.
    }
    plugin/datasource/src/main/resources/META-INF/services 下增加以下数据
    text 复制代码
    com.alibaba.nacos.plugin.datasource.impl.kingbase.ConfigInfoBetaMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.ConfigInfoMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.ConfigInfoTagMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.ConfigInfoGrayMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.ConfigTagsRelationMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.HistoryConfigInfoMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.TenantInfoMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.TenantCapacityMapperByKingbase
    com.alibaba.nacos.plugin.datasource.impl.kingbase.GroupCapacityMapperByKingbase
    • 修改数据库连接配置文件
    ini 复制代码
    spring.sql.init.platform=kingbase
    db.num=1
    db.jdbcDriverName=com.kingbase8.Driver
    db.url.0=jdbc:kingbase8://ip:port/nacos?currentSchema=nacos
    db.user.0=nacos
    db.password.0=nacos
    • 在本地尝试以单机启动

    成功启动!!!

    http://ip:8848/nacos 可访问,默认账号/密码为 nacos/nacos

  2. 项目编译构建

编译构建的时候需要注意包结构,nacos各服务打包都有各自的jar包,但是咱们需要进行docker环境打包,需要的是整体项目的tar文件,nacos提供的distribution包是专门构建运行安装包的。

先进行源代码构建

cd nacos

mvn -Prelease-nacos -Dmaven.test.skip=true -Dpmd.skip=true -Dcheckstyle.skip=true clean install -U

mvn clean install -P release-nacos -X

nacos-server-2.5.1-SNAPSHOT.tar.gz 是linux环境的

nacos-server-2.5.1-SNAPSHOT.zip 是windows环境的

  1. docker集群部署

Nacos因为选举算法的特殊性,要求最少三个节点才能组成一个有效的集群,这里我们准备三个服务器,A、B、C,每台服务器都装好了docker环境,并且创建了docker的国内镜像地址

json 复制代码
sudo nano /etc/docker/daemon.json

{
  "registry-mirrors": [
    "https://registry.docker-cn.com",
    "https://mirror.ccs.tencentyun.com",
    "https://docker.mirrors.ustc.edu.cn",
    "https://hub-mirror.c.163.com",
    "https://<你的ID>.mirror.aliyuncs.com"
  ]
}

注意:daemon.json 必须是严格的 JSON,不能包含 // 注释(上面依次为 Docker 中国官方、腾讯云、中科大、网易镜像,最后一个为阿里云个人专属地址,需替换为自己的 ID)。修改后执行 sudo systemctl daemon-reload && sudo systemctl restart docker 生效。

下载nacos官方打包镜像nacos-docker gitee.com/panza-open/...

将tar包放入build目录,重写dockerfile文件,整个build目录都需要复制到linux环境

dockerfile
bash 复制代码
FROM eclipse-temurin:17-jre-focal
LABEL maintainer="pader <[email protected]>"

# Runtime defaults; every value can be overridden with `docker run -e`.
ENV MODE="cluster" \
    PREFER_HOST_MODE="ip" \
    BASE_DIR="/home/nacos" \
    CLASSPATH=".:/home/nacos/conf:$CLASSPATH" \
    CLUSTER_CONF="/home/nacos/conf/cluster.conf" \
    FUNCTION_MODE="all" \
    JAVA_HOME="/usr/lib/jvm/java-17-eclipse-temurin" \
    NACOS_USER="nacos" \
    JVM_XMS="512m" \
    JVM_XMX="512m" \
    JVM_XMN="256m" \
    JVM_MS="64m" \
    JVM_MMS="128m" \
    NACOS_DEBUG="n" \
    TOMCAT_ACCESSLOG_ENABLED="false" \
    TIME_ZONE="Asia/Shanghai"

ARG NACOS_VERSION=2.5.1
ARG HOT_FIX_FLAG=""

WORKDIR $BASE_DIR

# The official image downloads the release tarball from GitHub; here we COPY
# the locally built Kingbase-enabled tarball into the image instead.
COPY nacos-server-${NACOS_VERSION}.tar.gz /home

# Unpack, remove the stock scripts/configs (replaced by the ADDs below),
# and set the container time zone.
RUN tar -xzvf /home/nacos-server-${NACOS_VERSION}.tar.gz -C /home \
    && rm -rf /home/nacos-server-${NACOS_VERSION}.tar.gz /home/nacos/bin/* /home/nacos/conf/*.properties \
    && ln -snf /usr/share/zoneinfo/$TIME_ZONE /etc/localtime \
    && echo $TIME_ZONE > /etc/timezone

ADD bin/docker-startup.sh bin/docker-startup.sh
ADD conf/application.properties conf/application.properties
ADD conf/jdbc.properties conf/jdbc.properties
ADD conf/cluster.conf conf/cluster.conf

# Redirect the startup log to the container's stdout so `docker logs` shows it.
# (The original created two symlinks for the same path; the second — to
# /dev/stderr — silently overwrote the first, so only one link is kept.)
RUN mkdir -p logs \
    && ln -sf /dev/stdout logs/start.out \
    && chmod +x bin/docker-startup.sh

EXPOSE 8848
ENTRYPOINT ["/bin/bash", "bin/docker-startup.sh"]

bin/docker-startup.sh(注意:与 Dockerfile 中 ADD 的文件名保持一致)

bash 复制代码
#!/bin/bash
# Copyright 1999-2018 Alibaba Group Holding Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Trace every command to the container log for easier startup debugging.
set -x
# Spring config name/location passed through to the server via
# --spring.config.name / --spring.config.additional-location further below.
export CUSTOM_SEARCH_NAMES="application"
export CUSTOM_SEARCH_LOCATIONS=file:${BASE_DIR}/conf/
# Re-export so child processes see the (possibly empty) member list.
export MEMBER_LIST="$MEMBER_LIST"
# Kubernetes peer-finder plugin dir; its presence switches how cluster.conf
# is generated (see print_servers).
PLUGINS_DIR="/home/nacos/plugins/peer-finder"
# Produce the cluster member list. With the peer-finder plugin installed the
# plugin generates cluster.conf itself; otherwise the file is rebuilt from
# the NACOS_SERVERS environment variable.
function print_servers() {
  if [[ -d "${PLUGINS_DIR}" ]]; then
    # Delegate cluster.conf generation to the peer-finder plugin, then give
    # it time to discover peers before the server starts.
    bash "$PLUGINS_DIR/plugin.sh"
    sleep 30
  else
    # Rebuild cluster.conf from scratch. `echo ""` intentionally truncates
    # the file (leaving one blank first line, as the original script did).
    echo "" >"$CLUSTER_CONF"
    for member in ${NACOS_SERVERS}; do
      echo "$member" >>"$CLUSTER_CONF"
    done
  fi
}

# Concatenate prefix $1 with value $2; emit the empty string when the value
# is unset/empty so callers can build optional JVM flags safely.
function join_if_exist() {
    local prefix="$1"
    local value="$2"
    if [ -z "$value" ]; then
        echo ""
    else
        echo "${prefix}${value}"
    fi
}

#===========================================================================================
# JVM Configuration
#===========================================================================================
# Each flag is emitted only when its env var is non-empty.
Xms=$(join_if_exist "-Xms" "${JVM_XMS}")
Xmx=$(join_if_exist "-Xmx" "${JVM_XMX}")
Xmn=$(join_if_exist "-Xmn" "${JVM_XMN}")
XX_MS=$(join_if_exist "-XX:MetaspaceSize=" "${JVM_MS}")
XX_MMS=$(join_if_exist "-XX:MaxMetaspaceSize=" "${JVM_MMS}")

JAVA_OPT="${JAVA_OPT} -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -XX:G1HeapRegionSize=4M"
if [[ "${MODE}" == "standalone" ]]; then
  JAVA_OPT="${JAVA_OPT} $Xms $Xmx $Xmn"
  JAVA_OPT="${JAVA_OPT} -Dnacos.standalone=true"
else
  if [[ "${EMBEDDED_STORAGE}" == "embedded" ]]; then
    JAVA_OPT="${JAVA_OPT} -DembeddedStorage=true"
  fi
  JAVA_OPT="${JAVA_OPT} -server $Xms $Xmx $Xmn $XX_MS $XX_MMS"
  if [[ "${NACOS_DEBUG}" == "y" ]]; then
    # Remote debug agent on 9555; suspend=n so startup is not blocked.
    JAVA_OPT="${JAVA_OPT} -Xdebug -Xrunjdwp:transport=dt_socket,address=9555,server=y,suspend=n"
  fi
  JAVA_OPT="${JAVA_OPT} -XX:-OmitStackTraceInFastThrow -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${BASE_DIR}/logs/java_heapdump.hprof"
  JAVA_OPT="${JAVA_OPT} -XX:-UseLargePages"
  # Cluster mode: generate/refresh cluster.conf before the server starts.
  print_servers
fi

#===========================================================================================
# Setting system properties
#===========================================================================================
# Restrict the server to config-only or naming-only mode when requested.
if [[ "${FUNCTION_MODE}" == "config" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.functionMode=config"
elif [[ "${FUNCTION_MODE}" == "naming" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.functionMode=naming"
fi
# Explicit server IP, needed when the container has several interfaces.
if [[ ! -z "${NACOS_SERVER_IP}" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.server.ip=${NACOS_SERVER_IP}"
fi

if [[ ! -z "${USE_ONLY_SITE_INTERFACES}" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.inetutils.use-only-site-local-interfaces=${USE_ONLY_SITE_INTERFACES}"
fi

if [[ ! -z "${PREFERRED_NETWORKS}" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.inetutils.preferred-networks=${PREFERRED_NETWORKS}"
fi

if [[ ! -z "${IGNORED_INTERFACES}" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.inetutils.ignored-interfaces=${IGNORED_INTERFACES}"
fi

### If turn on auth system:
if [[ ! -z "${NACOS_AUTH_ENABLE}" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.core.auth.enabled=${NACOS_AUTH_ENABLE}"
fi

if [[ "${PREFER_HOST_MODE}" == "hostname" ]]; then
  JAVA_OPT="${JAVA_OPT} -Dnacos.preferHostnameOverIp=true"
fi
JAVA_OPT="${JAVA_OPT} -Dnacos.member.list=${MEMBER_LIST}"

# BUG FIX: the original probed the JDK version with "$JAVA", a variable this
# script never defines, so the check silently failed and the JDK9+ GC-log
# flag was never added. Use the java binary on PATH (the same one exec'd below).
JAVA_MAJOR_VERSION=$(java -version 2>&1 | sed -E -n 's/.* version "([0-9]*).*$/\1/p')
if [[ "$JAVA_MAJOR_VERSION" -ge "9" ]]; then
  # JDK 9+ unified GC logging (replaces the pre-9 -Xloggc flags).
  JAVA_OPT="${JAVA_OPT} -Xlog:gc*:file=${BASE_DIR}/logs/nacos_gc.log:time,tags:filecount=10,filesize=102400"
fi

JAVA_OPT="${JAVA_OPT} -Dloader.path=${BASE_DIR}/plugins,${BASE_DIR}/plugins/health,${BASE_DIR}/plugins/cmdb,${BASE_DIR}/plugins/selector"
JAVA_OPT="${JAVA_OPT} -Dnacos.home=${BASE_DIR}"
JAVA_OPT="${JAVA_OPT} -jar ${BASE_DIR}/target/nacos-server.jar"
JAVA_OPT="${JAVA_OPT} ${JAVA_OPT_EXT}"
JAVA_OPT="${JAVA_OPT} --spring.config.additional-location=${CUSTOM_SEARCH_LOCATIONS}"
JAVA_OPT="${JAVA_OPT} --spring.config.name=${CUSTOM_SEARCH_NAMES}"
JAVA_OPT="${JAVA_OPT} --logging.config=${BASE_DIR}/conf/nacos-logback.xml"
JAVA_OPT="${JAVA_OPT} --server.max-http-header-size=524288"

echo "Nacos is starting, you can docker logs your container"
# exec so java becomes PID 1 and receives container stop signals directly.
exec java ${JAVA_OPT}

进入服务器构建镜像

bash 复制代码
docker build -t nacos-server .

配置文件-application.properties jdbc.properties cluster.conf

ini 复制代码
#
# Copyright 1999-2018 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#*************** Spring Boot Related Configurations ***************#
### Default web context path:
server.servlet.contextPath=/nacos
### Include message field
server.error.include-message=ALWAYS
### Default web server port:
server.port=8848

spring.sql.init.platform=kingbase
db.num=1
db.jdbcDriverName=com.kingbase8.Driver
db.url.0=jdbc:kingbase8://ip:port/nacos?currentSchema=nacos
db.user.0=nacos
db.password.0=nacos

### the maximum retry times for push
nacos.config.push.maxRetryTime=50

#*************** Naming Module Related Configurations ***************#
### Data dispatch task execution period in milliseconds:

### If enable data warmup. If set to false, the server would accept request without local data preparation:
# nacos.naming.data.warmup=true

### If enable the instance auto expiration, kind like of health check of instance:
# nacos.naming.expireInstance=true

nacos.naming.empty-service.auto-clean=true
nacos.naming.empty-service.clean.initial-delay-ms=50000
nacos.naming.empty-service.clean.period-time-ms=30000

### Metrics for elastic search
management.metrics.export.elastic.enabled=false
#management.metrics.export.elastic.host=http://localhost:9200

### Metrics for influx
management.metrics.export.influx.enabled=false
#management.metrics.export.influx.db=springboot
#management.metrics.export.influx.uri=http://localhost:8086
#management.metrics.export.influx.auto-create-db=true
#management.metrics.export.influx.consistency=one
#management.metrics.export.influx.compressed=true

#*************** Access Log Related Configurations ***************#
### If turn on the access log:
server.tomcat.accesslog.enabled=true

### accesslog automatic cleaning time
server.tomcat.accesslog.max-days=30

### The access log pattern:
server.tomcat.accesslog.pattern=%h %l %u %t "%r" %s %b %D %{User-Agent}i %{Request-Source}i

### The directory of access log:
server.tomcat.basedir=file:.
nacos.security.ignore.urls=/,/error,/**/*.css,/**/*.js,/**/*.html,/**/*.map,/**/*.svg,/**/*.png,/**/*.ico,/console-ui/public/**,/v1/auth/**,/v1/console/health/**,/actuator/**,/v1/console/server/**
### 注意:生产环境若开启鉴权(nacos.core.auth.enabled=true),以下 identity key/value 与
### token.secret.key 必须配置为非空值(secret key 需为 Base64 编码且原文不少于 32 字节),否则服务无法正常启动
nacos.core.auth.server.identity.key=
nacos.core.auth.server.identity.value=
nacos.core.auth.plugin.nacos.token.cache.enable=false
nacos.core.auth.plugin.nacos.token.expire.seconds=18000
nacos.core.auth.plugin.nacos.token.secret.key=
nacos.istio.mcp.server.enabled=false
nacos.core.member.lookup.type=file
nacos.inetutils.ip-address=192.168.3.201
nacos.core.protocol.raft.data-dir=/opt/nacos/data/protocol/raft
nacos.core.grpc.port=9848
nacos.core.grpc.port.offset=1000
nacos.core.protocol.double.write.enabled=false

运行docker容器(端口号、启动模式、本机ip、集群ip、挂载目录)

bash 复制代码
docker run -d -p 8848:8848 -p 9848:9848 -p 9849:9849 -p 7848:7848 -e SERVER_PORT=8848 -e MODE=cluster -e SERVER_ADDR=ip -e NACOS_SERVERS="ip1:8848 ip2:8848 ip3:8848" -v /app/nacos/conf:/home/nacos/conf --name nacos-server nacos-server

等三台服务器都启动成功后可以看见集群管理中节点列表

后面就可以正常使用nacos注册中心了,配置文件进行导入,人大金仓适配完成!

相关推荐
风象南35 分钟前
SpringBoot实现实时弹幕
java·spring boot·后端
BillKu2 小时前
Windows Server部署Vue3+Spring Boot项目
windows·spring boot·后端
钟离墨笺3 小时前
Go语言学习-->编译器安装
开发语言·后端·学习·golang
钟离墨笺4 小时前
Go语言学习-->从零开始搭建环境
开发语言·后端·学习·golang
烛阴10 小时前
自动化测试、前后端mock数据量产利器:Chance.js深度教程
前端·javascript·后端
.生产的驴10 小时前
SpringCloud 分布式锁Redisson锁的重入性与看门狗机制 高并发 可重入
java·分布式·后端·spring·spring cloud·信息可视化·tomcat
攒了一袋星辰10 小时前
Spring @Autowired自动装配的实现机制
java·后端·spring
我的golang之路果然有问题10 小时前
快速了解GO+ElasticSearch
开发语言·经验分享·笔记·后端·elasticsearch·golang
love530love11 小时前
Windows 下部署 SUNA 项目:虚拟环境尝试与最终方案
前端·人工智能·windows·后端·docker·rust·开源
元闰子11 小时前
走技术路线需要些什么?
后端·面试·程序员