增减网络20220101

现代C++实现的全连接神经网络，支持任意层数和每层任意结点数。代码包括基本的梯度下降：输入数据有两组 {1,0}、{1,1}，训练目标数据 target 也有两个 {1}、{0}。程序运行时先读取文本文件"s1.txt"，如果文件中有字符串"{2,4,3,1}"，则网络结构就是 {2,4,3,1}（即输入层 2 个节点，输出层 1 个节点，第 1 个隐藏层 4 个节点，第 2 个隐藏层 3 个节点）。之后程序再读取文本文件"s2.txt"中的字符串，比如"{2,4,1}"，并据此重构神经网络——这次输入层 2 个节点，隐藏层 4 个节点，输出层 1 个节点——然后再次进行反向传播训练。

C++ 代码如下：
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <cmath>
#include <random>

using namespace std;

// Sigmoid 函数和它的导数
// Logistic sigmoid activation: maps any real x into the open interval (0, 1).
double sigmoid(double x) {
    const double e = std::exp(-x);
    return 1.0 / (1.0 + e);
}

// Derivative of the sigmoid, expressed in terms of the sigmoid's
// OUTPUT value x (not its input): if y = sigmoid(t), dy/dt = y * (1 - y).
double sigmoid_derivative(double x) {
    const double complement = 1.0 - x;
    return x * complement;
}

// 全连接神经网络
// Fully-connected feed-forward neural network with sigmoid activations.
//
// `layers` holds the node count of each layer, e.g. {2,4,3,1}.
// `weights[i]` is the flattened weight matrix between layer i and layer
// i+1, stored row-major by DESTINATION node:
//     weight(from node l in layer i, to node j in layer i+1)
//         = weights[i][j * layers[i] + l]
// The network has no bias terms (kept from the original design, so an
// all-zero input always produces 0.5 at every node).
class NeuralNetwork {
public:
    std::vector<int> layers;                   // nodes per layer
    std::vector<std::vector<double>> weights;  // flattened per-layer weight matrices
    std::vector<std::vector<double>> outputs;  // activations from the last forward pass

    // Logistic sigmoid; static helper so the class is self-contained.
    static double activate(double x) { return 1.0 / (1.0 + std::exp(-x)); }

    // Sigmoid derivative expressed via the sigmoid's OUTPUT y: y * (1 - y).
    static double activate_grad(double y) { return y * (1.0 - y); }

    // Build the network and initialize every weight uniformly in (-1, 1).
    // Symmetric initialization replaces the original uniform(0, 1): an
    // all-positive start saturates sigmoid units and stalls training.
    NeuralNetwork(std::vector<int> layerSizes) : layers(std::move(layerSizes)) {
        std::random_device rd;
        std::mt19937 gen(rd());
        std::uniform_real_distribution<> dis(-1.0, 1.0);

        for (std::size_t i = 1; i < layers.size(); ++i) {
            const std::size_t fanIn = static_cast<std::size_t>(layers[i - 1]);
            const std::size_t fanOut = static_cast<std::size_t>(layers[i]);
            std::vector<double> layerWeights(fanIn * fanOut);
            for (double& w : layerWeights) {
                w = dis(gen);
            }
            weights.push_back(std::move(layerWeights));
        }
    }

    // Run one forward pass; stores every layer's activations in `outputs`
    // and returns the activations of the output layer.
    std::vector<double> forward(std::vector<double> input) {
        outputs.clear();
        outputs.push_back(std::move(input));

        for (std::size_t i = 1; i < layers.size(); ++i) {
            std::vector<double> act(layers[i]);
            for (int j = 0; j < layers[i]; ++j) {
                double sum = 0.0;
                for (int l = 0; l < layers[i - 1]; ++l) {
                    // weight (l -> j) lives at j * layers[i-1] + l
                    sum += outputs[i - 1][l] * weights[i - 1][j * layers[i - 1] + l];
                }
                act[j] = activate(sum);
            }
            outputs.push_back(std::move(act));
        }
        return outputs.back();
    }

    // Same as forward() but also prints the input vector and each layer's
    // activations.  BUG FIX: the original emitted garbled, unbalanced
    // brackets ("], ", stray "}; " and "}};" lines); output is now a clean
    // bracketed list per layer.
    std::vector<double> forwarOut(std::vector<double> input) {
        std::cout << "\ninput: [";
        for (std::size_t j = 0; j < input.size(); ++j) {
            if (j != 0) std::cout << ", ";
            std::cout << input[j];
        }
        std::cout << "]\n";

        forward(std::move(input));

        for (std::size_t i = 1; i < layers.size(); ++i) {
            std::cout << "layer " << i << ": [";
            for (std::size_t j = 0; j < outputs[i].size(); ++j) {
                if (j != 0) std::cout << ", ";
                std::cout << outputs[i][j];
            }
            std::cout << "]\n";
        }
        return outputs.back();
    }
    //---------------------------------------------------------------------

    // One step of online backpropagation on a single (input, target) pair
    // with learning rate `lr`.
    void train(std::vector<double> input, std::vector<double> target, double lr) {
        forward(std::move(input));
        const int last = static_cast<int>(layers.size()) - 1;

        // deltas[i][j] = dE/d(net input) for node j of layer i
        std::vector<std::vector<double>> deltas(layers.size());
        for (int i = last; i >= 0; --i) {
            deltas[i].resize(layers[i]);
            if (i == last) {
                // Output layer: delta = (target - output) * sigma'(output)
                for (int j = 0; j < layers[i]; ++j) {
                    const double error = target[j] - outputs[i][j];
                    deltas[i][j] = error * activate_grad(outputs[i][j]);
                }
            }
            else {
                // Hidden layer: accumulate through the weights LEAVING node j.
                // BUG FIX: the weight from node j (layer i) to node l
                // (layer i+1) sits at index l * layers[i] + j; the original
                // sequential k++ walked the matrix in transposed order,
                // pairing deltas with the wrong weights whenever the matrix
                // is not symmetric.
                for (int j = 0; j < layers[i]; ++j) {
                    double error = 0.0;
                    for (int l = 0; l < layers[i + 1]; ++l) {
                        error += weights[i][l * layers[i] + j] * deltas[i + 1][l];
                    }
                    deltas[i][j] = error * activate_grad(outputs[i][j]);
                }
            }
        }

        // Apply the weight updates: w += lr * delta(dest) * activation(src),
        // walking the matrix in the same row-major order forward() uses.
        for (int i = last; i > 0; --i) {
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {
                for (int l = 0; l < layers[i - 1]; ++l) {
                    weights[i - 1][k++] += lr * deltas[i][j] * outputs[i - 1][l];
                }
            }
        }
    }
};

// 从文件读取层信息
// Parse a layer-size list such as "{2,4,3,1}" from the first line of
// `filename` and return it as a vector, e.g. {2, 4, 3, 1}.
//
// Returns an empty vector when the file is missing, the line contains no
// braced list, or a token is not a valid integer.  BUG FIX: the original
// called str.substr(1, len-2) unconditionally, which throws
// std::out_of_range on an empty/missing file, and let std::stoi throw
// std::invalid_argument on junk tokens.
std::vector<int> readLayersFromFile(const std::string& filename) {
    std::ifstream file(filename);
    std::string line;
    if (!file || !std::getline(file, line)) {
        return {};  // missing or empty file: caller decides on a fallback
    }

    // Locate the braced region so leading/trailing junk is tolerated.
    const std::size_t open = line.find('{');
    if (open == std::string::npos) {
        return {};
    }
    const std::size_t close = line.find('}', open);
    if (close == std::string::npos || close <= open + 1) {
        return {};
    }

    std::vector<int> layers;
    std::stringstream ss(line.substr(open + 1, close - open - 1));
    std::string token;
    while (std::getline(ss, token, ',')) {
        try {
            layers.push_back(std::stoi(token));
        } catch (const std::exception&) {
            return {};  // non-numeric token: treat the whole spec as invalid
        }
    }
    return layers;
}

int main() {
    // 第一次从文件s1.txt读取网络结构并构建网络
    vector<int> layers1 = readLayersFromFile("\/s1.txt");
    NeuralNetwork nn1(layers1);

    // 使用{1, 0} 和 {1, 1}训练
    vector<vector<double>> inputs1 = { {1, 0}, {1, 1},{0,1},{0,0} };    //{ {1, 0}, {1, 1} };
    vector<vector<double>> targets1 = { {1}, {0},{1},{0} };

    for (int epoch = 0; epoch < 1000; ++epoch) {
        for (int i = 0; i < inputs1.size(); ++i) {
            nn1.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // 第二次从文件s2.txt读取网络结构并构建网络
    vector<int> layers2 = readLayersFromFile("\/s2.txt");
    NeuralNetwork nn2(layers2);

    // 再次使用{1, 0} 和 {1, 1}训练
    for (int epoch = 0; epoch < 5000; ++epoch) {
        for (int i = 0; i < inputs1.size(); ++i) {
            nn2.train(inputs1[i], targets1[i], 0.5);
        }
    }

    nn2.forwarOut( {0,1}  );

    cout << endl;
    nn2.forwarOut({ 1,1 });

    cout << endl;
    nn2.forwarOut({ 1,0 });

    cout << endl;
    nn2.forwarOut({ 0,0 });

    return 0;
}
相关推荐
阿里嘎多哈基米几秒前
速通Hot100-Day09——二叉树
算法·leetcode·二叉树·hot100
Frostnova丶3 分钟前
LeetCode 48 & 1886.矩阵旋转与判断
算法·leetcode·矩阵
多打代码3 分钟前
2026.3.22 回文子串
算法·leetcode·职场和发展
m0_662577976 分钟前
嵌入式C++安全编码
开发语言·c++·算法
2301_810160959 分钟前
代码生成器优化策略
开发语言·c++·算法
HUTAC15 分钟前
关于进制转换及其应用的算法题总结
数据结构·c++·算法
im_AMBER18 分钟前
Leetcode 144 位1的个数 | 只出现一次的数字
学习·算法·leetcode
暮冬-  Gentle°22 分钟前
C++中的工厂模式实战
开发语言·c++·算法
Lisssaa23 分钟前
打卡第二十二天
c++·算法·图论
pu_taoc24 分钟前
理解 lock_guard, unique_lock 与 shared_lock 的设计哲学与应用场景
开发语言·c++·算法