Resizable network 20220101

A modern C++ fully connected neural network with an arbitrary number of layers and an arbitrary number of nodes per layer. The code includes basic gradient-descent training; the sample data in main covers the XOR patterns {1,0}, {1,1}, {0,1}, {0,0} with targets {1}, {0}, {1}, {0}. On startup the program reads a text file "s1.txt"; if that file contains the string "{2,4,3,1}", the network structure becomes {2,4,3,1} (2 input nodes, 1 output node, 4 nodes in the first hidden layer and 3 in the second). The program then reads a second text file, "s2.txt", containing for example "{2,4,1}", rebuilds the network (this time with 2 input nodes, 1 output node and a single hidden layer of 4 nodes), and trains it again with backpropagation.
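For reference, each structure file holds a single line in exactly the format described above:

s1.txt: {2,4,3,1}
s2.txt: {2,4,1}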

cpp
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <cmath>
#include <random>

using namespace std;

// Sigmoid activation function and its derivative
double sigmoid(double x) {
    return 1 / (1 + exp(-x));
}

// Note: the argument is the already-activated output y = sigmoid(z), so the derivative is y * (1 - y)
double sigmoid_derivative(double x) {
    return x * (1 - x);
}

// Fully connected neural network
class NeuralNetwork {
public:
    vector<int> layers;              // nodes per layer, e.g. {2,4,3,1}
    vector<vector<double>> weights;  // weights[i]: flattened matrix between layer i and layer i+1, index = dst * layers[i] + src
    vector<vector<double>> outputs;  // per-layer activations from the most recent forward pass

    // Build the network with random weights in [0, 1); note that no bias terms are used
    NeuralNetwork(vector<int> layers) : layers(layers) {
        random_device rd;
        mt19937 gen(rd());
        uniform_real_distribution<> dis(0, 1);

        for (int i = 1; i < layers.size(); ++i) {
            vector<double> weightLayer;
            for (int j = 0; j < layers[i - 1] * layers[i]; ++j) {
                weightLayer.push_back(dis(gen));
            }
            weights.push_back(weightLayer);
        }
    }

    // Forward pass: caches every layer's activations in outputs and returns the output layer
    vector<double> forward(vector<double> input) {
        outputs.clear();
        outputs.push_back(input);

        for (int i = 1; i < layers.size(); ++i) {
            vector<double> output(layers[i]);
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {           // destination node in layer i
                double sum = 0;
                for (int l = 0; l < layers[i - 1]; ++l) {   // source node in layer i-1
                    sum += outputs[i - 1][l] * weights[i - 1][k++];
                }
                output[j] = sigmoid(sum);
            }
            outputs.push_back(output);
        }
        return outputs.back();
    }

    // Forward pass that also prints the input vector and every layer's activations
    vector<double> forwarOut(vector<double> input) {
        outputs.clear();
        outputs.push_back(input);

        cout << endl << "input: [";
        for (size_t jj = 0; jj < input.size(); ++jj) {
            cout << input[jj] << (jj + 1 < input.size() ? ", " : "");
        }
        cout << "]" << endl;

        for (size_t i = 1; i < layers.size(); ++i) {
            vector<double> output(layers[i]);
            int k = 0;
            cout << "layer " << i << ": [";
            for (int j = 0; j < layers[i]; ++j) {
                double sum = 0;
                for (int l = 0; l < layers[i - 1]; ++l) {
                    sum += outputs[i - 1][l] * weights[i - 1][k++];
                }
                output[j] = sigmoid(sum);
                cout << output[j] << (j + 1 < layers[i] ? ", " : "");
            }
            cout << "]" << endl;
            outputs.push_back(output);
        }//for110i
        return outputs.back();
    }//forwarOut
    //---------------------------------------------------------------------

    // One step of backpropagation on a single (input, target) pair with learning rate lr
    void train(vector<double> input, vector<double> target, double lr) {
        forward(input);
        vector<vector<double>> deltas(layers.size());
        for (int i = (int)layers.size() - 1; i >= 0; --i) {
            deltas[i].resize(layers[i]);
            if (i == (int)layers.size() - 1) {
                // Output layer: delta = (target - output) * sigmoid'(output)
                for (int j = 0; j < layers[i]; ++j) {
                    double error = target[j] - outputs[i][j];
                    deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
                }
            }
            else {
                // Hidden layers: propagate the deltas of layer i+1 back through weights[i].
                // weights[i] is flattened as [dst * layers[i] + src], so the weight from
                // node j of layer i to node l of layer i+1 is weights[i][l * layers[i] + j].
                for (int j = 0; j < layers[i]; ++j) {
                    double error = 0;
                    for (int l = 0; l < layers[i + 1]; ++l) {
                        error += weights[i][l * layers[i] + j] * deltas[i + 1][l];
                    }
                    deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
                }
            }
        }

        // Weight update: w(src->dst) += lr * delta_dst * output_src
        for (int i = (int)layers.size() - 1; i > 0; --i) {
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {
                for (int l = 0; l < layers[i - 1]; ++l) {
                    weights[i - 1][k++] += lr * deltas[i][j] * outputs[i - 1][l];
                }
            }
        }
    }
};

// Read the layer sizes from a file containing a string such as "{2,4,3,1}"
vector<int> readLayersFromFile(const string& filename) {
    ifstream file(filename);
    string str;
    if (file) {
        getline(file, str);
    }
    if (str.size() < 2) {
        cerr << "Could not read a layer description from " << filename << endl;
        return {};
    }
    // Strip the surrounding braces and split the remainder on commas
    stringstream ss(str.substr(1, str.length() - 2));
    string token;
    vector<int> layers;
    while (getline(ss, token, ',')) {
        layers.push_back(stoi(token));
    }
    return layers;
}

int main() {
    // First pass: read the network structure from s1.txt and build the network
    vector<int> layers1 = readLayersFromFile("s1.txt");
    NeuralNetwork nn1(layers1);

    // Training data: the four XOR patterns and their targets
    vector<vector<double>> inputs1 = { {1, 0}, {1, 1}, {0, 1}, {0, 0} };
    vector<vector<double>> targets1 = { {1}, {0}, {1}, {0} };

    for (int epoch = 0; epoch < 1000; ++epoch) {
        for (int i = 0; i < inputs1.size(); ++i) {
            nn1.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Second pass: read the network structure from s2.txt and rebuild the network
    vector<int> layers2 = readLayersFromFile("s2.txt");
    NeuralNetwork nn2(layers2);

    // Train the rebuilt network on the same XOR data
    for (int epoch = 0; epoch < 5000; ++epoch) {
        for (int i = 0; i < inputs1.size(); ++i) {
            nn2.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Print the activations of the second network for each input pattern
    nn2.forwarOut({ 0, 1 });

    cout << endl;
    nn2.forwarOut({ 1,1 });

    cout << endl;
    nn2.forwarOut({ 1,0 });

    cout << endl;
    nn2.forwarOut({ 0,0 });

    return 0;
}
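To try it out, place s1.txt and s2.txt in the program's working directory (the paths in main are relative) and build with any C++11 compiler; the source file name nn.cpp below is only an example:

g++ -std=c++11 -O2 nn.cpp -o nn
./nn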