增减网络20220101

现代C++实现的、任意层数且每层任意结点数的全连接神经网络。代码包含基本的梯度下降训练:输入数据有四组 {1,0}、{1,1}、{0,1}、{0,0},对应的训练目标 target 为 {1}、{0}、{1}、{0}。程序运行时先读取文本文件"s1.txt",如果其中含有字符串"{2,4,3,1}",则网络结构就是 {2,4,3,1}(即输入层 2 个结点,输出层 1 个结点,第 1 个隐藏层 4 个结点,第 2 个隐藏层 3 个结点)。之后程序再读取文本文件"s2.txt"中的字符串,例如"{2,4,1}",据此重新构建神经网络——这次输入层 2 个结点,隐藏层 4 个结点,输出层 1 个结点——并再次进行反向传播训练。

cpp 复制代码
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <cmath>
#include <random>

using namespace std;

// Sigmoid 函数和它的导数
// Logistic (sigmoid) activation: maps any real x into the open interval (0, 1).
double sigmoid(double x) {
    return 1.0 / (1.0 + exp(-x));
}

// Derivative of the logistic function expressed in terms of its OUTPUT
// y = sigmoid(x): d/dx sigmoid(x) = y * (1 - y).
// NOTE: callers pass the already-activated value, not the raw pre-activation.
double sigmoid_derivative(double y) {
    return y * (1.0 - y);
}

// 全连接神经网络
// Fully connected feed-forward neural network with sigmoid activations.
//
// layers[i]  : number of nodes in layer i (layers[0] = input layer).
// weights[i] : flattened weight matrix between layer i and layer i+1,
//              row-major by destination node — the weight feeding node j
//              of layer i+1 from node l of layer i sits at
//              weights[i][j * layers[i] + l].
// outputs[i] : activations of layer i from the most recent forward pass.
//
// NOTE: the network has no bias terms, which limits what it can represent
// (e.g. XOR is hard to learn without biases).
class NeuralNetwork {
public:
    vector<int> layers;
    vector<vector<double>> weights;
    vector<vector<double>> outputs;

    // Logistic activation, class-local so the class is self-contained.
    static double activate(double x) { return 1.0 / (1.0 + exp(-x)); }

    // Derivative of the logistic expressed via its output y = activate(x).
    static double activateDerivative(double y) { return y * (1.0 - y); }

    // Build the network and initialise every weight uniformly in (-1, 1).
    // BUGFIX: the original drew weights from (0, 1); with no bias terms,
    // all-positive weights force every pre-activation positive and slow
    // (or prevent) learning.
    NeuralNetwork(vector<int> layerSizes) : layers(layerSizes) {
        random_device rd;
        mt19937 gen(rd());
        uniform_real_distribution<> dis(-1, 1);

        for (size_t i = 1; i < layers.size(); ++i) {
            vector<double> weightLayer;
            weightLayer.reserve(static_cast<size_t>(layers[i - 1]) * layers[i]);
            for (int j = 0; j < layers[i - 1] * layers[i]; ++j) {
                weightLayer.push_back(dis(gen));
            }
            weights.push_back(weightLayer);
        }
    }

    // Run one forward pass; caches every layer's activations in `outputs`
    // and returns the output layer's activations.
    vector<double> forward(vector<double> input) {
        outputs.clear();
        outputs.push_back(input);

        for (size_t i = 1; i < layers.size(); ++i) {
            vector<double> activation(layers[i]);
            int k = 0;  // walks the flat weight matrix row by row
            for (int j = 0; j < layers[i]; ++j) {
                double sum = 0;
                for (int l = 0; l < layers[i - 1]; ++l) {
                    sum += outputs[i - 1][l] * weights[i - 1][k++];
                }
                activation[j] = activate(sum);
            }
            outputs.push_back(activation);
        }
        return outputs.back();
    }

    // forward() plus a readable dump of the input and every layer's
    // activations.  BUGFIX: the original interleaved mismatched brackets
    // and printed some values twice; the computation itself is unchanged.
    vector<double> forwarOut(vector<double> input) {
        vector<double> result = forward(input);

        cout << endl << " input: [";
        for (size_t j = 0; j < input.size(); ++j) {
            if (j) cout << ", ";
            cout << input[j];
        }
        cout << "]" << endl;

        for (size_t i = 1; i < outputs.size(); ++i) {
            cout << " layer " << i << ": {";
            for (size_t j = 0; j < outputs[i].size(); ++j) {
                if (j) cout << ", ";
                cout << outputs[i][j];
            }
            cout << "}" << endl;
        }
        return result;
    }
    //---------------------------------------------------------------------

    // One stochastic-gradient-descent step on 0.5 * ||target - output||^2
    // for a single (input, target) pair, with learning rate `lr`.
    void train(vector<double> input, vector<double> target, double lr) {
        forward(input);
        vector<vector<double>> deltas(layers.size());
        const int last = static_cast<int>(layers.size()) - 1;

        // Output layer: delta_j = (t_j - o_j) * o_j * (1 - o_j).
        deltas[last].resize(layers[last]);
        for (int j = 0; j < layers[last]; ++j) {
            double error = target[j] - outputs[last][j];
            deltas[last][j] = error * activateDerivative(outputs[last][j]);
        }

        // Hidden layers (the input layer needs no delta).
        for (int i = last - 1; i > 0; --i) {
            deltas[i].resize(layers[i]);
            for (int j = 0; j < layers[i]; ++j) {
                double error = 0;
                for (int l = 0; l < layers[i + 1]; ++l) {
                    // BUGFIX: the weight from node j (layer i) to node l
                    // (layer i+1) sits at l * layers[i] + j in the flat
                    // row-major matrix.  The original read the transposed
                    // position (j * layers[i+1] + l), back-propagating
                    // through the wrong connections whenever adjacent
                    // layers have different sizes.
                    error += weights[i][l * layers[i] + j] * deltas[i + 1][l];
                }
                deltas[i][j] = error * activateDerivative(outputs[i][j]);
            }
        }

        // Gradient step: w_{j<-l} += lr * delta_j * activation_l.
        for (int i = last; i > 0; --i) {
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {
                for (int l = 0; l < layers[i - 1]; ++l) {
                    weights[i - 1][k++] += lr * deltas[i][j] * outputs[i - 1][l];
                }
            }
        }
    }
};

// 从文件读取层信息
// Read a layer specification such as "{2,4,3,1}" from the first line of
// `filename` and return the per-layer node counts.
// Returns an empty vector when the file is missing or holds no parseable
// integers.  BUGFIX: the original called str.substr(1, str.length() - 2)
// on a possibly empty string (missing/empty file), throwing
// std::out_of_range, and let stoi throw on non-numeric tokens.
vector<int> readLayersFromFile(const string& filename) {
    vector<int> layers;
    ifstream file(filename);
    string line;
    if (!file || !getline(file, line)) {
        return layers;  // missing or empty file
    }

    // Keep only the text between '{' and '}' when both braces are present;
    // otherwise fall back to parsing the whole line.
    size_t open = line.find('{');
    size_t close = line.find('}');
    string body = (open != string::npos && close != string::npos && close > open)
                      ? line.substr(open + 1, close - open - 1)
                      : line;

    stringstream ss(body);
    string token;
    while (getline(ss, token, ',')) {
        try {
            layers.push_back(stoi(token));
        } catch (...) {
            // Skip tokens that are not integers instead of crashing.
        }
    }
    return layers;
}

int main() {
    // First network: topology read from s1.txt (e.g. "{2,4,3,1}").
    // BUGFIX: the original passed "\/s1.txt" — "\/" is not a valid escape
    // and resolves to the absolute path "/s1.txt" (filesystem root), not
    // the intended file in the working directory.
    vector<int> layers1 = readLayersFromFile("s1.txt");
    if (layers1.size() < 2) {
        cerr << "s1.txt did not contain a valid network spec like {2,4,3,1}" << endl;
        return 1;
    }
    NeuralNetwork nn1(layers1);

    // XOR truth table: four input pairs and their targets.
    vector<vector<double>> inputs1 = { {1, 0}, {1, 1}, {0, 1}, {0, 0} };
    vector<vector<double>> targets1 = { {1}, {0}, {1}, {0} };

    for (int epoch = 0; epoch < 1000; ++epoch) {
        for (size_t i = 0; i < inputs1.size(); ++i) {
            nn1.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Second network: rebuilt from s2.txt (e.g. "{2,4,1}") and retrained
    // on the same data.  BUGFIX: path was "\/s2.txt" (see above).
    vector<int> layers2 = readLayersFromFile("s2.txt");
    if (layers2.size() < 2) {
        cerr << "s2.txt did not contain a valid network spec like {2,4,1}" << endl;
        return 1;
    }
    NeuralNetwork nn2(layers2);

    for (int epoch = 0; epoch < 5000; ++epoch) {
        for (size_t i = 0; i < inputs1.size(); ++i) {
            nn2.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Dump nn2's activations for all four XOR inputs.
    nn2.forwarOut({ 0, 1 });
    cout << endl;
    nn2.forwarOut({ 1, 1 });
    cout << endl;
    nn2.forwarOut({ 1, 0 });
    cout << endl;
    nn2.forwarOut({ 0, 0 });

    return 0;
}
相关推荐
Captain823Jack37 分钟前
nlp新词发现——浅析 TF·IDF
人工智能·python·深度学习·神经网络·算法·自然语言处理
Captain823Jack1 小时前
w04_nlp大模型训练·中文分词
人工智能·python·深度学习·神经网络·算法·自然语言处理·中文分词
是小胡嘛2 小时前
数据结构之旅:红黑树如何驱动 Set 和 Map
数据结构·算法
m0_748255022 小时前
前端常用算法集合
前端·算法
呆呆的猫2 小时前
【LeetCode】227、基本计算器 II
算法·leetcode·职场和发展
Tisfy2 小时前
LeetCode 1705.吃苹果的最大数目:贪心(优先队列) - 清晰题解
算法·leetcode·优先队列·贪心·
余额不足121383 小时前
C语言基础十六:枚举、c语言中文件的读写操作
linux·c语言·算法
火星机器人life5 小时前
基于ceres优化的3d激光雷达开源算法
算法·3d
虽千万人 吾往矣5 小时前
golang LeetCode 热题 100(动态规划)-更新中
算法·leetcode·动态规划
arnold666 小时前
华为OD E卷(100分)34-转盘寿司
算法·华为od