A modern C++ implementation of a fully connected neural network with an arbitrary number of layers and an arbitrary number of nodes per layer, trained with basic gradient descent (backpropagation). The demo trains on the XOR truth table: inputs {1,0}, {1,1}, {0,1}, {0,0} with targets {1}, {0}, {1}, {0}. On startup the program reads a text file "s1.txt"; if that file contains the string "{2,4,3,1}", the network structure becomes {2,4,3,1} (2 input nodes, a first hidden layer with 4 nodes, a second hidden layer with 3 nodes, and 1 output node). The program then reads a second text file "s2.txt", e.g. "{2,4,1}", rebuilds the network with 2 input nodes, one hidden layer of 4 nodes, and 1 output node, and trains it again with backpropagation.
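The listing below expects both structure files to already exist. As a minimal setup sketch (assuming the files simply sit in the current working directory, where the program looks for them), the following writes s1.txt and s2.txt with the structures mentioned above:

```cpp
// Setup sketch: create the two structure files read by the demo program.
// Assumption: the current working directory is where the demo is run.
#include <fstream>

int main() {
    std::ofstream("s1.txt") << "{2,4,3,1}";  // 2 inputs, hidden layers of 4 and 3 nodes, 1 output
    std::ofstream("s2.txt") << "{2,4,1}";    // 2 inputs, one hidden layer of 4 nodes, 1 output
    return 0;
}
```

The full program: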
```cpp
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <cmath>
#include <random>
using namespace std;

// Sigmoid activation function.
double sigmoid(double x) {
    return 1.0 / (1.0 + exp(-x));
}

// Note: the argument here is the already-activated output y = sigmoid(x),
// so the derivative is y * (1 - y).
double sigmoid_derivative(double y) {
    return y * (1.0 - y);
}
// Fully connected neural network with arbitrary layer sizes.
class NeuralNetwork {
public:
    vector<int> layers;              // nodes per layer, e.g. {2, 4, 3, 1}
    vector<vector<double>> weights;  // weights[i] connects layer i to layer i + 1, laid out as [to][from]
    vector<vector<double>> outputs;  // activations of every layer from the last forward pass

    NeuralNetwork(vector<int> layers) : layers(layers) {
        random_device rd;
        mt19937 gen(rd());
        // Symmetric initialization trains more reliably than weights drawn from (0, 1).
        uniform_real_distribution<> dis(-1.0, 1.0);
        for (size_t i = 1; i < layers.size(); ++i) {
            vector<double> weightLayer;
            for (int j = 0; j < layers[i - 1] * layers[i]; ++j) {
                weightLayer.push_back(dis(gen));
            }
            weights.push_back(weightLayer);
        }
    }
    // Forward pass: stores every layer's activations in `outputs` and returns the output layer.
    vector<double> forward(vector<double> input) {
        outputs.clear();
        outputs.push_back(input);
        for (size_t i = 1; i < layers.size(); ++i) {
            vector<double> output(layers[i]);
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {
                double sum = 0;
                for (int l = 0; l < layers[i - 1]; ++l) {
                    sum += outputs[i - 1][l] * weights[i - 1][k++];
                }
                output[j] = sigmoid(sum);
            }
            outputs.push_back(output);
        }
        return outputs.back();
    }
    // Forward pass that also prints the input pattern and the resulting network output.
    vector<double> forwarOut(vector<double> input) {
        cout << endl << "input:  [";
        for (size_t j = 0; j < input.size(); ++j) {
            cout << input[j] << (j + 1 < input.size() ? ", " : "");
        }
        cout << "]" << endl;

        vector<double> result = forward(input);

        cout << "output: [";
        for (size_t j = 0; j < result.size(); ++j) {
            cout << result[j] << (j + 1 < result.size() ? ", " : "");
        }
        cout << "]" << endl;
        return result;
    }
    //---------------------------------------------------------------------
    // One backpropagation step on a single (input, target) pair with learning rate lr.
    void train(vector<double> input, vector<double> target, double lr) {
        forward(input);
        vector<vector<double>> deltas(layers.size());
        // Backward pass: compute the delta of every layer except the input layer.
        for (int i = (int)layers.size() - 1; i > 0; --i) {
            deltas[i].resize(layers[i]);
            if (i == (int)layers.size() - 1) {
                // Output layer: delta = (target - output) * sigmoid'(output).
                for (int j = 0; j < layers[i]; ++j) {
                    double error = target[j] - outputs[i][j];
                    deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
                }
            }
            else {
                // Hidden layer: propagate the next layer's deltas back through the
                // weights leaving node j (weights[i] is laid out as [to][from]).
                for (int j = 0; j < layers[i]; ++j) {
                    double error = 0;
                    for (int l = 0; l < layers[i + 1]; ++l) {
                        error += weights[i][l * layers[i] + j] * deltas[i + 1][l];
                    }
                    deltas[i][j] = error * sigmoid_derivative(outputs[i][j]);
                }
            }
        }
        // Gradient-descent weight update.
        for (int i = (int)layers.size() - 1; i > 0; --i) {
            int k = 0;
            for (int j = 0; j < layers[i]; ++j) {
                for (int l = 0; l < layers[i - 1]; ++l) {
                    weights[i - 1][k++] += lr * deltas[i][j] * outputs[i - 1][l];
                }
            }
        }
    }
};
// Read a layer description such as "{2,4,3,1}" from the first line of a file.
vector<int> readLayersFromFile(const string& filename) {
    ifstream file(filename);
    string str;
    if (file) {
        getline(file, str);
    }
    vector<int> layers;
    if (str.size() < 2) {
        return layers;  // missing or empty file: return an empty structure
    }
    stringstream ss(str.substr(1, str.length() - 2));  // strip the surrounding braces
    string token;
    while (getline(ss, token, ',')) {
        layers.push_back(stoi(token));
    }
    return layers;
}
int main() {
    // First run: read the network structure from s1.txt and build the network.
    vector<int> layers1 = readLayersFromFile("s1.txt");
    NeuralNetwork nn1(layers1);

    // Training data: the XOR truth table.
    vector<vector<double>> inputs1 = { {1, 0}, {1, 1}, {0, 1}, {0, 0} };
    vector<vector<double>> targets1 = { {1}, {0}, {1}, {0} };
    for (int epoch = 0; epoch < 1000; ++epoch) {
        for (size_t i = 0; i < inputs1.size(); ++i) {
            nn1.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Second run: read the structure from s2.txt, rebuild the network, and train it
    // again on the same data.
    vector<int> layers2 = readLayersFromFile("s2.txt");
    NeuralNetwork nn2(layers2);
    for (int epoch = 0; epoch < 5000; ++epoch) {
        for (size_t i = 0; i < inputs1.size(); ++i) {
            nn2.train(inputs1[i], targets1[i], 0.5);
        }
    }

    // Print the second network's predictions for all four input patterns.
    nn2.forwarOut({ 0, 1 });
    cout << endl;
    nn2.forwarOut({ 1, 1 });
    cout << endl;
    nn2.forwarOut({ 1, 0 });
    cout << endl;
    nn2.forwarOut({ 0, 0 });
    return 0;
}
```
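To try it out, compile with a C++11 (or later) compiler, for example `g++ -std=c++17 main.cpp -o nn` (the source file name here is just an example), make sure s1.txt and s2.txt exist in the working directory, and run the binary. The four printed input/output pairs at the end are the second network's predictions for each XOR pattern; since the weights are initialized randomly, the exact numbers will vary from run to run.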