Implementing a BP Neural Network in C++
This article shares a complete C++ implementation of a BP (backpropagation) neural network for your reference. The details are as follows.
BP.h
#pragma once
#include <vector>
#include <stdlib.h>
#include <time.h>
#include <cmath>
#include <iostream>

using std::vector;
using std::exp;
using std::cout;
using std::endl;

class BP
{
private:
	int studyNum;                      // maximum number of training iterations
	double h;                          // learning rate
	double allowError;                 // error tolerance
	vector<int> layerNum;              // number of nodes per layer, excluding the constant bias node
	vector<vector<vector<double>>> w;  // weights
	vector<vector<vector<double>>> dw; // weight increments
	vector<vector<double>> b;          // biases
	vector<vector<double>> db;         // bias increments
	vector<vector<vector<double>>> a;  // node activations
	vector<vector<double>> x;          // inputs
	vector<vector<double>> y;          // expected outputs
	void iniwb();                      // initialize w and b
	void inidwdb();                    // initialize dw and db
	double sigmoid(double z);          // activation function
	void forward();                    // forward propagation
	void backward();                   // backpropagation
	double Error();                    // compute the error
public:
	BP(vector<int> const& layer_num, vector<vector<double>> const& input_a0,
		vector<vector<double>> const& output_y, double hh = 0.5,
		double allerror = 0.001, int studynum = 1000);
	BP();
	void setLayerNumInput(vector<int> const& layer_num, vector<vector<double>> const& input);
	void setOutputy(vector<vector<double>> const& output_y);
	void setHErrorStudyNum(double hh, double allerror, int studynum);
	void run();                                    // train the BP neural network
	vector<double> predict(vector<double>& input); // predict with the trained network
	~BP();
};
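Besides the full constructor, the header also exposes a default constructor plus setters for configuring the network in steps. Here is a minimal sketch of that second path; the layer sizes and the XOR-style training data are illustrative only, not taken from the original article:

#include "BP.h"

int main()
{
	BP net; // default-construct, then configure via the setters
	// 2 input nodes, one hidden layer of 4 nodes, 1 output node
	// (illustrative sizes; the data below is XOR-style toy data)
	net.setLayerNumInput({ 2, 4, 1 }, { { 0, 0, 1, 1 },    // first input node, one value per sample
	                                    { 0, 1, 0, 1 } }); // second input node, one value per sample
	net.setOutputy({ { 0, 1, 1, 0 } });      // expected outputs, one row per output node
	net.setHErrorStudyNum(0.5, 0.001, 5000); // learning rate, error tolerance, max iterations
	net.run();
	vector<double> sample = { 1, 0 };
	vector<double> out = net.predict(sample);
	cout << out[0] << endl;
	return 0;
}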
BP.cpp
#include "BP.h" BP::BP(vector<int>const& layer_num, vector<vector<double>>const & input, vector<vector<double>> const & output_y, double hh, double allerror,int studynum) { layerNum = layer_num; x = input;//輸入多少個(gè)節(jié)點(diǎn)的數(shù)據(jù),每個(gè)節(jié)點(diǎn)有多少份數(shù)據(jù) y = output_y; h = hh; allowError = allerror; a.resize(layerNum.size());//有這么多層網(wǎng)絡(luò)節(jié)點(diǎn) for (int i = 0; i < layerNum.size(); i++) { a[i].resize(layerNum[i]);//每層網(wǎng)絡(luò)節(jié)點(diǎn)有這么多個(gè)節(jié)點(diǎn) for (int j = 0; j < layerNum[i]; j++) a[i][j].resize(input[0].size()); } a[0] = input; studyNum = studynum; } BP::BP() { layerNum = {}; a = {}; y = {}; h = 0; allowError = 0; } BP::~BP() { } void BP::setLayerNumInput(vector<int>const& layer_num, vector<vector<double>> const & input) { layerNum = layer_num; x = input; a.resize(layerNum.size());//有這么多層網(wǎng)絡(luò)節(jié)點(diǎn) for (int i = 0; i < layerNum.size(); i++) { a[i].resize(layerNum[i]);//每層網(wǎng)絡(luò)節(jié)點(diǎn)有這么多個(gè)節(jié)點(diǎn) for (int j = 0; j < layerNum[i]; j++) a[i][j].resize(input[0].size()); } a[0] = input; } void BP::setOutputy(vector<vector<double>> const & output_y) { y = output_y; } void BP::setHErrorStudyNum(double hh, double allerror,int studynum) { h = hh; allowError = allerror; studyNum = studynum; } //初始化權(quán)重矩陣 void BP::iniwb() { w.resize(layerNum.size() - 1); b.resize(layerNum.size() - 1); srand((unsigned)time(NULL)); //節(jié)點(diǎn)層數(shù)層數(shù) for (int l = 0; l < layerNum.size() - 1; l++) { w[l].resize(layerNum[l + 1]); b[l].resize(layerNum[l + 1]); //對(duì)應(yīng)后層的節(jié)點(diǎn) for (int j = 0; j < layerNum[l + 1]; j++) { w[l][j].resize(layerNum[l]); b[l][j] = -1 + 2 * (rand() / RAND_MAX); //對(duì)應(yīng)前層的節(jié)點(diǎn) for (int k = 0; k < layerNum[l]; k++) w[l][j][k] = -1 + 2 * (rand() / RAND_MAX); } } } void BP::inidwdb() { dw.resize(layerNum.size() - 1); db.resize(layerNum.size() - 1); //節(jié)點(diǎn)層數(shù)層數(shù) for (int l = 0; l < layerNum.size() - 1; l++) { dw[l].resize(layerNum[l + 1]); db[l].resize(layerNum[l + 1]); //對(duì)應(yīng)后層的節(jié)點(diǎn) for (int j = 0; j < layerNum[l + 1]; j++) { dw[l][j].resize(layerNum[l]); db[l][j] = 0; //對(duì)應(yīng)前層的節(jié)點(diǎn) for (int k = 0; k < layerNum[l]; k++) w[l][j][k] = 0; } } } //激活函數(shù) double BP::sigmoid(double z) { return 1.0 / (1 + exp(-z)); } void BP::forward() { for (int l = 1; l < layerNum.size(); l++) { for (int i = 0; i < layerNum[l]; i++) { for (int j = 0; j < x[0].size(); j++) { a[l][i][j] = 0;//第l層第i個(gè)節(jié)點(diǎn)第j個(gè)數(shù)據(jù)樣本 //計(jì)算變量節(jié)點(diǎn)乘權(quán)值的和 for (int k = 0; k < layerNum[l - 1]; k++) a[l][i][j] += a[l - 1][k][j] * w[l - 1][i][k]; //加上節(jié)點(diǎn)偏置 a[l][i][j] += b[l - 1][i]; a[l][i][j] = sigmoid(a[l][i][j]); } } } } void BP::backward() { int xNum = x[0].size();//樣本個(gè)數(shù) //daP第l層da,daB第l+1層da vector<double> daP, daB; for (int j = 0; j < xNum; j++) { //處理最后一層的dw daP.clear(); daP.resize(layerNum[layerNum.size() - 1]); for (int i = 0, l = layerNum.size() - 1; i < layerNum[l]; i++) { daP[i] = a[l][i][j] - y[i][j]; for (int k = 0; k < layerNum[l - 1]; k++) dw[l - 1][i][k] += daP[i] * a[l][i][j] * (1 - a[l][i][j])*a[l - 1][k][j]; db[l - 1][i] += daP[i] * a[l][i][j] * (1 - a[l][i][j]); } //處理剩下層的權(quán)重w的增量Dw for (int l = layerNum.size() - 2; l > 0; l--) { daB = daP; daP.clear(); daP.resize(layerNum[l]); for (int k = 0; k < layerNum[l]; k++) { daP[k] = 0; for (int i = 0; i < layerNum[l + 1]; i++) daP[k] += daB[i] * a[l + 1][i][j] * (1 - a[l + 1][i][j])*w[l][i][k]; //dw for (int i = 0; i < layerNum[l - 
1]; i++) dw[l - 1][k][i] += daP[k] * a[l][k][j] * (1 - a[l][k][j])*a[l - 1][i][j]; //db db[l-1][k] += daP[k] * a[l][k][j] * (1 - a[l][k][j]); } } } //計(jì)算dw與db平均值 for (int l = 0; l < layerNum.size() - 1; l++) { //對(duì)應(yīng)后層的節(jié)點(diǎn) for (int j = 0; j < layerNum[l + 1]; j++) { db[l][j] = db[l][j] / xNum; //對(duì)應(yīng)前層的節(jié)點(diǎn) for (int k = 0; k < layerNum[l]; k++) w[l][j][k] = w[l][j][k] / xNum; } } //更新參數(shù)w與b for (int l = 0; l < layerNum.size() - 1; l++) { for (int j = 0; j < layerNum[l + 1]; j++) { b[l][j] = b[l][j] - h * db[l][j]; //對(duì)應(yīng)前層的節(jié)點(diǎn) for (int k = 0; k < layerNum[l]; k++) w[l][j][k] = w[l][j][k] - h * dw[l][j][k]; } } } double BP::Error() { int l = layerNum.size() - 1; double temp = 0, error = 0; for (int i = 0; i < layerNum[l]; i++) for (int j = 0; j < x[0].size(); j++) { temp = a[l][i][j] - y[i][j]; error += temp * temp; } error = error / x[0].size();//求對(duì)每一組樣本的誤差平均 error = error / 2; cout << error << endl; return error; } //運(yùn)行神經(jīng)網(wǎng)絡(luò) void BP::run() { iniwb(); inidwdb(); int i = 0; for (; i < studyNum; i++) { forward(); if (Error() <= allowError) { cout << "Study Success!" << endl; break; } backward(); } if (i == 10000) cout << "Study Failed!" << endl; } vector<double> BP::predict(vector<double>& input) { vector<vector<double>> a1; a1.resize(layerNum.size()); for (int l = 0; l < layerNum.size(); l++) a1[l].resize(layerNum[l]); a1[0] = input; for (int l = 1; l < layerNum.size(); l++) for (int i = 0; i < layerNum[l]; i++) { a1[l][i] = 0;//第l層第i個(gè)節(jié)點(diǎn)第j個(gè)數(shù)據(jù)樣本 //計(jì)算變量節(jié)點(diǎn)乘權(quán)值的和 for (int k = 0; k < layerNum[l - 1]; k++) a1[l][i] += a1[l - 1][k] * w[l - 1][i][k]; //加上節(jié)點(diǎn)偏置 a1[l][i] += b[l - 1][i]; a1[l][i] = sigmoid(a1[l][i]); } return a1[layerNum.size() - 1]; }
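For reference, here is one way to write out the math that forward(), backward() and Error() implement, with σ the sigmoid, h the learning rate, m the number of samples, and L the output layer; the indexing matches the code's w[l][j][k] layout, and the gradients are averaged over the samples before the update, as in the code:

% forward pass, as in forward():
a_j^{(l)} = \sigma\!\Big(\sum_k w_{jk}^{(l-1)}\, a_k^{(l-1)} + b_j^{(l-1)}\Big),
\qquad \sigma(z) = \frac{1}{1+e^{-z}}, \qquad \sigma'(z) = \sigma(z)\,\big(1-\sigma(z)\big)

% error over m samples, as in Error():
E = \frac{1}{2m} \sum_{\text{samples}} \sum_i \big(a_i^{(L)} - y_i\big)^2

% backward pass, as in backward(); daP/daB hold \partial E / \partial a:
\delta_i^{(L)} = a_i^{(L)} - y_i, \qquad
\delta_k^{(l)} = \sum_i \delta_i^{(l+1)}\, a_i^{(l+1)}\big(1-a_i^{(l+1)}\big)\, w_{ik}^{(l)}

\frac{\partial E}{\partial w_{jk}^{(l-1)}} = \delta_j^{(l)}\, a_j^{(l)}\big(1-a_j^{(l)}\big)\, a_k^{(l-1)},
\qquad w \leftarrow w - h\,\frac{\partial E}{\partial w}, \quad b \leftarrow b - h\,\frac{\partial E}{\partial b}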
Verification program:
#include"BP.h" int main() { vector<int> layer_num = { 1, 10, 1 }; vector<vector<double>> input_a0 = { { 1,2,3,4,5,6,7,8,9,10 } }; vector<vector<double>> output_y = { {0,0,0,0,1,1,1,1,1,1} }; BP bp(layer_num, input_a0,output_y,0.6,0.001, 2000); bp.run(); for (int j = 0; j < 30; j++) { vector<double> input = { 0.5*j }; vector<double> output = bp.predict(input); for (auto i : output) cout << "j:" << 0.5*j <<" pridict:" << i << " "; cout << endl; } system("pause"); return 0; }
Output:
That's all for this article. I hope it helps with your learning, and I hope you'll continue to support 腳本之家.