Implementing a BP Neural Network in C++
Updated: 2020-05-25 14:15:33  Author: 悟名堂
This article walks through a C++ implementation of a BP (back-propagation) neural network. The complete example code is shared below for reference; the details are as follows.
BP.h
#pragma once
#include <vector>
#include <stdlib.h>
#include <time.h>
#include <cmath>
#include <iostream>
using std::vector;
using std::exp;
using std::cout;
using std::endl;

class BP
{
private:
    int studyNum;                      // maximum number of training iterations
    double h;                          // learning rate
    double allowError;                 // allowed error (training stops once below this)
    vector<int> layerNum;              // number of nodes in each layer, excluding the constant bias node
    vector<vector<vector<double>>> w;  // weights
    vector<vector<vector<double>>> dw; // weight increments (accumulated gradients)
    vector<vector<double>> b;          // biases
    vector<vector<double>> db;         // bias increments (accumulated gradients)
    vector<vector<vector<double>>> a;  // node activations
    vector<vector<double>> x;          // inputs
    vector<vector<double>> y;          // expected outputs
    void iniwb();                      // initialize w and b
    void inidwdb();                    // reset dw and db to zero
    double sigmoid(double z);          // activation function
    void forward();                    // forward propagation
    void backward();                   // back propagation
    double Error();                    // compute the training error
public:
    BP(vector<int> const& layer_num, vector<vector<double>> const& input_a0,
        vector<vector<double>> const& output_y, double hh = 0.5, double allerror = 0.001, int studynum = 1000);
    BP();
    void setLayerNumInput(vector<int> const& layer_num, vector<vector<double>> const& input);
    void setOutputy(vector<vector<double>> const& output_y);
    void setHErrorStudyNum(double hh, double allerror, int studynum);
    void run();                        // train the BP neural network
    vector<double> predict(vector<double>& input); // predict with the trained network
    ~BP();
};
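Before reading the implementation, it helps to fix the index conventions that these nested vectors follow throughout BP.cpp (inferred from the code below):

// w[l][j][k] - weight from node k in layer l to node j in layer l + 1
// b[l][j]    - bias of node j in layer l + 1
// a[l][i][j] - activation of node i in layer l for training sample j
// x[i][j]    - value of input node i in sample j; x.size() == layerNum[0],
//              and x[0].size() is the number of training samples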
BP.cpp
#include "BP.h"

BP::BP(vector<int> const& layer_num, vector<vector<double>> const& input,
    vector<vector<double>> const& output_y, double hh, double allerror, int studynum)
{
    layerNum = layer_num;
    x = input; // x[i][j]: value of input node i for sample j
    y = output_y;
    h = hh;
    allowError = allerror;
    a.resize(layerNum.size()); // one activation matrix per layer
    for (int i = 0; i < layerNum.size(); i++)
    {
        a[i].resize(layerNum[i]); // one row per node in this layer
        for (int j = 0; j < layerNum[i]; j++)
            a[i][j].resize(input[0].size()); // one column per training sample
    }
    a[0] = input;
    studyNum = studynum;
}
BP::BP()
{
    layerNum = {};
    a = {};
    y = {};
    h = 0;
    allowError = 0;
    studyNum = 0; // left uninitialized in the original; zeroed here for safety
}

BP::~BP()
{
}
void BP::setLayerNumInput(vector<int> const& layer_num, vector<vector<double>> const& input)
{
    layerNum = layer_num;
    x = input;
    a.resize(layerNum.size()); // one activation matrix per layer
    for (int i = 0; i < layerNum.size(); i++)
    {
        a[i].resize(layerNum[i]); // one row per node in this layer
        for (int j = 0; j < layerNum[i]; j++)
            a[i][j].resize(input[0].size()); // one column per training sample
    }
    a[0] = input;
}

void BP::setOutputy(vector<vector<double>> const& output_y)
{
    y = output_y;
}

void BP::setHErrorStudyNum(double hh, double allerror, int studynum)
{
    h = hh;
    allowError = allerror;
    studyNum = studynum;
}
//initialize the weight matrices
void BP::iniwb()
{
    w.resize(layerNum.size() - 1);
    b.resize(layerNum.size() - 1);
    srand((unsigned)time(NULL));
    // one weight layer between each pair of consecutive node layers
    for (int l = 0; l < layerNum.size() - 1; l++)
    {
        w[l].resize(layerNum[l + 1]);
        b[l].resize(layerNum[l + 1]);
        // nodes of the next layer
        for (int j = 0; j < layerNum[l + 1]; j++)
        {
            w[l][j].resize(layerNum[l]);
            // random value in [-1, 1); note the cast - the original's integer
            // division rand() / RAND_MAX would almost always yield 0
            b[l][j] = -1 + 2 * (rand() / (double)RAND_MAX);
            // nodes of the previous layer
            for (int k = 0; k < layerNum[l]; k++)
                w[l][j][k] = -1 + 2 * (rand() / (double)RAND_MAX);
        }
    }
}
void BP::inidwdb()
{
    dw.resize(layerNum.size() - 1);
    db.resize(layerNum.size() - 1);
    // one weight layer between each pair of consecutive node layers
    for (int l = 0; l < layerNum.size() - 1; l++)
    {
        dw[l].resize(layerNum[l + 1]);
        db[l].resize(layerNum[l + 1]);
        // nodes of the next layer
        for (int j = 0; j < layerNum[l + 1]; j++)
        {
            dw[l][j].resize(layerNum[l]);
            db[l][j] = 0;
            // nodes of the previous layer
            for (int k = 0; k < layerNum[l]; k++)
                dw[l][j][k] = 0; // the original wrote w[l][j][k] = 0 here, which would zero the weights
        }
    }
}
//activation function
double BP::sigmoid(double z)
{
    return 1.0 / (1 + exp(-z));
}
void BP::forward()
{
    for (int l = 1; l < layerNum.size(); l++)
    {
        for (int i = 0; i < layerNum[l]; i++)
        {
            for (int j = 0; j < x[0].size(); j++)
            {
                a[l][i][j] = 0; // node i of layer l, sample j
                // weighted sum over the previous layer's nodes
                for (int k = 0; k < layerNum[l - 1]; k++)
                    a[l][i][j] += a[l - 1][k][j] * w[l - 1][i][k];
                // add the node's bias
                a[l][i][j] += b[l - 1][i];
                a[l][i][j] = sigmoid(a[l][i][j]);
            }
        }
    }
}
void BP::backward()
{
    int xNum = x[0].size(); // number of training samples
    // daP: dE/da for the current layer l; daB: dE/da for layer l + 1
    vector<double> daP, daB;
    for (int j = 0; j < xNum; j++)
    {
        // output layer: accumulate dw and db
        daP.clear();
        daP.resize(layerNum[layerNum.size() - 1]);
        for (int i = 0, l = layerNum.size() - 1; i < layerNum[l]; i++)
        {
            daP[i] = a[l][i][j] - y[i][j];
            for (int k = 0; k < layerNum[l - 1]; k++)
                dw[l - 1][i][k] += daP[i] * a[l][i][j] * (1 - a[l][i][j]) * a[l - 1][k][j];
            db[l - 1][i] += daP[i] * a[l][i][j] * (1 - a[l][i][j]);
        }
        // remaining layers: propagate dE/da backwards and accumulate dw and db
        for (int l = layerNum.size() - 2; l > 0; l--)
        {
            daB = daP;
            daP.clear();
            daP.resize(layerNum[l]);
            for (int k = 0; k < layerNum[l]; k++)
            {
                daP[k] = 0;
                for (int i = 0; i < layerNum[l + 1]; i++)
                    daP[k] += daB[i] * a[l + 1][i][j] * (1 - a[l + 1][i][j]) * w[l][i][k];
                // dw
                for (int i = 0; i < layerNum[l - 1]; i++)
                    dw[l - 1][k][i] += daP[k] * a[l][k][j] * (1 - a[l][k][j]) * a[l - 1][i][j];
                // db
                db[l - 1][k] += daP[k] * a[l][k][j] * (1 - a[l][k][j]);
            }
        }
    }
    // average dw and db over the samples
    for (int l = 0; l < layerNum.size() - 1; l++)
    {
        // nodes of the next layer
        for (int j = 0; j < layerNum[l + 1]; j++)
        {
            db[l][j] = db[l][j] / xNum;
            // nodes of the previous layer
            for (int k = 0; k < layerNum[l]; k++)
                dw[l][j][k] = dw[l][j][k] / xNum; // the original divided w here, corrupting the weights
        }
    }
    // update the parameters w and b
    for (int l = 0; l < layerNum.size() - 1; l++)
    {
        for (int j = 0; j < layerNum[l + 1]; j++)
        {
            b[l][j] = b[l][j] - h * db[l][j];
            // nodes of the previous layer
            for (int k = 0; k < layerNum[l]; k++)
                w[l][j][k] = w[l][j][k] - h * dw[l][j][k];
        }
    }
}
double BP::Error()
{
    int l = layerNum.size() - 1;
    double temp = 0, error = 0;
    for (int i = 0; i < layerNum[l]; i++)
        for (int j = 0; j < x[0].size(); j++)
        {
            temp = a[l][i][j] - y[i][j];
            error += temp * temp;
        }
    error = error / x[0].size(); // average the squared error over the samples
    error = error / 2;
    cout << error << endl;
    return error;
}
//train the neural network
void BP::run()
{
    iniwb();
    int i = 0;
    for (; i < studyNum; i++)
    {
        forward();
        if (Error() <= allowError)
        {
            cout << "Study Success!" << endl;
            break;
        }
        // reset the gradient accumulators before each backward pass; the
        // original called inidwdb() only once, so gradients piled up across iterations
        inidwdb();
        backward();
    }
    if (i == studyNum) // the original compared against a hard-coded 10000 here
        cout << "Study Failed!" << endl;
}
vector<double> BP::predict(vector<double>& input)
{
    vector<vector<double>> a1;
    a1.resize(layerNum.size());
    for (int l = 0; l < layerNum.size(); l++)
        a1[l].resize(layerNum[l]);
    a1[0] = input;
    for (int l = 1; l < layerNum.size(); l++)
        for (int i = 0; i < layerNum[l]; i++)
        {
            a1[l][i] = 0; // node i of layer l (single sample)
            // weighted sum over the previous layer's nodes
            for (int k = 0; k < layerNum[l - 1]; k++)
                a1[l][i] += a1[l - 1][k] * w[l - 1][i][k];
            // add the node's bias
            a1[l][i] += b[l - 1][i];
            a1[l][i] = sigmoid(a1[l][i]);
        }
    return a1[layerNum.size() - 1];
}
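For reference, the quantities that backward() accumulates are the usual squared-error/sigmoid gradients, matching the code above. With m samples and loss E = (1/2m) Σ_j Σ_i (a_i^L − y_i)², which is exactly what Error() prints, each sample contributes

∂E/∂a_i^L = a_i^L − y_i,
∂E/∂a_k^l = Σ_i ∂E/∂a_i^{l+1} · a_i^{l+1}(1 − a_i^{l+1}) · w_{ik}^l,
dw_{ki}^{l−1} += ∂E/∂a_k^l · a_k^l(1 − a_k^l) · a_i^{l−1},
db_k^{l−1} += ∂E/∂a_k^l · a_k^l(1 − a_k^l),

where a(1 − a) is the derivative of the sigmoid. The accumulated increments are averaged over the samples and applied as w ← w − h·dw, b ← b − h·db.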
Test program:
#include "BP.h"

int main()
{
    vector<int> layer_num = { 1, 10, 1 };
    vector<vector<double>> input_a0 = { { 1,2,3,4,5,6,7,8,9,10 } };
    vector<vector<double>> output_y = { { 0,0,0,0,1,1,1,1,1,1 } };
    BP bp(layer_num, input_a0, output_y, 0.6, 0.001, 2000);
    bp.run();
    for (int j = 0; j < 30; j++)
    {
        vector<double> input = { 0.5 * j };
        vector<double> output = bp.predict(input);
        for (auto i : output)
            cout << "j:" << 0.5 * j << " predict:" << i << " ";
        cout << endl;
    }
    system("pause");
    return 0;
}
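If you prefer to configure the network after construction, the class also exposes a setter-based workflow. Here is a minimal sketch using only the member functions declared in BP.h, with the same toy data as above (the probe input 4.2 is just an arbitrary example value):

#include "BP.h"

int main()
{
    BP bp; // default-construct, then configure
    bp.setLayerNumInput({ 1, 10, 1 }, { { 1,2,3,4,5,6,7,8,9,10 } });
    bp.setOutputy({ { 0,0,0,0,1,1,1,1,1,1 } });
    bp.setHErrorStudyNum(0.6, 0.001, 2000); // learning rate, allowed error, max iterations
    bp.run();
    vector<double> input = { 4.2 }; // predict() takes a non-const reference, so bind an lvalue
    vector<double> output = bp.predict(input);
    for (auto v : output)
        cout << v << " ";
    cout << endl;
    return 0;
}

Both programs compile together with BP.cpp using any C++11 (or later) compiler.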
Output:
(screenshot of the console output omitted)
That's all for this article. I hope it helps with your studies, and I hope you will continue to support 腳本之家.