Rewritten C++ BP class (untested)

Posted by devil1980 on 2006-3-28 17:28 (post #1)
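
For reference, the classes below follow the standard backpropagation rules for a network with one hidden layer and sigmoid activations. In generic notation (x for inputs, h for hidden outputs, y for network outputs, t for teacher values, alpha and beta for the two learning rates), the computation is roughly:

$$h_j = \sigma\Big(\sum_i w_{ji} x_i - \theta^h_j\Big), \qquad y_k = \sigma\Big(\sum_j v_{kj} h_j - \theta^o_k\Big), \qquad \sigma(z) = \frac{1}{1+e^{-z}}$$

$$d_k = (t_k - y_k)\, y_k (1-y_k), \qquad e_j = \Big(\sum_k d_k\, v_{kj}\Big)\, h_j (1-h_j)$$

$$v_{kj} \leftarrow v_{kj} + \alpha\, d_k h_j, \qquad w_{ji} \leftarrow w_{ji} + \beta\, e_j x_i, \qquad E_m = \tfrac{1}{2}\sum_k (t_k - y_k)^2$$

The thresholds are adjusted with the same deltas, and the global error reported by GlobalErrSum() is the sum of the per-sample errors E_m.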

// BpNetWork.cpp: implementation of the CBpNetWork class.
//
//////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include "BpNetWork.h"
#include "iostream.h"

CBpNetWork::CBpNetWork()
{
 m_npBeta = new double;               // learning rate, hidden layer to input layer
 *m_npBeta = 0.2;
 m_npAlpha = new double;              // learning rate, output layer to hidden layer
 *m_npAlpha = 0.2;
 m_npTotalErrofM = NULL;              // total error of the m-th sample
 m_npOutputLayerThreshold = NULL;     // output-layer thresholds
 m_npHideLayerThreshold = NULL;       // hidden-layer thresholds
 m_npOutputLayerOutputValue = NULL;   // output-layer outputs
 m_npHideOutputValue = NULL;          // hidden-layer outputs
 m_npOutputLayerInputValue = NULL;    // output-layer net inputs
 m_npHideInputValue = NULL;           // hidden-layer net inputs
 m_npHideToOutputWeightMatrix = NULL; // hidden-to-output weights
 m_npInputToHideWeightMatrix = NULL;  // input-to-hidden weights
 m_fpSingleTrainingSample = NULL;     // teacher data of a single sample
 m_fpSingleInputSample = NULL;        // input data of a single sample
 m_fpE_Err = NULL;
 m_fpD_Err = NULL;
 m_fpInput = NULL;
 m_fpTeach = NULL;
 m_spStudy_Data = NULL;
 m_fpAbsErrValBefore = NULL;
 m_nLearningSampleNumber = 0;         // number of training samples
 m_nOutputNetNumber = 0;              // number of output-layer neurons
 m_nHideNetNumber = 0;                // number of hidden-layer neurons
 m_nInputNetNumber = 0;               // number of input-layer neurons
 Init();
}

CBpNetWork::~CBpNetWork()
{
 if(m_npBeta) delete m_npBeta, m_npBeta = NULL;
 if(m_npAlpha) delete m_npAlpha, m_npAlpha = NULL;
 if(m_npTotalErrofM) delete []m_npTotalErrofM, m_npTotalErrofM = NULL;
 if(m_npOutputLayerThreshold) delete []m_npOutputLayerThreshold, m_npOutputLayerThreshold = NULL;
 if(m_npHideLayerThreshold) delete []m_npHideLayerThreshold, m_npHideLayerThreshold = NULL;
 if(m_npOutputLayerOutputValue) delete []m_npOutputLayerOutputValue, m_npOutputLayerOutputValue = NULL;
 if(m_npHideOutputValue) delete []m_npHideOutputValue, m_npHideOutputValue = NULL;
 if(m_npOutputLayerInputValue) delete []m_npOutputLayerInputValue, m_npOutputLayerInputValue = NULL;
 if(m_npHideInputValue) delete []m_npHideInputValue, m_npHideInputValue = NULL;
 if(m_npHideToOutputWeightMatrix) delete []m_npHideToOutputWeightMatrix, m_npHideToOutputWeightMatrix = NULL;
 if(m_npInputToHideWeightMatrix) delete []m_npInputToHideWeightMatrix, m_npInputToHideWeightMatrix = NULL;
 if(m_fpSingleTrainingSample) delete []m_fpSingleTrainingSample, m_fpSingleTrainingSample = NULL;
 if(m_fpSingleInputSample) delete []m_fpSingleInputSample, m_fpSingleInputSample = NULL;
 if(m_fpE_Err) delete []m_fpE_Err, m_fpE_Err = NULL;
 if(m_fpD_Err) delete []m_fpD_Err, m_fpD_Err = NULL;
 if(m_fpAbsErrValBefore) delete []m_fpAbsErrValBefore, m_fpAbsErrValBefore = NULL;
 Study_Data_T *p = m_spStudy_Data;
for(int i = 0;i< m_nLearningSampleNumber;i++)
  for(int j = 0; j< m_nInputNetNumber; j++)
 {
 
 m_fpInput = m_spStudy_Data->input;
 m_fpTeach = m_spStudy_Data->teach;
 if(m_fpInput) delete []m_fpInput, m_fpInput = NULL;
 if(m_fpTeach) delete []m_fpTeach, m_fpTeach = NULL;
 m_spStudy_Data++;
 }
 delete []p;
}


CBpNetWork::CBpNetWork(int LearningSplNum, int OutLayNumber, int HideLayNumber, int inLayNum)
{
 m_nLearningSampleNumber = LearningSplNum;
 m_nOutputNetNumber = OutLayNumber;
 m_nHideNetNumber = HideLayNumber;
 m_nInputNetNumber = inLayNum;
 Init();
}
//initialize weights and thresholds
bool CBpNetWork::Init()
{
 if(!m_nLearningSampleNumber||!m_nOutputNetNumber||!m_nHideNetNumber||!m_nInputNetNumber)
  return false;
 float sgn;
 float rnd;
 int i,j;
 m_npInputToHideWeightMatrix = new double[m_nHideNetNumber*m_nInputNetNumber];
 m_npHideLayerThreshold = new double[m_nHideNetNumber];
 m_npHideToOutputWeightMatrix = new double[m_nOutputNetNumber*m_nHideNetNumber];
 m_fpSingleInputSample = new double[m_nInputNetNumber];      // one value per input neuron
 m_fpSingleTrainingSample = new double[m_nOutputNetNumber];
 m_npHideInputValue = new double[m_nHideNetNumber];
 m_npHideOutputValue = new double[m_nHideNetNumber];         // hidden-layer outputs, used by HideLayInOut()
 m_npOutputLayerOutputValue = new double[m_nOutputNetNumber];
 m_npOutputLayerInputValue = new double[m_nOutputNetNumber];
 m_npOutputLayerThreshold = new double[m_nOutputNetNumber];
 m_npTotalErrofM = new double[m_nLearningSampleNumber];
 m_fpE_Err = new double[m_nHideNetNumber];
 m_fpD_Err = new double[m_nOutputNetNumber];
 m_fpAbsErrValBefore = new double[m_nOutputNetNumber];

 m_spStudy_Data = new Study_Data_T[m_nLearningSampleNumber*m_nInputNetNumber];
 Study_Data_T *sp = m_spStudy_Data;   // fill through a cursor so the member keeps pointing at the start
 for( i = 0; i < m_nLearningSampleNumber; i++)
  for( j = 0; j < m_nInputNetNumber; j++)
  {
   m_fpInput = new double[m_nInputNetNumber];
   m_fpTeach = new double[m_nOutputNetNumber];
   sp->input = m_fpInput;
   sp->teach = m_fpTeach;
   sp++;
  }
// initialize hidden-layer weights and thresholds
 for(j = 0; j < m_nHideNetNumber; j++)
  for(i = 0; i < m_nInputNetNumber; i++)
  {
   //sgn=pow((-1),random(100));
   sgn = (float)rand()/RAND_MAX;
   rnd = sgn*(rand()%10);
   m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i] = rnd/100;// initialize input-to-hidden weight
  }
//randomize();
 for(j = 0; j < m_nHideNetNumber; j++)
 {
  //sgn=pow((-1),random(1000));
  sgn = (float)rand()/RAND_MAX;
  rnd = sgn*(rand()%10);
  m_npHideLayerThreshold[j] = rnd/1000;// initialize hidden-layer threshold
 }
 // initialize output-layer weights and thresholds
 //randomize();
 for (int k = 0; k < m_nOutputNetNumber; k++)
  for (int j = 0; j < m_nHideNetNumber; j++)
  {
   //sgn=pow((-1),random(1000));
   sgn = (float)rand()/RAND_MAX;
   rnd = sgn*(rand()%10);
   m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j] = rnd/100;// initialize hidden-to-output weight
  }
 //randomize();
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  //sgn=pow((-1),random(10));
  sgn = (float)rand()/RAND_MAX;
  rnd = sgn*(rand()%10);
  m_npOutputLayerThreshold[k] = rnd/10;// initialize output-layer threshold
 }
 return true;


}
//load the teacher data of the m-th training sample
void CBpNetWork::InputOfNoMTrainingData(int m, Study_Data_T * tnspl)
{
 for (int k = 0; k < m_nOutputNetNumber; k++)   // one target value per output neuron
  m_fpSingleTrainingSample[k] = tnspl[m].teach[k];
}
//subroutine: load the input of the m-th training sample
void CBpNetWork::InputOfNoMSampleData(int m, Study_Data_T * lnspl)
{
 for (int i = 0; i < m_nInputNetNumber; i++)
  m_fpSingleInputSample[i] = lnspl[m].input[i];   // copy one value per input neuron
  
}
//subroutine: net input and output of each hidden-layer unit
void CBpNetWork::HideLayInOut()
{
 double sigma;
 int i,j;
 for (j = 0; j < m_nHideNetNumber; j++)
 {
  sigma = 0.0;
  for (i = 0; i < m_nInputNetNumber; i++)
   sigma += m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i]*m_fpSingleInputSample[i];// inner product for the hidden layer

  m_npHideInputValue[j] = sigma - m_npHideLayerThreshold[j];// net input of the hidden layer
  m_npHideOutputValue[j] = 1.0/(1.0+exp(-m_npHideInputValue[j]));// output of the hidden layer (sigmoid)
 }
     
}
//subroutine: net input and output of each output-layer unit
void CBpNetWork::OutLayInOut()
{
 double sigma;
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  sigma = 0.0;
  for (int j = 0; j < m_nHideNetNumber; j++)
   sigma += m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j]*m_npHideOutputValue[j];// inner product for the output layer

  m_npOutputLayerInputValue[k] = sigma - m_npOutputLayerThreshold[k];// net input of the output layer
  m_npOutputLayerOutputValue[k] = 1.0/(1.0+exp(-m_npOutputLayerInputValue[k]));// output of the output layer (sigmoid)
 }
  
}
//subroutine: generalized error from the output layer back to the hidden layer
void CBpNetWork::NormalizeErrOutLayToHideLay(int m)
{
 double *abs_err = new double[m_nOutputNetNumber];// each sample's absolute error starts from zero
 double sqr_err = 0;// each sample's squared error starts from zero
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  abs_err[k] = m_fpSingleTrainingSample[k]-m_npOutputLayerOutputValue[k];
  if(abs_err[k] == 0)
   abs_err[k] = 0.00001;
  // adapt the learning rates according to the error
  if(abs_err[k] > m_fpAbsErrValBefore[k])
  {
   *m_npAlpha += (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npAlpha;
   *m_npBeta += (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npBeta;
  }
  else
  {
   *m_npAlpha -= (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npAlpha;
   *m_npBeta -= (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npBeta;
  }
  m_fpAbsErrValBefore[k] = abs_err[k];
  // absolute error of the k-th output neuron for the m-th sample
  sqr_err += (abs_err[k])*(abs_err[k]);// accumulate the squared error of the output layer
  m_fpD_Err[k] = abs_err[k]*m_npOutputLayerOutputValue[k]*(1.0-m_npOutputLayerOutputValue[k]);// generalized error of each output-layer neuron
 }
 m_npTotalErrofM[m] = sqr_err/2;// half of the squared error = mean-square error of the m-th sample
 delete []abs_err;
}

void CBpNetWork::NormalizeErrHideLayToOutLay()
{
 double sigma;
 for (int j = 0; j < m_nHideNetNumber; j++)
 {
  sigma = 0.0;
  for (int k = 0; k < m_nOutputNetNumber; k++)
   sigma += m_fpD_Err[k]*m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j];   // back-propagate the output deltas

  m_fpE_Err[j] = sigma*m_npHideOutputValue[j]*(1.0-m_npHideOutputValue[j]);// generalized error of each hidden-layer neuron (sigmoid derivative h*(1-h))
  }
}
//subroutine: adjust the hidden-to-output weights and the output-layer thresholds
void CBpNetWork::AdjustWeightAndThresholdOutLayToHideLay(int m, FILE * fp)
{
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  for (int j = 0; j < m_nHideNetNumber; j++)
  {
   m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j] += *m_npAlpha*m_fpD_Err[k]*m_npHideOutputValue[j];// adjust hidden-to-output weight
  }
  m_npOutputLayerThreshold[k] += *m_npAlpha*m_fpD_Err[k];// adjust output-layer threshold
 }
}
//subroutine: global error over all m_nLearningSampleNumber samples
double CBpNetWork::GlobalErrSum()
{
 double total_err = 0;
 for (int m = 0; m < m_nLearningSampleNumber; m++)
  total_err += m_npTotalErrofM[m];// summing each sample's mean-square error gives the global error
      return total_err;
}
//subroutine: adjust the input-to-hidden weights and the hidden-layer thresholds
void CBpNetWork::AdjustWeightAndThresholdHideLayToOutLay(int m,FILE * fp)
{
 for (int j = 0; j < m_nHideNetNumber; j++)
 {
  for (int i = 0; i < m_nInputNetNumber; i++)
  {
   //fprintf(fp,"input-to-hidden weight for sample %d: %f\n",m,m_npInputToHideWeightMatrix[j]);  // debug output, disabled
   m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i] += *m_npBeta*m_fpE_Err[j]*m_fpSingleInputSample[i];// adjust input-to-hidden weight
  }
  m_npHideLayerThreshold[j] += *m_npBeta*m_fpE_Err[j];
 }
}

void CBpNetWork::Excute(Study_Data_T * spl, double err, int count)
{
 double temp_err = 0;
 int local_count =0;
 if(count == 0)
  count = 1000;
 if(!spl)
  return;
 do
 {
 Tarning(spl);
 temp_err = GlobalErrSum();
 local_count++;
 }while(temp_err>err&&local_count<count);
}

void CBpNetWork::Tarning(Study_Data_T * spl)
{
  int i = 0;
  Study_Data_T * tlnspl =spl;
  Study_Data_T * ttnspl =spl;
 for(; tlnspl != NULL && ttnspl != NULL && i < m_nLearningSampleNumber*m_nInputNetNumber; i++)
 {
  InputOfNoMSampleData(i, tlnspl);
  InputOfNoMTrainingData(i, ttnspl);
  HideLayInOut();
  OutLayInOut();
  NormalizeErrOutLayToHideLay(i);
  NormalizeErrHideLayToOutLay();
  AdjustWeightAndThresholdOutLayToHideLay(i,NULL);
  AdjustWeightAndThresholdHideLayToOutLay(i,NULL);
 }
}

 

// header file

// BpNetWork.h: interface for the CBpNetWork class.
//
//////////////////////////////////////////////////////////////////////

#if !defined(AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_)
#define AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_

#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000

 


#include "math.h"


typedef struct Study_Data_S {
   double *input;
   double *teach;
  } Study_Data_T;
class CBpNetWork 
{
public:

 void Tarning(Study_Data_T * spl);
 void Excute(Study_Data_T * spl, double err, int count);
 void AdjustWeightAndThresholdHideLayToOutLay(int m, FILE * fp);
 double GlobalErrSum();
 void AdjustWeightAndThresholdOutLayToHideLay(int m, FILE * fp);
 void NormalizeErrHideLayToOutLay();
 void NormalizeErrOutLayToHideLay(int m);
 void OutLayInOut();
 void HideLayInOut();
 void InputOfNoMSampleData(int m,Study_Data_T * lnspl);
 void InputOfNoMTrainingData(int m,Study_Data_T * lnspl);
 bool Init();
 CBpNetWork(int LearningSplNum, int OutLayNumber,int HideLayNumber,int inLayNum);

 double* m_npBeta;// learning rate, hidden layer to input layer
 double* m_npAlpha;// learning rate, output layer to hidden layer
 double* m_npTotalErrofM;// total error of the m-th sample
 double* m_npOutputLayerThreshold;// output-layer thresholds
 double* m_npHideLayerThreshold;// hidden-layer thresholds
 double* m_npOutputLayerOutputValue;// output-layer outputs
 double* m_npHideOutputValue;// hidden-layer outputs
 double* m_npOutputLayerInputValue;// output-layer net inputs
 double* m_npHideInputValue;// hidden-layer net inputs
 double* m_npHideToOutputWeightMatrix;// hidden-to-output weights
 double* m_npInputToHideWeightMatrix;// input-to-hidden weights
 double* m_fpSingleTrainingSample;// teacher data of a single sample
 double* m_fpSingleInputSample;// input data of a single sample
 double* m_fpInput;
 double* m_fpTeach;
 double* m_fpD_Err;
 double* m_fpE_Err;
 double * m_fpAbsErrValBefore;
 Study_Data_T *m_spStudy_Data;

 CBpNetWork();
 virtual ~CBpNetWork();

private:
 int m_nLearningSampleNumber;// number of training samples
 int m_nOutputNetNumber;// number of output-layer neurons
 int m_nHideNetNumber;// number of hidden-layer neurons
 int m_nInputNetNumber;// number of input-layer neurons
};

#endif // !defined(AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_)
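
To give a sense of how this class is meant to be driven, here is a minimal, hypothetical usage sketch (not from the original post): it builds a small training set in the Study_Data_T layout the class expects, constructs the network with the four size parameters, and calls Excute with a target error and an iteration cap. The sample values and sizes are made up for illustration.

// hypothetical driver, assuming the class above is built as BpNetWork.h/.cpp
#include "stdafx.h"
#include "BpNetWork.h"

int main()
{
 const int samples = 10, inputs = 1, hidden = 10, outputs = 1;

 // one Study_Data_T per sample; each holds its own input and teacher arrays
 Study_Data_T *data = new Study_Data_T[samples];
 for (int m = 0; m < samples; m++)
 {
  data[m].input = new double[inputs];
  data[m].teach = new double[outputs];
  data[m].input[0] = m/10.0;             // made-up input value
  data[m].teach[0] = (m/10.0)*(m/10.0);  // made-up target value
 }

 CBpNetWork net(samples, outputs, hidden, inputs);
 net.Excute(data, 0.01, 1000);           // train until the global error falls below 0.01 or 1000 passes

 for (int m = 0; m < samples; m++)
 {
  delete []data[m].input;
  delete []data[m].teach;
 }
 delete []data;
 return 0;
}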


Reply by devil1980: BP class that passed testing; the Total error converges

// BpNetWork.cpp: implementation of the CBpNetWork class.
//
//////////////////////////////////////////////////////////////////////
//Author mail: os.win2k@163.com
//////////////////////////////////////////////////////////////////////
// Construction/Destruction
//////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include "BpNetWork.h"
#include "iostream.h"

CBpNetWork::CBpNetWork()
{
 m_npBeta = new double;               // learning rate, hidden layer to input layer
 *m_npBeta = 0.002;
 m_npAlpha = new double;              // learning rate, output layer to hidden layer
 *m_npAlpha = 0.002;
 m_npTotalErrofM = NULL;              // total error of the m-th sample
 m_npOutputLayerThreshold = NULL;     // output-layer thresholds
 m_npHideLayerThreshold = NULL;       // hidden-layer thresholds
 m_npOutputLayerOutputValue = NULL;   // output-layer outputs
 m_npHideOutputValue = NULL;          // hidden-layer outputs
 m_npOutputLayerInputValue = NULL;    // output-layer net inputs
 m_npHideInputValue = NULL;           // hidden-layer net inputs
 m_npHideToOutputWeightMatrix = NULL; // hidden-to-output weights
 m_npInputToHideWeightMatrix = NULL;  // input-to-hidden weights
 m_fpSingleTrainingSample = NULL;     // teacher data of a single sample
 m_fpSingleInputSample = NULL;        // input data of a single sample
 m_fpE_Err = NULL;
 m_fpD_Err = NULL;
 m_fpInput = NULL;
 m_fpTeach = NULL;
 m_spStudy_Data = NULL;
 m_fpAbsErrValBefore = NULL;
 m_nLearningSampleNumber = 10;        // number of training samples (default)
 m_nOutputNetNumber = 1;              // number of output-layer neurons (default)
 m_nHideNetNumber = 10;               // number of hidden-layer neurons (default)
 m_nInputNetNumber = 1;               // number of input-layer neurons (default)
 Init();
}

CBpNetWork::~CBpNetWork()
{
 if(m_npBeta!=NULL) delete m_npBeta,m_npBeta = NULL;
 if(NULL != m_npAlpha) delete m_npAlpha,m_npAlpha = NULL;
 if(NULL != m_npTotalErrofM) delete []m_npTotalErrofM, m_npTotalErrofM = NULL;
 if(NULL != m_npOutputLayerThreshold) delete []m_npOutputLayerThreshold, m_npOutputLayerThreshold= NULL;
 if(NULL != m_npHideLayerThreshold) delete[]m_npHideLayerThreshold, m_npHideLayerThreshold =NULL;
 if(NULL != m_npOutputLayerOutputValue) delete[]m_npOutputLayerOutputValue,m_npOutputLayerOutputValue = NULL;
 if(NULL != m_npHideOutputValue) delete []m_npHideOutputValue,m_npHideOutputValue=NULL;
 if(NULL != m_npOutputLayerInputValue) delete []m_npOutputLayerInputValue,m_npOutputLayerInputValue= NULL;
 if(NULL != m_npHideInputValue) delete []m_npHideInputValue, m_npHideInputValue = NULL;
 if(NULL != m_npHideToOutputWeightMatrix) delete []m_npHideToOutputWeightMatrix, m_npHideToOutputWeightMatrix = NULL;
 if(NULL != m_npInputToHideWeightMatrix) delete []m_npInputToHideWeightMatrix,m_npInputToHideWeightMatrix = NULL;
 if(NULL != m_fpSingleTrainingSample) delete []m_fpSingleTrainingSample,m_fpSingleTrainingSample = NULL;
 if(NULL != m_fpSingleInputSample) delete []m_fpSingleInputSample, m_fpSingleInputSample =NULL;
 if(NULL != m_fpE_Err) delete []m_fpE_Err, m_fpE_Err =NULL;
 if(NULL != m_fpD_Err) delete []m_fpD_Err, m_fpD_Err = NULL;
 if(NULL != m_fpAbsErrValBefore) delete []m_fpAbsErrValBefore, m_fpAbsErrValBefore= NULL;
 Study_Data_T *p = m_spStudy_Data;
 
for(int i = 0;i< m_nLearningSampleNumber;i++)
  for(int j = 0; j< m_nInputNetNumber; j++)
 {
 
 m_fpInput = m_spStudy_Data->input;
 m_fpTeach = m_spStudy_Data->teach;
 if(NULL != m_fpInput) delete []m_fpInput, m_fpInput = NULL;
 if(NULL != m_fpTeach) delete []m_fpTeach, m_fpTeach = NULL;
 m_spStudy_Data++;
 }
 delete []p;
}


CBpNetWork::CBpNetWork(int LearningSplNum, int OutLayNumber, int HideLayNumber, int inLayNum)
{
 m_nLearningSampleNumber = LearningSplNum;
 m_nOutputNetNumber = OutLayNumber;
 m_nHideNetNumber = HideLayNumber;
 m_nInputNetNumber = inLayNum;
 Init();
}
//initialize weights and thresholds
bool CBpNetWork::Init()
{
 if(!m_nLearningSampleNumber||!m_nOutputNetNumber||!m_nHideNetNumber||!m_nInputNetNumber)
  return false;
 float sgn;
 float rnd;
 int i,j;
 m_npInputToHideWeightMatrix = new double [m_nHideNetNumber*m_nInputNetNumber];
 Set(m_npInputToHideWeightMatrix,m_nHideNetNumber*m_nInputNetNumber);

 m_npHideLayerThreshold = new double[m_nHideNetNumber];
 Set(m_npHideLayerThreshold,m_nHideNetNumber);

 m_npHideToOutputWeightMatrix = new double[m_nOutputNetNumber*m_nHideNetNumber];
 Set(m_npHideToOutputWeightMatrix,m_nOutputNetNumber*m_nHideNetNumber);

 m_fpSingleInputSample = new double[m_nInputNetNumber];   // one value per input neuron
 Set(m_fpSingleInputSample,m_nInputNetNumber);

 m_fpSingleTrainingSample = new double [m_nOutputNetNumber];
 Set(m_fpSingleTrainingSample,m_nOutputNetNumber);
 m_npHideInputValue = new double[m_nHideNetNumber];
 Set(m_npHideInputValue,m_nHideNetNumber);

 m_npOutputLayerOutputValue = new double[m_nOutputNetNumber];
 Set(m_npOutputLayerOutputValue,m_nOutputNetNumber);

 m_npOutputLayerInputValue = new double[m_nOutputNetNumber];
 Set(m_npOutputLayerInputValue,m_nOutputNetNumber);


 m_npOutputLayerThreshold = new double [m_nOutputNetNumber];
 Set(m_npOutputLayerThreshold,m_nOutputNetNumber);

 m_npTotalErrofM = new double[m_nLearningSampleNumber];
 Set(m_npTotalErrofM,m_nLearningSampleNumber);

 m_fpE_Err = new double[m_nHideNetNumber];
 Set(m_fpE_Err,m_nHideNetNumber);

 m_fpD_Err = new double[m_nOutputNetNumber];
 Set(m_fpD_Err,m_nOutputNetNumber);

 m_fpAbsErrValBefore = new double[m_nOutputNetNumber];
 Set(m_fpAbsErrValBefore,m_nOutputNetNumber);

 m_npHideOutputValue = new double[m_nHideNetNumber];
 Set(m_npHideOutputValue,m_nHideNetNumber);

 m_spStudy_Data = new Study_Data_T[m_nLearningSampleNumber*m_nInputNetNumber];
 Study_Data_T *sp = m_spStudy_Data;   // fill through a cursor so the member keeps pointing at the start
 for( i = 0; i < m_nLearningSampleNumber; i++)
  for( j = 0; j < m_nInputNetNumber; j++)
  {
   m_fpInput = new double[m_nInputNetNumber];
   m_fpTeach = new double[m_nOutputNetNumber];
   sp->input = m_fpInput;
   sp->teach = m_fpTeach;
   sp++;
  }
// initialize hidden-layer weights and thresholds
 for(j = 0; j < m_nHideNetNumber; j++)
  for(i = 0; i < m_nInputNetNumber; i++)
  {
   //sgn=pow((-1),random(100));
   sgn = (float)rand()/RAND_MAX;
   rnd = sgn*(rand()%10);
   m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i] = rnd/100;// initialize input-to-hidden weight
  }
//randomize();
 for(j = 0; j < m_nHideNetNumber; j++)
 {
  //sgn=pow((-1),random(1000));
  sgn = (float)rand()/RAND_MAX;
  rnd = sgn*(rand()%10);
  m_npHideLayerThreshold[j] = 0;// initialize hidden-layer threshold to zero
 }
 // initialize output-layer weights and thresholds
 //randomize();
 for (int k = 0; k < m_nOutputNetNumber; k++)
  for (int j = 0; j < m_nHideNetNumber; j++)
  {
   //sgn=pow((-1),random(1000));
   sgn = (float)rand()/RAND_MAX;
   rnd = sgn*(rand()%10);
   m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j] = rnd/100;// initialize hidden-to-output weight
  }
 //randomize();
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  //sgn=pow((-1),random(10));
  sgn = (float)rand()/RAND_MAX;
  rnd = sgn*(rand()%10);
  m_npOutputLayerThreshold[k] = 0;// initialize output-layer threshold to zero
 }
        return true;


}
//load the teacher data of the m-th training sample
void CBpNetWork::InputOfNoMTrainingData(int m, Study_Data_T * tnspl)
{
 for (int k = 0; k < m_nOutputNetNumber; k++)   // one target value per output neuron
  m_fpSingleTrainingSample[k] = tnspl[m].teach[k];
}
//subroutine: load the input of the m-th training sample
void CBpNetWork::InputOfNoMSampleData(int m, Study_Data_T * lnspl)
{
 for (int i = 0; i < m_nInputNetNumber; i++)
  m_fpSingleInputSample[i] = lnspl[m].input[i];   // copy one value per input neuron
  
}
//subroutine: net input and output of each hidden-layer unit
void CBpNetWork::HideLayInOut()
{
 double sigma;
 int i,j;
 for (j = 0; j < m_nHideNetNumber; j++)
 {
  sigma = 0.0;
  for (i = 0; i < m_nInputNetNumber; i++)
   sigma += m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i]*m_fpSingleInputSample[i];// inner product for the hidden layer

  m_npHideInputValue[j] = sigma - m_npHideLayerThreshold[j];// net input of the hidden layer
  m_npHideOutputValue[j] = 1.0/(1.0+exp(-m_npHideInputValue[j]));// output of the hidden layer (sigmoid)
 }
     
}
//subroutine: net input and output of each output-layer unit
void CBpNetWork::OutLayInOut()
{
 double sigma;
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  sigma = 0.0;
  for (int j = 0; j < m_nHideNetNumber; j++)
   sigma += m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j]*m_npHideOutputValue[j];// inner product for the output layer

  m_npOutputLayerInputValue[k] = sigma - m_npOutputLayerThreshold[k];// net input of the output layer
  m_npOutputLayerOutputValue[k] = 1.0/(1.0+exp(-m_npOutputLayerInputValue[k]));// output of the output layer (sigmoid)
 }
  
}
//subroutine: generalized error from the output layer back to the hidden layer
void CBpNetWork::NormalizeErrOutLayToHideLay(int m)
{
 double *abs_err = new double[m_nOutputNetNumber];// each sample's absolute error starts from zero
 double sqr_err = 0;// each sample's squared error starts from zero
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  abs_err[k] = m_fpSingleTrainingSample[k]-m_npOutputLayerOutputValue[k];
  if(abs_err[k] == 0)
   abs_err[k] = 0.0000001;
  // adapt the learning rates according to the error (disabled in this version)
/* if(abs_err[k]>m_fpAbsErrValBefore[k])
 {
  *m_npAlpha += (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npAlpha;
  *m_npBeta += (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npBeta;
 }
 else
 {
  *m_npAlpha -= (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npAlpha;
  *m_npBeta -= (abs_err[k]/(abs_err[k]+m_fpAbsErrValBefore[k]))**m_npBeta;
 }*/
  m_fpAbsErrValBefore[k] = abs_err[k];
  // absolute error of the k-th output neuron for the m-th sample
  sqr_err += (abs_err[k])*(abs_err[k]);// accumulate the squared error of the output layer
  m_fpD_Err[k] = abs_err[k]*m_npOutputLayerOutputValue[k]*(1.0-m_npOutputLayerOutputValue[k]);// generalized error of each output-layer neuron
 }
 m_npTotalErrofM[m] = sqr_err/2;// half of the squared error = mean-square error of the m-th sample
 delete []abs_err;
}

void CBpNetWork::NormalizeErrHideLayToOutLay()
{
 double sigma;
 for (int j = 0; j < m_nHideNetNumber; j++)
 {
  sigma = 0.0;
  for (int k = 0; k < m_nOutputNetNumber; k++)
   sigma += m_fpD_Err[k]*m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j];   // back-propagate the output deltas

  m_fpE_Err[j] = sigma*m_npHideOutputValue[j]*(1.0-m_npHideOutputValue[j]);// generalized error of each hidden-layer neuron (sigmoid derivative h*(1-h))
  }
}
//subroutine: adjust the hidden-to-output weights and the output-layer thresholds
void CBpNetWork::AdjustWeightAndThresholdOutLayToHideLay(int m, FILE * fp)
{
 for (int k = 0; k < m_nOutputNetNumber; k++)
 {
  for (int j = 0; j < m_nHideNetNumber; j++)
  {
   m_npHideToOutputWeightMatrix[k*m_nHideNetNumber+j] += *m_npAlpha*m_fpD_Err[k]*m_npHideOutputValue[j];// adjust hidden-to-output weight
  }
  m_npOutputLayerThreshold[k] += *m_npAlpha*m_fpD_Err[k];// adjust output-layer threshold
 }
}
//subroutine: global error over all m_nLearningSampleNumber samples
double CBpNetWork::GlobalErrSum()
{
 double total_err = 0;
 for (int m = 0; m < m_nLearningSampleNumber; m++)
  total_err += m_npTotalErrofM[m];// summing each sample's mean-square error gives the global error
      return total_err;
}
//subroutine: adjust the input-to-hidden weights and the hidden-layer thresholds
void CBpNetWork::AdjustWeightAndThresholdHideLayToOutLay(int m,FILE * fp)
{
 for (int j = 0; j < m_nHideNetNumber; j++)
 {
  for (int i = 0; i < m_nInputNetNumber; i++)
  {
   //fprintf(fp,"input-to-hidden weight for sample %d: %f\n",m,m_npInputToHideWeightMatrix[j]);  // debug output, disabled
   m_npInputToHideWeightMatrix[j*m_nInputNetNumber+i] += *m_npBeta*m_fpE_Err[j]*m_fpSingleInputSample[i];// adjust input-to-hidden weight
  }
  m_npHideLayerThreshold[j] += *m_npBeta*m_fpE_Err[j];
 }
}

void CBpNetWork::Excute(Study_Data_T * spl, double err, int count)
{
 double temp_err = 0;
 int local_count = 0;
 int i;
 FILE * f;
 if(count == 0)
  count = 1000;
 if(!spl)
  return;
 f = fopen("c:\\err.txt","a+");             // log the error curve
 do
 {
  Tarning(spl);
  temp_err = GlobalErrSum();

  fprintf(f," %d, %f ",local_count, temp_err);
  temp_err = sqrt(temp_err/m_nLearningSampleNumber);
  local_count++;
 }while(temp_err>err);//&&local_count<count);
 fprintf(f,"\n");
 fclose(f);
 f = fopen("c:\\weight.txt","a+");          // dump the trained weights
 for(i = 0; i < m_nHideNetNumber*m_nInputNetNumber; i++)
  fprintf(f," %f ",m_npInputToHideWeightMatrix[i]);
 fprintf(f,"\n\n");
 for (i = 0; i < m_nOutputNetNumber*m_nHideNetNumber; i++)
  fprintf(f," %f ",m_npHideToOutputWeightMatrix[i]);
 fprintf(f,"\n\n");
 fclose(f);
}

void CBpNetWork::Tarning(Study_Data_T * spl)
{
  int i = 0;
  Study_Data_T * tlnspl =spl;
  Study_Data_T * ttnspl =spl;
 for(;tlnspl != NULL&& ttnspl != NULL && i<m_nLearningSampleNumber*m_nInputNetNumber;i++)
 {
  InputOfNoMSampleData(0, tlnspl);
  InputOfNoMTrainingData(0, ttnspl);
  HideLayInOut();
  OutLayInOut();
  NormalizeErrOutLayToHideLay(0);
  NormalizeErrHideLayToOutLay();
  AdjustWeightAndThresholdOutLayToHideLay(0,NULL);
  AdjustWeightAndThresholdHideLayToOutLay(0,NULL);
 }
}

void CBpNetWork::Set(double * pointer, int num)
{
 // zero out num doubles starting at pointer
 for(int i = 0; i < num; i++)
  pointer[i] = 0;
}

void CBpNetWork::Predict(int m, Study_Data_T *PresentData)
{
InputOfNoMSampleData(m,  PresentData);
HideLayInOut();
OutLayInOut();
}

 

 

// header file

// BpNetWork.h: interface for the CBpNetWork class.
//
//////////////////////////////////////////////////////////////////////

#if !defined(AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_)
#define AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_

#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000

 


#include "math.h"


typedef struct Study_Data_S
    {
    double *input;
    double *teach;
    } Study_Data_T;
class CBpNetWork 
{
public:
 void Predict(int m, Study_Data_T *PresentData);

 void Tarning(Study_Data_T * spl);
 void Excute(Study_Data_T * spl, double err, int count);
 void AdjustWeightAndThresholdHideLayToOutLay(int m, FILE * fp);
 double GlobalErrSum();
 void AdjustWeightAndThresholdOutLayToHideLay(int m, FILE * fp);
 void NormalizeErrHideLayToOutLay();
 void NormalizeErrOutLayToHideLay(int m);
 void OutLayInOut();
 void HideLayInOut();
 void InputOfNoMSampleData(int m,Study_Data_T * lnspl);
 void InputOfNoMTrainingData(int m,Study_Data_T * lnspl);
 bool Init();
 CBpNetWork(int LearningSplNum, int OutLayNumber,int HideLayNumber,int inLayNum);

 double* m_npBeta;// learning rate, hidden layer to input layer
 double* m_npAlpha;// learning rate, output layer to hidden layer
 double* m_npTotalErrofM;// total error of the m-th sample
 double* m_npOutputLayerThreshold;// output-layer thresholds
 double* m_npHideLayerThreshold;// hidden-layer thresholds
 double* m_npOutputLayerOutputValue;// output-layer outputs
 double* m_npHideOutputValue;// hidden-layer outputs
 double* m_npOutputLayerInputValue;// output-layer net inputs
 double* m_npHideInputValue;// hidden-layer net inputs
 double* m_npHideToOutputWeightMatrix;// hidden-to-output weights
 double* m_npInputToHideWeightMatrix;// input-to-hidden weights
 double* m_fpSingleTrainingSample;// teacher data of a single sample
 double* m_fpSingleInputSample;// input data of a single sample
 double* m_fpInput;
 double* m_fpTeach;
 double* m_fpD_Err;
 double* m_fpE_Err;
 double* m_fpAbsErrValBefore;
 Study_Data_T *m_spStudy_Data;

 CBpNetWork();
 virtual ~CBpNetWork();

private:
 void Set(double*,int );
 int m_nLearningSampleNumber;// number of training samples
 int m_nOutputNetNumber;// number of output-layer neurons
 int m_nHideNetNumber;// number of hidden-layer neurons
 int m_nInputNetNumber;// number of input-layer neurons
};

#endif // !defined(AFX_BPNETWORK_H__2FDDE314_11F6_4D49_A046_C42B29C3754D__INCLUDED_)
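
Since this second version adds Predict(), a minimal, hypothetical way to exercise it after training might look like the following (again, the data and sizes are made up; this is not from the original post):

// hypothetical driver for the tested version
#include "stdafx.h"
#include "BpNetWork.h"
#include <stdio.h>

int main()
{
 const int samples = 10, inputs = 1, hidden = 10, outputs = 1;

 Study_Data_T *data = new Study_Data_T[samples];
 for (int m = 0; m < samples; m++)
 {
  data[m].input = new double[inputs];
  data[m].teach = new double[outputs];
  data[m].input[0] = m/10.0;             // made-up input value
  data[m].teach[0] = (m/10.0)*(m/10.0);  // made-up target value
 }

 CBpNetWork net(samples, outputs, hidden, inputs);
 net.Excute(data, 0.05, 1000);           // train; also writes err.txt and weight.txt to C:\

 // run the forward pass on sample 3 and read the network's answer
 net.Predict(3, data);
 printf("output for sample 3: %f\n", net.m_npOutputLayerOutputValue[0]);

 for (int m = 0; m < samples; m++)
 {
  delete []data[m].input;
  delete []data[m].teach;
 }
 delete []data;
 return 0;
}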
