C++ Neural Networks and Fuzzy Logic
by Valluru B. Rao
M&T Books, IDG Books Worldwide, Inc.
ISBN: 1558515526   Pub Date: 06/01/95
  



Listing 8.2 bamntwrk.cpp

//bamntwrk.cpp   V. Rao, H. Rao

//Source file for BAM network program

#include "bamntwrk.h"

void bmneuron::getnrn(int m1,int m2,int m3,char *y)
{
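//record the neuron's serial number, name, and fan-out/fan-in sizes,
//then zero its outgoing weights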
int i;
name = y;
nnbr = m1;
outn = m2;
inn  = m3;

for(i=0;i<outn;++i){
outwt[i] = 0 ;
}

output = 0;
activation = 0;
}

void exemplar::getexmplr(int k,int l,int *b1,int *b2)
{
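//store one training exemplar: the X half in v1 and the Y half in v2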
int i2;
xdim = k;
ydim = l;

for(i2=0;i2<xdim;++i2){
v1[i2] = b1[i2]; }

for(i2=0;i2<ydim;++i2){
v2[i2] = b2[i2]; }
}

void exemplar::prexmplr()
{
int i;
cout<<"\nX vector you gave is:\n";
for(i=0;i<xdim;++i){
       cout<<v1[i]<<"  ";}

cout<<"\nY vector you gave is:\n";

for(i=0;i<ydim;++i){
       cout<<v2[i]<<"  ";}

cout<<"\n";
}

void exemplar::trnsfrm()
{
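//convert the binary (0/1) exemplar vectors to bipolar (-1/+1) form: u = 2v - 1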
int i;

for(i=0;i<xdim;++i){
       u1[i] = 2*v1[i] -1;}

for(i=0;i<ydim;++i){
       u2[i] = 2*v2[i] - 1;}

}

void exemplar::prtrnsfrm()
{
int i;
cout<<"\nbipolar version of X vector you gave is:\n";

for(i=0;i<xdim;++i){
       cout<<u1[i]<<"  ";}

cout<<"\nbipolar version of Y vector you gave is:\n";

for(i=0;i<ydim;++i){
       cout<<u2[i]<<"  ";}

cout<<"\n";
}
void asscpair::getasscpair(int i,int j,int k)
{
idn = i;
xdim = j;
ydim = k;
}

void asscpair::prasscpair()
{
int i;
cout<<"\nX vector in the associated pair no. "<<idn<<"   is:\n";

for(i=0;i<xdim;++i){
       cout<<v1[i]<<"  ";}

cout<<"\nY vector in the associated pair no. "<<idn<<"   is:\n";

for(i=0;i<ydim;++i){
       cout<<v2[i]<<"  ";}

cout<<"\n";
}

void potlpair::getpotlpair(int k,int j)
{

xdim = k;
ydim = j;

}

void potlpair::prpotlpair()
{
int i;
cout<<"\nX vector in possible associated pair is:\n";

for(i=0;i<xdim;++i){
       cout<<v1[i]<<"  ";}

cout<<"\nY vector in possible associated pair is:\n";

for(i=0;i<ydim;++i){
       cout<<v2[i]<<"  ";}

cout<<"\n";
}

void network::getnwk(int k,int l,int k1,int b1[][6],int b2[][5])
{
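//set up the BAM: read and transform the exemplars, create both layers of
//neurons, and build the weight matrix as the sum of outer products of the
//bipolar exemplar pairs; mtrx2 holds the transpose used for the reverse pass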
anmbr = k;
bnmbr = l;
nexmplr = k1;
nasspr = 0;
ninpt = 0;
int i,j,i2;
flag =0;
char *y1="ANEURON", *y2="BNEURON";

for(i=0;i<nexmplr;++i){
       e[i].getexmplr(anmbr,bnmbr,b1[i],b2[i]);
       e[i].prexmplr();
       e[i].trnsfrm();
       e[i].prtrnsfrm();
       }

for(i=0;i<anmbr;++i){
       anrn[i].bmneuron::getnrn(i,bnmbr,0,y1);}

for(i=0;i<bnmbr;++i){
       bnrn[i].bmneuron::getnrn(i,0,anmbr,y2);}

for(i=0;i<anmbr;++i){

       for(j=0;j<bnmbr;++j){
              mtrx1[i][j]  = 0;
              for(i2=0;i2<nexmplr;++i2){
                     mtrx1[i][j]  += e[i2].u1[i]*e[i2].u2[j];}

              mtrx2[j][i] = mtrx1[i][j];
              anrn[i].outwt[j] = mtrx1[i][j];
              bnrn[j].outwt[i] = mtrx2[j][i];
       }
}

prwts();
cout<<"\n";
}

void network::asgninpt(int *b)
{
int i;
cout<<"\n";

for(i=0;i<anmbr;++i){
       anrn[i].output = b[i];
       outs1[i] = b[i];
       }

}

void network::compr1(int j,int k)
{
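//set flag if the X vectors of potential pairs j and k differ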
int i;

for(i=0;i<anmbr;++i){
       //leave the loop only after a mismatch has been found
       if(pp[j].v1[i] != pp[k].v1[i]) {flag = 1; break;}
       }

}

void network::compr2(int j,int k)
{
int i;

//compare the Y halves, which have bnmbr components
for(i=0;i<bnmbr;++i){
       if(pp[j].v2[i] != pp[k].v2[i]) {flag = 1; break;}
       }

}

void network::comput1()
{
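//propagate from the input (A) layer to the output (B) layer: each activation
//is the dot product of outs1 with a column of the weight matrix; outputs are
//thresholded at zero, and a zero activation keeps the previous output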
int j;

for(j=0;j<bnmbr;++j){
       int ii1;
       int c1 =0,d1;
       cout<<"\n";

       for(ii1=0;ii1<anmbr;++ii1){
              d1 = outs1[ii1] * mtrx1[ii1][j];
              c1 += d1;
              }

       bnrn[j].activation = c1;
       cout<<"\n output layer neuron         "<<j<<" activation is "
              <<c1<<"\n";

if(bnrn[j].activation <0) {

       bnrn[j].output = 0;
       outs2[j] = 0;}

else

       if(bnrn[j].activation>0) {

               bnrn[j].output = 1;
               outs2[j] = 1;}
               else

               {cout<<"\n A 0 is obtained, use previous output value \n";

               if(ninpt<=nexmplr){
                         bnrn[j].output = e[ninpt-1].v2[j];}

               else
                         {bnrn[j].output = pp[0].v2[j];}

               outs2[j] = bnrn[j].output; }

       cout<<"\n output layer neuron         "<<j<<" output is "
               <<bnrn[j].output<<"\n";
       }
}

void network::comput2()
{
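//propagate back from the output (B) layer to the input (A) layer using the
//transposed weight matrix, with the same thresholding rule as comput1()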
int i;

for(i=0;i<anmbr;++i){
       int ii1;
       int c1=0;

       for(ii1=0;ii1<bnmbr;++ii1){
              c1 += outs2[ii1] * mtrx2[ii1][i];  }

       anrn[i].activation = c1;
       cout<<"\ninput layer neuron        "<<i<<" activation is "
              <<c1<<"\n";

       if(anrn[i].activation <0 ){

              anrn[i].output = 0;
              outs1[i] = 0;}
       else

              if(anrn[i].activation >0 ) {

                     anrn[i].output = 1;
                     outs1[i] = 1;
                     }

              else

              { cout<<"\n A 0 is obtained, use previous value if available\n";

              if(ninpt<=nexmplr){

                        anrn[i].output = e[ninpt-1].v1[i];}

              else

                        {anrn[i].output = pp[0].v1[i];}

              outs1[i] = anrn[i].output;}
              cout<<"\n input layer neuron         "<<i<<" output is "
                  <<anrn[i].output<<"\n";
              }
}

void network::asgnvect(int j1,int *b1,int *b2)
{
int  j2;

for(j2=0;j2<j1;++j2){
       b2[j2] = b1[j2];}

}

void network::prwts()
{
int i3,i4;
cout<<"\n  weights--  input layer to output layer: \n\n";

for(i3=0;i3<anmbr;++i3){

        for(i4=0;i4<bnmbr;++i4){
                cout<<anrn[i3].outwt[i4]<<"                  ";}

        cout<<"\n"; }

cout<<"\n";

cout<<"\nweights--  output layer to input layer: \n\n";

for(i3=0;i3<bnmbr;++i3){

        for(i4=0;i4<anmbr;++i4){
                cout<<bnrn[i3].outwt[i4]<<"                  ";}

        cout<<"\n";  }

cout<<"\n";
}

void network::iterate()
{
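//run the association process on the X half of every exemplar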
int i1;

for(i1=0;i1<nexmplr;++i1){
        findassc(e[i1].v1);
        }

}

void network::findassc(int *b)
{
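//starting from input vector b, cycle activations between the two layers,
//generating potential pairs until two successive pairs agree (flag stays 0);
//the stable pair is then recorded as an association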
int j;
flag = 0;
asgninpt(b);
ninpt ++;
cout<<"\nInput vector is:\n" ;

for(j=0;j<anmbr;++j){  //print all anmbr components of the input vector
       cout<<b[j]<<" ";}

cout<<"\n";
pp[0].getpotlpair(anmbr,bnmbr);
asgnvect(anmbr,outs1,pp[0].v1);

comput1();

if(flag>=0){
           asgnvect(bnmbr,outs2,pp[0].v2);

           cout<<"\n";
           pp[0].prpotlpair();
           cout<<"\n";

           comput2(); }

for(j=1;j<MXSIZ;++j){
       pp[j].getpotlpair(anmbr,bnmbr);
       asgnvect(anmbr,outs1,pp[j].v1);

       comput1();

       asgnvect(bnmbr,outs2,pp[j].v2);

       pp[j].prpotlpair();
       cout<<"\n";

       compr1(j,j-1);
       compr2(j,j-1);

       if(flag == 0) {

               int j2;
               nasspr += 1;
               j2 = nasspr;

               as[j2].getasscpair(j2,anmbr,bnmbr);
               asgnvect(anmbr,pp[j].v1,as[j2].v1);
               asgnvect(bnmbr,pp[j].v2,as[j2].v2);

               cout<<"\nPATTERNS ASSOCIATED:\n";
               as[j2].prasscpair();
               j = MXSIZ ;
      }

      else

               if(flag == 1)

                       {
                       flag = 0;
                       comput1();
                       }

     }
}

void network::prstatus()
{
int j;
cout<<"\nTHE FOLLOWING ASSOCIATED PAIRS WERE FOUND BY BAM\n\n";

for(j=1;j<=nasspr;++j){
       as[j].prasscpair();
       cout<<"\n";}

}

int main()
{
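//demonstration: 6 input neurons, 5 output neurons, 3 exemplar pairs; the last
//two rows of inptv are extra test vectors presented after training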
int ar = 6, br = 5, nex = 3;
int inptv[][6]={{1,0,1,0,1,0},{1,1,1,0,0,0},{0,1,1,0,0,0},{0,1,0,1,0,1},
   {1,1,1,1,1,1}};
int outv[][5]={{1,1,0,0,1},{0,1,0,1,1},{1,0,0,1,0}};

cout<<"\n\nTHIS PROGRAM IS FOR A BIDIRECTIONAL ASSOCIATIVE MEMORY NETWORK.\n";
cout<<" THE NETWORK IS SET UP FOR ILLUSTRATION WITH "<<ar<<
         " INPUT NEURONS, AND "<<br;
cout<<" OUTPUT NEURONS.\n"<<nex
         <<" exemplars are used to encode \n";

static network bamn;
bamn.getnwk(ar,br,nex,inptv,outv) ;
bamn.iterate();
bamn.findassc(inptv[3]);
bamn.findassc(inptv[4]);
bamn.prstatus();
}


