package apv.nrlibj;
import java.io.*;
import java.lang.*;
//import java.lang.Exception;
import java.util.*;
import java.util.zip.*;
import robocode.*;
/************************************************************************/
/* */
/* */
/* CLASS NNet */
/* */
/* */
/************************************************************************/
/**
* This Class (the most important) defines a Neural Network object.
* A NNet object contains a lot of computation nodes linked together in various
* ways but organized in layers.<BR>
* So, a NNet is an array of layers.<p>
* The computing phase proceeds from the layer 0 to last layer.<BR>
* For each layer:<BR>
* each node is computed applying its "trf" function.<BR>
* After that, before passing to the next layer, the
* "out" node function is applied again for each node. This function moves the output value from the out-buffer
* to the out variable and synchronizes layer computation.
* (for the EBP phase, if used, the process is reversed using err
* variable).<p>
* A Node can be defined by the user or can be one of the predefined node types (NodeLin
* or NodeSigm). But a layer has nodes all of the same type.<p>
* A layer can have a buffer. This buffer is another layer. A layer can have just
* one buffer, but this buffer can have a buffer... in a chain.
* When a layer has a buffer, its output values are copied into the input
* variable of the destination layer nodes.<BR>
* These values will be computed when this buffer is computed. So, if this
* buffer has a number < than the buffered layer, these values will be considered
* in the next forward cycle as a memory, otherwise in the same computational cycle.
* <PRE>
* layer 0 layer 2 layer 3
* |--| |--| |--|
* | |---------------->| |--------->| |
* | | |-->| | | |
* | | |--| | |--| |--|
* |--| | |-| 在uffered
* | | 奸ayer
* layer 1|--| �
* buffer of layer2 ^........�
*
* </PRE>
*
* @author D. Denaro
* @version 5.0 , 2/2001
*/
public class NNet
{
Layer lyr[];
NNet(){}
/**
* This constructor return a NNet with <TT>"nlayer"</TT> layers empty
*/
NNet(int nlayer){this.lyr=new Layer[nlayer];}
/**
* This constructor creates a NNet with tree layers
* and a buffer memory (0,1,2,3) or without memory (0,1,2).
* <BR>When <TT>"mem"</TT> = true, a buffer of hiden layer is created and this
* memory layer is linked with the hiden layer (Elman context memory)
* <BR>The weights of the net are randomicaly choosen with range <TT>"ra"</TT> - <TT>"rb"</TT>
* <BR><TT>"nodeinp"</TT> is the number of first layer nodes,
* <BR><TT>"nodehid"</TT> of hiden layer and
* <BR><TT>"nodeout"</TT> of last layer.
* <BR>The first layer nodes (layer 0) are liner node (NodeLin); hidden layer
* and last layer nodes have sigmoid transfer function of activation (NodeSigm)
*/
public NNet(int nnodeinp,int nnodehid,int nnodeout,boolean fmem,float ra,float rb)
{int i; String CnodeLin, CnodeSigm ;int nlyr;
{CnodeLin="NodeLin";CnodeSigm="NodeSigm";}
if (fmem) nlyr=4; else nlyr=3;
lyr= new Layer[nlyr];
lyr[0]=new Layer(0,nnodeinp,CnodeLin);
if (fmem)
{
lyr[1]=new Layer(1,nnodehid,CnodeSigm);
lyr[2]=new Layer(2,nnodehid,CnodeSigm);
lyr[2].mbuff=lyr[1];
lyr[3]=new Layer(3,nnodeout,CnodeSigm);
}
else
{
lyr[1]=new Layer(1,nnodehid,CnodeSigm);
lyr[2]=new Layer(2,nnodeout,CnodeSigm);
}
if (fmem)
{lyr[2].linkFromAll(lyr[0],ra,rb);lyr[2].linkFromAll(lyr[1],ra,rb);lyr[3].linkFromAll(lyr[2],ra,rb);}
else
{lyr[1].linkFromAll(lyr[0],ra,rb);lyr[2].linkFromAll(lyr[1],ra,rb);}
}
/**
* This constructor creates a NNet with tree layers (0,1,3)
* and a buffer memory or not. But the
* the weights of the net are randomicaly choosen with default range
*/
public NNet(int nnodeinp,int nnodehid,int nnodeout,boolean fmem)
{this(nnodeinp,nnodehid,nnodeout,fmem,NrPop.ra,NrPop.rb);}
/**
* This constructor creates a simple NNet with tree layers (0,1,3) without context
* memory. Default range for random choosen weights
*/
public NNet(int nnodeinp,int nnodehid,int nnodeout)
{this(nnodeinp,nnodehid,nnodeout,false,NrPop.ra,NrPop.rb);}
/**
* This constructor creates a network in a most flexible way.
* The string array <TT>"descrrec"</TT> contains the records that describes the network
* population in a simple predefined laguage:
* <PRE>
* 3 records types
*
* <B>
* layer=n [tnode=n,[m] nname=xxxx... copytoml=n] </B>
* meens: definition of layer number "n" with "n" total-node (or n x m nodes
* if is a bidimesional layer) and with a buffer realised by layer "n"
* The node type is defined by the name (Ex. NodeLin, NodeSigm)
* <B>
* linktype=[xxx...] fromlayer=n[(h[,k])] tolayer=n[(h[,k])] [ value=na,[nb]] </B>
* meens: link "all" to all (default) or "one" to one from layer number "n" to
* layer number "n". As default link is realized between all layers
* node, but it is possible to describe a link for a sub-set of nodes
* from node "h" to node "k". As default weights are randomically chosen
* in default range, but it is possible define a different range between
* "na" and "nb". If only "na" is present, that meens all weights = "na"
* <B>
* biasval=na[,nb] oflayer=n[(h[,k])] </B>
* meens: as previous kind of record but for bias values
*
* example of descrrec[] contents (each record = one array element)
*
* layer=0 tnode=2 nname=NodeLin
* layer=1 tnode=2 nname=NodeSigm
* layer=2 tnode=1 nname=NodeSigm
* linktype=all fromlayer=0 tolayer=1
* linktype=all fromlayer=1 tolayer=2
* </PRE>
* it defines a network with 3 layers and 2,2,1 nodes (for XOR problem for instance)
* NodeLin is a kind of node that reproduce the input value into the output variable
* NodeSigm is a sigmoid node.
*/
public NNet(String descrrec[])
{ this(descrrec,0);}
/**
* This constructor is similar to NNet(String descrrec[]) but read net
* description from a file. (for an external NN definition)
*/
public NNet(String namefile)
{this(NrPop.fileToStrArray(namefile),0);}
NNet(String descrrec[],int s)
{
int i,k,ct;
StringTokenizer tok;
String t,val,ThisC,Pack;
String a,b,c;
ThisC=this.getClass().getName();
Pack=ThisC.substring(0,ThisC.lastIndexOf('.')+1);
int maxlayer=-1;
int nlayer=0,tnode=0,tnodey=0,layerbuff=-1;
String nname="";
int ltype=0,lfrom=-1,lto=-1,fnodes=-1,fnodee=-1,tnodes=-1,tnodee=-1;
float vala=NrPop.ra,valb=NrPop.rb;
int blayer=-1,bnodes=-1,bnodee=-1;
float bvaluea=0,bvalueb=0;
for (i=s;i<descrrec.length;i++)
{
tok=new StringTokenizer(descrrec[i]);
if (tok.countTokens()<1) continue;
t=tok.nextToken();
if (t.startsWith("layer="))
{
val=t.substring(t.indexOf('='));a=NrPop.values(val,1);
try {nlayer=Integer.parseInt(a);} catch (NumberFormatException e){NNError.err("Layer Number missed at rec "+i);}
if (nlayer>maxlayer) maxlayer=nlayer;
}
if (t.startsWith("net=")) break;
}
lyr= new Layer[maxlayer+1];
for (i=s;i<descrrec.length;i++)
{
tok=new StringTokenizer(descrrec[i]);
if (tok.countTokens()<1) continue;
t=tok.nextToken();
if (t.startsWith("layer="))
{ct=tok.countTokens();
val=t.substring(t.indexOf('='));a=NrPop.values(val,1);
try {n