/**
* Copyright 2010 Neuroph Project http://neuroph.sourceforge.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neuroph.core;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Random;
import org.neuroph.core.exceptions.VectorSizeMismatchException;
import org.neuroph.core.learning.IterativeLearning;
import org.neuroph.core.learning.LearningRule;
import org.neuroph.core.learning.TrainingSet;
import org.neuroph.util.NeuralNetworkType;
import org.neuroph.util.plugins.LabelsPlugin;
import org.neuroph.util.plugins.PluginBase;
/**
 *<pre>
 * Base class for artificial neural networks. It provides generic structure and functionality
 * for the neural networks. A neural network contains a collection of neuron layers and a learning rule.
 * Custom neural networks are created by deriving from this class, creating layers of interconnected network-specific neurons,
 * and setting a network-specific learning rule.
 *</pre>
 *
 * @see Layer
 * @see LearningRule
 * @author Zoran Sevarac <sevarac@gmail.com>
 */
public class NeuralNetwork extends Observable implements Runnable, Serializable {
/**
 * The class fingerprint that is set to indicate serialization
 * compatibility with a previous version of the class.
 */
private static final long serialVersionUID = 3L;
/**
 * Network type id (see neuroph.util.NeuralNetworkType)
 */
private NeuralNetworkType type;
/**
 * Collection of layers that make up this neural network
 */
private List<Layer> layers;
/**
 * Reference to network input neurons
 */
private List<Neuron> inputNeurons;
/**
 * Reference to network output neurons
 */
private List<Neuron> outputNeurons;
/**
 * Learning rule for this network
 */
private LearningRule learningRule; // learning algorithm
/**
 * Separate thread used to run the learning rule; transient so that a
 * running thread is never serialized with the network
 */
private transient Thread learningThread; // thread for learning rule
/**
 * Plugins collection, keyed by plugin name
 */
private Map<String, PluginBase> plugins;
/**
 * Creates an empty neural network (no layers) and registers the
 * default {@link LabelsPlugin}.
 */
public NeuralNetwork() {
    layers = new ArrayList<Layer>();
    plugins = new HashMap<String, PluginBase>();
    addPlugin(new LabelsPlugin());
}
/**
 * Appends the given layer to the end of this network and makes this
 * network the layer's parent.
 *
 * @param layer layer to add
 */
public void addLayer(Layer layer) {
    layer.setParentNetwork(this);
    layers.add(layer);
}
/**
 * Inserts the given layer at the specified index position and makes this
 * network the layer's parent.
 *
 * @param idx index position at which to insert the layer
 * @param layer layer to add
 */
public void addLayer(int idx, Layer layer) {
    layer.setParentNetwork(this);
    layers.add(idx, layer);
}
/**
 * Removes the specified layer from this network, if present.
 *
 * @param layer layer to remove
 */
public void removeLayer(Layer layer) {
    layers.remove(layer);
}
/**
 * Removes the layer at the specified index position from this network.
 *
 * @param idx index position of the layer to remove
 */
public void removeLayerAt(int idx) {
    layers.remove(idx);
}
/**
 * Returns an iterator over this network's layers.
 *
 * @return iterator over the layers collection
 */
public Iterator<Layer> getLayersIterator() {
    return layers.iterator();
}
/**
 * Returns the layers collection. Note that this is the live internal
 * list, not a defensive copy, so callers can mutate the network through it.
 *
 * @return layers collection
 */
public List<Layer> getLayers() {
    return layers;
}
/**
 * Returns the layer at the specified index position.
 *
 * @param idx layer index position
 * @return layer at the specified index position
 */
public Layer getLayerAt(int idx) {
    return layers.get(idx);
}
/**
 * Returns the index position of the specified layer, or -1 if the layer
 * is not part of this network.
 *
 * @param layer requested Layer object
 * @return layer position index
 */
public int indexOf(Layer layer) {
    return layers.indexOf(layer);
}
/**
 * Returns the number of layers in this network.
 *
 * @return number of layers
 */
public int getLayersCount() {
    return layers.size();
}
/**
 * Sets the network input. Each element of the given array is assigned to
 * the input neuron at the same position.
 *
 * @param inputVector network input as a double array; its length must equal
 *        the number of input neurons
 * @throws VectorSizeMismatchException if the vector length does not match
 *         the network input dimension
 */
public void setInput(double ... inputVector) throws VectorSizeMismatchException {
    if (inputVector.length != inputNeurons.size()) {
        throw new VectorSizeMismatchException("Input vector size does not match network input dimension!");
    }
    for (int i = 0; i < inputVector.length; i++) {
        // input neuron at position i receives input component i
        inputNeurons.get(i).setInput(inputVector[i]);
    }
}
/**
 * Returns the network output as a double array, with one element per
 * output neuron, in output-neuron order.
 *
 * @return network output vector
 */
public double[] getOutput() {
    double[] output = new double[outputNeurons.size()];
    for (int i = 0; i < output.length; i++) {
        output[i] = outputNeurons.get(i).getOutput();
    }
    return output;
}
// /**
// * Returns network output vector as double array
// *
// * @return network output vector as double array
// */
// public double[] getOutputAsArray() {
// return VectorParser.convertToArray(getOutput());
// }
/**
 * Performs calculation on the whole network by calculating each layer
 * in order.
 */
public void calculate() {
    for (int i = 0; i < layers.size(); i++) {
        layers.get(i).calculate();
    }
}
/**
 * Resets the activation levels of the whole network by resetting each
 * layer in order.
 */
public void reset() {
    for (int i = 0; i < layers.size(); i++) {
        layers.get(i).reset();
    }
}
/**
 * Implementation of the Runnable interface so the network can be
 * calculated in a separate thread.
 */
@Override
public void run() {
    calculate();
}
/**
 * Starts learning the specified training set in a new thread and
 * immediately returns to the caller. Note the statement order below:
 * the rule is marked as started before the thread actually runs, so
 * callers polling the learning state see it as running right away.
 *
 * @param trainingSetToLearn
 * set of training elements to learn
 */
public void learnInNewThread(TrainingSet trainingSetToLearn) {
learningRule.setTrainingSet(trainingSetToLearn);
learningThread = new Thread(learningRule); // learningRule implements Runnable
learningRule.setStarted(); // flag must be set before the thread starts
learningThread.start();
}
/**
* Starts learning with specified learning rule in new thread to learn the
* specified training set, and immediately returns from method to the current thread execution
* @param trainingSetToLearn
* set of training elements to learn
* @param learningRule
* learning algorithm
*/
public void learnInNewThread(TrainingSet trainingSetToLearn, LearningRule learning
没有合适的资源?快使用搜索试试~ 我知道了~
温馨提示
Neuroph是一个轻量级的Java神经网络的框架,可以用来模拟常见的神经网络架构。少数基本类别相对应的基本网络的概念,它非常容易学习。而且它也还有一个不错的GUI应用程序。Neurop最初是一位硕士研究生的毕业论文主题,随后成为一个开源项目,它采用LGPL3许可证发布源代码。开发者已在SourceForge网站上发布了如何使用的介绍文档和一个在线Demo(需要Java 1.6)。 Neuroph 2.5 with Neuroph Studio 测试版发布,该版本有一些新功能和改进: 1. Neuroph Studio-基于NetBeans平台的GUI 生成器! 2. 性能和算法优化 3. 整技术与Encog引擎
资源推荐
资源详情
资源评论
收起资源包目录
神经网络图像识别——Neuroph框架 (537个子文件)
stylesheet.css 1KB
inherit.gif 57B
serialized-form.html 91KB
index-7.html 69KB
index-17.html 59KB
NeuralNetwork.html 52KB
NeuralNetwork.html 50KB
Neuron.html 45KB
Neuron.html 44KB
SupervisedLearning.html 40KB
TrainingSet.html 39KB
DynamicBackPropagation.html 38KB
LMS.html 36KB
FlatNetworkLearning.html 35KB
SimulatedAnnealingLearning.html 32KB
NeuralNetworkFactory.html 31KB
TrainingSet.html 31KB
index-3.html 31KB
MatrixMomentumBackpropagation.html 30KB
overview-tree.html 30KB
MomentumBackpropagation.html 28KB
Layer.html 28KB
index-9.html 28KB
IterativeLearning.html 27KB
SupervisedHebbianLearning.html 27KB
MatrixMlpLayer.html 27KB
TransferFunctionType.html 27KB
ImageRecognitionPlugin.html 26KB
BinaryDeltaRule.html 26KB
Layer.html 26KB
BackPropagation.html 25KB
SigmoidDeltaRule.html 25KB
LearningRule.html 25KB
ConnectionFactory.html 25KB
PerceptronLearning.html 24KB
TransferFunction.html 24KB
package-use.html 24KB
index-18.html 23KB
Connection.html 23KB
CompetitiveNeuron.html 23KB
OcrUtils.html 22KB
NeuronProperties.html 22KB
MultiLayerPerceptron.html 22KB
index-16.html 22KB
FractionRgbData.html 21KB
UnsupervisedHebbianLearning.html 21KB
index-19.html 21KB
package-use.html 21KB
MatrixMultiLayerPerceptron.html 21KB
TrainingElement.html 21KB
Connection.html 21KB
SupervisedTrainingElement.html 20KB
NeuralNetworkType.html 20KB
LearningRule.html 20KB
SunSpots.html 20KB
BinaryHebbianLearning.html 20KB
UnsupervisedLearning.html 20KB
CompetitiveLearning.html 20KB
BiasNeuron.html 20KB
OutstarLearning.html 20KB
InstarLearning.html 20KB
OjaLearning.html 20KB
IterativeLearning.html 19KB
InputOutputNeuron.html 19KB
ImageSampler.html 19KB
index-12.html 19KB
Trapezoid.html 19KB
index-11.html 19KB
KohonenLearning.html 19KB
Ramp.html 19KB
FlatWeight.html 19KB
ThresholdNeuron.html 19KB
DelayedNeuron.html 19KB
allclasses-frame.html 18KB
OcrPlugin.html 18KB
FlatNetworkPlugin.html 18KB
index-13.html 18KB
SupervisedHebbianNetwork.html 17KB
TransferFunctionType.html 17KB
SummingFunctionType.html 17KB
Perceptron.html 17KB
UnsupervisedHebbianNetwork.html 17KB
CompetitiveLayer.html 17KB
ImageRecognitionHelper.html 17KB
Weight.html 16KB
InputFunction.html 16KB
Weight.html 16KB
NeuronProperties.html 16KB
Hopfield.html 16KB
ColorMode.html 16KB
IACNeuron.html 16KB
NeuroFuzzyPerceptron.html 16KB
Step.html 16KB
index-1.html 16KB
Sigmoid.html 16KB
Tanh.html 16KB
allclasses-noframe.html 16KB
RecommenderNetwork.html 16KB
index-14.html 16KB
RbfNetwork.html 16KB
共 537 条
- 1
- 2
- 3
- 4
- 5
- 6
资源评论
- zhangjianfengconquer2012-07-23界面比想象中复杂,还在琢磨中,暂时运行还是不错滴
- 旋zi2018-02-15正在研究,挺不错的
- victorchee2012-10-22正在研究中,运行速度不怎么样
- kjaz20082018-05-30正在学习中,先研究一下
- ioernt2014-04-17文化程度地,看不懂。唉,要养家糊口,没时间细看,一点点碎片时间。研究不懂
xyag902
- 粉丝: 1
- 资源: 5
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功