ELM (Extreme Learning Machine) Neural Network Algorithm

function [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = elm(TrainingData_File, TestingData_File, Elm_Type, NumberofHiddenNeurons, ActivationFunction)

% Usage: elm(TrainingData_File, TestingData_File, Elm_Type, NumberofHiddenNeurons, ActivationFunction)

% OR: [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = elm(TrainingData_File, TestingData_File, Elm_Type, NumberofHiddenNeurons, ActivationFunction) %

% Input:

% TrainingData_File - Filename of training data set
% TestingData_File - Filename of testing data set

% Elm_Type - 0 for regression; 1 for (both binary and multi-class) classification
% NumberofHiddenNeurons - Number of hidden neurons assigned to the ELM
% ActivationFunction - Type of activation function:

% 'sig' for Sigmoidal function
% 'sin' for Sine function

% 'hardlim' for Hardlim function

% 'tribas' for Triangular basis function

% 'radbas' for Radial basis function (for additive type of SLFNs instead of RBF type of SLFNs) %

% Output:

% TrainingTime - Time (seconds) spent on training ELM

% TestingTime - Time (seconds) spent on predicting ALL testing data
% TrainingAccuracy - Training accuracy:

% RMSE for regression or correct classification rate for classification
% TestingAccuracy - Testing accuracy:

% RMSE for regression or correct classification rate for classification %

% MULTI-CLASS CLASSIFICATION: NUMBER OF OUTPUT NEURONS WILL BE AUTOMATICALLY SET EQUAL TO NUMBER OF CLASSES
% FOR EXAMPLE, if there are 7 classes in all, there will be 7 output
% neurons; if neuron 5 has the highest output, the input belongs to the 5th class
%

% Sample1 regression: [TrainingTime, TestingTime, TrainingAccuracy, TestingAccuracy] = elm('sinc_train', 'sinc_test', 0, 20, 'sig')

% Sample2 classification: elm('diabetes_train', 'diabetes_test', 1, 20, 'sig') %

%%%% Authors: MR QIN-YU ZHU AND DR GUANG-BIN HUANG
%%%% NANYANG TECHNOLOGICAL UNIVERSITY, SINGAPORE

%%%% EMAIL: EGBHUANG@NTU.EDU.SG; GBHUANG@IEEE.ORG
%%%% WEBSITE: http://www.ntu.edu.sg/eee/icis/cv/egbhuang.htm
%%%% DATE: APRIL 2004
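%%%% In brief, the training step below computes the hidden-layer output matrix
%%%% H = g(InputWeight*P + BiasMatrix) from randomly generated input weights and
%%%% biases, and then obtains the output weights as the minimum-norm least-squares
%%%% solution of H'*OutputWeight = T', i.e. OutputWeight = pinv(H')*T'.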

%%%%%%%%%%% Macro definition
REGRESSION=0;
CLASSIFIER=1;

%%%%%%%%%%% Load training dataset
train_data=load(TrainingData_File);
T=train_data(:,1)';

P=train_data(:,2:size(train_data,2))';

clear train_data; % Release raw training data array

%%%%%%%%%%% Load testing dataset
test_data=load(TestingData_File);

TV.T=test_data(:,1)';

TV.P=test_data(:,2:size(test_data,2))';

clear test_data; % Release raw testing data array

NumberofTrainingData=size(P,2);
NumberofTestingData=size(TV.P,2);
NumberofInputNeurons=size(P,1);
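% (Each row of the data files is one sample: column 1 is the target value or class
%  label and the remaining columns are the input features.)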

if Elm_Type~=REGRESSION

%%%%%%%%%%%% Preprocessing the data of classification
sorted_target=sort(cat(2,T,TV.T),2);

label=zeros(1,1); % Find and save in 'label' class label from training and testing data sets

label(1,1)=sorted_target(1,1);
j=1;

for i = 2:(NumberofTrainingData+NumberofTestingData)
    if sorted_target(1,i) ~= label(1,j)
        j=j+1;
        label(1,j) = sorted_target(1,i);
    end
end

number_class=j;

NumberofOutputNeurons=number_class;

%%%%%%%%%% Processing the targets of training

temp_T=zeros(NumberofOutputNeurons, NumberofTrainingData);
for i = 1:NumberofTrainingData
    for j = 1:number_class
        if label(1,j) == T(1,i)
            break;
        end
    end
    temp_T(j,i)=1;
end

T=temp_T*2-1;
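% For example, with classes {1,2,3} and training labels [1 3 2], temp_T is
% [1 0 0; 0 0 1; 0 1 0] (one column per sample), so T becomes
% [1 -1 -1; -1 -1 1; -1 1 -1] after the *2-1 mapping.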

%%%%%%%%%% Processing the targets of testing

temp_TV_T=zeros(NumberofOutputNeurons, NumberofTestingData);
for i = 1:NumberofTestingData
    for j = 1:number_class
        if label(1,j) == TV.T(1,i)
            break;
        end
    end
    temp_TV_T(j,i)=1;
end

TV.T=temp_TV_T*2-1;

end % end if of Elm_Type

%%%%%%%%%%% Calculate weights & biases
start_time_train=cputime;

%%%%%%%%%%% Random generate input weights InputWeight (w_i) and biases BiasofHiddenNeurons (b_i) of hidden neurons

InputWeight=rand(NumberofHiddenNeurons,NumberofInputNeurons)*2-1;

BiasofHiddenNeurons=rand(NumberofHiddenNeurons,1);
tempH=InputWeight*P;

clear P;                            %   Release input of training data
ind=ones(1,NumberofTrainingData);

BiasMatrix=BiasofHiddenNeurons(:,ind);      %   Extend the bias matrix BiasofHiddenNeurons to match the dimension of H
tempH=tempH+BiasMatrix;
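% (Indexing with ind replicates the single bias column across all training samples,
%  equivalent to repmat(BiasofHiddenNeurons,1,NumberofTrainingData).)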

%%%%%%%%%%% Calculate hidden neuron output matrix H
switch lower(ActivationFunction)
    case {'sig','sigmoid'}
        %%%%%%%% Sigmoid
        H = 1 ./ (1 + exp(-tempH));
    case {'sin','sine'}
        %%%%%%%% Sine
        H = sin(tempH);
    case {'hardlim'}
        %%%%%%%% Hard Limit
        H = double(hardlim(tempH));
    case {'tribas'}
        %%%%%%%% Triangular basis function
        H = tribas(tempH);
    case {'radbas'}
        %%%%%%%% Radial basis function
        H = radbas(tempH);
        %%%%%%%% More activation functions can be added here
end

clear tempH;                                %   Release the temporary array for calculation of hidden neuron output matrix H

%%%%%%%%%%% Calculate output weights OutputWeight (beta_i)

OutputWeight=pinv(H') * T';                             % slower implementation
%OutputWeight=inv(eye(size(H,1))/C+H * H') * H * T';   % faster method 1 implementation;
%one can set regularization factor C properly in classification applications
%OutputWeight=(eye(size(H,1))/C+H * H') \ H * T';      % faster method 2 implementation;
%one can set regularization factor C properly in classification applications

%If you use faster methods or kernel method, PLEASE CITE in your paper properly:
%Guang-Bin Huang, Hongming Zhou, Xiaojian Ding, and Rui Zhang, "Extreme Learning Machine for Regression and Multi-Class Classification," submitted to IEEE Transactions on Pattern Analysis and Machine Intelligence, October 2010.
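%For example, to try faster method 2 one could uncomment it with a chosen
%regularization factor (C = 1 below is only an illustration; it should be tuned
%for the application):
%   C = 1;
%   OutputWeight = (eye(size(H,1))/C + H*H') \ (H*T');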

end_time_train=cputime;

TrainingTime=end_time_train-start_time_train % Calculate CPU time (seconds) spent for training ELM

%%%%%%%%%%% Calculate the training accuracy

Y=(H' * OutputWeight)';                     %   Y: the actual output of the training data
if Elm_Type == REGRESSION

    TrainingAccuracy=sqrt(mse(T - Y))       %   Calculate training accuracy (RMSE) for regression case
end
clear H;
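% (mse above is presumably the Neural Network Toolbox mean-squared-error over all
%  elements of the error matrix; an equivalent without that toolbox would be
%  TrainingAccuracy = sqrt(mean((T(:) - Y(:)).^2)).)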

%%%%%%%%%%% Calculate the output of testing input
