While running the following program, this error occurs: "TrainOb = 228 ??? Index exceeds matrix dimensions"

%*************************************
% Beginning of program
% Set parameters
% Clear the operating environment
clc;
clear all;
load TXY.txt;
% for testing length of TXY
LengthofInputdata=length(TXY);
% TXY;
% Input No. of Normalized Data
% Or use mapminmax;
TrainOb=228 % No. of Training data
% LenghtofTrain=length(OP)
P = TXY(1:TrainOb,1:7);
LenghtofTrain=length(P)
P=P'
% Normalized the Data
for i9=1:length(P(:,1))
    P(i9,:)=(P(i9,:)-min(P(i9,:)))/(max(P(i9,:))-min(P(i9,:)));
end
NP=P
LtofTrNormal=length(NP);
Ltr=length(NP);
[row,col]=size(TXY);
set=row/5;
row=row-set;
row1=row/2;
%***************************
Lth=length(TXY)
OP = TXY(1:TrainOb,1:7);
LenghtofTrain=length(OP)
NP=NP'
% for testing length of traindata1
traindata1=NP(1:row1,1:col-1);
% length(traindata1);
% for testing length of traindata2
traindata2=NP(row1+1:row,1:col-1);
%length(traindata2);
% target of traindata1
t1=NP(1:row1,col);
% target of traindata2
t2=NP(row1+1:row,col);
t1=t1'
t2=t2'
tr1=traindata1'
tr2=traindata2'
la=1;
X_axis=rand();
Y_axis=rand();
maxgen=100;
% maxgen=50;
sizepop=10;
%*********
for i=1:sizepop
    X(i)=X_axis+20*rand()-10;
    Y(i)=Y_axis+20*rand()-10;
    D(i)=(X(i)^2+Y(i)^2)^0.5;
    S(i)=1/D(i);
    %***
    g=0;
    p=S(i); % Learning spread of GRNN
    if 0.001>p
        p=1;
    end
    % Cross validation
    if la == 1
        net=newgrnn(tr1,t1,p);
        yc=sim(net,tr2);
        y=yc-t2;
        for ii=1:row1
            g=g+y(ii)^2;
        end
        Smell(i)=(g/row1)^0.5; % RMSE
        la=2;
    else
        net=newgrnn(tr2,t2,p);
        yc=sim(net,tr1);
        y=yc-t1;
        for ii=1:row1
            g=g+y(ii)^2;
        end
        Smell(i)=(g/row1)^0.5; % RMSE
        la=1;
    end
end
%***
[bestSmell bestindex]=min(Smell);
X_axis=X(bestindex);
Y_axis=Y(bestindex);
bestS=S(bestindex);
Smellbest=bestSmell;
%
for gen=1:maxgen
    gen
    bestS
    for i=1:sizepop
        g=0;
        X(i)=X_axis+20*rand()-10;
        Y(i)=Y_axis+20*rand()-10;
        D(i)=(X(i)^2+Y(i)^2)^0.5;
        S(i)=1/D(i);
        p=S(i); % Learning the spread of GRNN
        if 0.001>p
            p=1;
        end
        % Cross validation
        if la == 1
            net=newgrnn(tr1,t1,p);
            yc=sim(net,tr2);
            y=yc-t2;
            for ii=1:row1
                g=g+y(ii)^2;
            end
            Smell(i)=(g/row1)^0.5; % RMSE
            la=2;
        else
            net=newgrnn(tr2,t2,p);
            yc=sim(net,tr1);
            y=yc-t1;
            for ii=1:row1
                g=g+y(ii)^2;
            end
            Smell(i)=(g/row1)^0.5;
            la=1;
        end
    end
    %***
    [bestSmell bestindex]=min(Smell); % find the min of RMSE
    %***
    if bestSmell<Smellbest
        X_axis=X(bestindex);
        Y_axis=Y(bestindex);
        bestS=S(bestindex);
        Smellbest=bestSmell;
    end
    yy(gen)=Smellbest;
    Xbest(gen)=X_axis;
    Ybest(gen)=Y_axis;
end
%
figure(1)
plot(yy)
title('Optimization process','fontsize',12)
xlabel('Iteration Number','fontsize',12);ylabel('RMSE','fontsize',12);
bestS
Xbest
Ybest
figure(2)
plot(Xbest,Ybest,'b.');
title('Fruit fly flying route','fontsize',14)
xlabel('X-axis','fontsize',12);ylabel('Y-axis','fontsize',12);
%*******Begin to Predict
% TestData
LengthofInputdata=length(TXY)
% Input No. of Normalized Testing Data
% LenghtofAll=length(OP)
P = TXY(1:LengthofInputdata,1:7);
% LenghtofTallData=length(P);
% Length of testing data (All Data Normalized)
% Changed Non-normalized Data into Normalized Data
P=P';
for i9=1:length(P(:,1))
    P(i9,:)=(P(i9,:)-min(P(i9,:)))/(max(P(i9,:))-min(P(i9,:)));
end
Nt=P';
% Training Data
TrainData=Nt(1:row,1:col-1);
tr=TrainData';
% tr=[tr1 tr2]
% LTr=length(tr)
% Testing Data
TestData=Nt(row+1:LengthofInputdata,1:col-1);
% predict value of testdata
% No target Y
test3=TestData';
LengthofTestData=length(TestData)
t3=TXY(row+1:LengthofInputdata,col);
% length_tr3=length(tr3);
% tt=Nt(1:row,col);
tt=[t1 t2];
% Ltt=length(tt)
% bestS for parameter p;
p=bestS;
% TrainData put into the GRNN
net=newgrnn(tr,tt,p);
%%predict value of testdata
ytest=sim(net,test3);
Y_hat=ytest'
% length_Y_hat=length(Y_hat)
% Predicted output Y_hat normalized
Lny=length(Y_hat);
P = Y_hat(1:Lny,1);
P=P';
LenghtofTrain=length(P)
% Changed Non-normalized Data into Normalized Data
for i9=1:length(P(:,1))
    P(i9,:)=(P(i9,:)-min(P(i9,:)))/(max(P(i9,:))-min(P(i9,:)));
end
NPP=P';
% target of testdata
Target3=t3;
save Y_hat
% End of Program
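The reported failure ("Index exceeds matrix dimensions" shown immediately after TrainOb = 228 is displayed) most likely points at the first line that indexes TXY, i.e. P = TXY(1:TrainOb,1:7). A minimal diagnostic sketch, not part of the original program, that compares the dimensions of the loaded matrix against the indices used on that line (variable names follow the program above; that this is the failing line is an assumption):
% Diagnostic sketch (assumption: the error is raised by P = TXY(1:TrainOb,1:7))
load TXY.txt;                     % creates a numeric matrix named TXY
[nRows, nCols] = size(TXY)        % show how many rows and columns were actually loaded
TrainOb = 228;
if nRows < TrainOb || nCols < 7
    error('TXY is %d-by-%d, but the program indexes rows 1:%d and columns 1:7.', ...
        nRows, nCols, TrainOb);
end
P = TXY(1:TrainOb, 1:7);          % succeeds only if TXY has at least 228 rows and 7 columns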
DATA:
TXY.txt:
-0.14 13.21 67.57 -102.79 7 0 0
-0.77 8.24 -45.01 -102.04 7 0 1
0.63 23.86 -44.33 -33.27 4 0 1
-1.55 23.73 14.95 128.27 7 1 0
1.71 44.32 10.13 8.38 7 0 1
0.86 18.03 -11.6 9.72 5 0 1
-3.36 16.97 66.23 80.83 10 0 0
2.16 11.71 -19.3 -30.35 6 0 1
2.19 18.19 -1.61 20 7 0 1
-5.12 3.51 40.61 2.75 10 0 0
0.26 7.05 -12.44 -76.7 4 0 1
0.94 39.03 -3.53 -29.26 4 0 1
-3.45 12.1 28.25 98.66 9 1 0
0.26 7.05 -12.44 -76.7 4 0 1
-1.64 9.97 17.93 68.33 6 0 1
-30.54 2 -75.62 -304.66 10 0 0
-1.64 9.97 17.93 68.33 6 0 1
0.89 33.97 -1.11 -46.5 8 0 1
8.5 12.49 92.58 20.64 10 0 0
-2.91 3.55 -66.12 -2280.87 6 0 1
13.27 11.2 6.47 11.86 5 0 1
-45.41 -14.41 48.3 -34.01 10 0 0
0.58 8.05 0.73 -58.02 5 0 1
-2.91 3.55 -66.12 -2280.87 6 0 1
-34.97 8.62 68.62 -109.11 9 1 0
6.86 60.46 11.41 87.51 5 0 1
5.18 36.69 11.8 265.44 7 0 1
-22.13 3.89 48.62 -35.67 9 0 0
2.16 11.71 -19.3 -30.35 6 0 1
2.19 18.19 -1.61 20 7 0 1
4.62 7.75 439.85 232.22 7 0 0
6.61 17.28 118.89 742.37 6 0 1
1.34 10.6 -8.63 -57.62 6 0 1
-3.07 1.57 -45.41 -86.28 10 6 0
0.26 9.3 10.29 -132.6 6 0 1
5.21 27.9 12.24 -9.6 2 0 1
-10.95 3.18 -35.88 1.17 8 0 0
2.05 3.99 -68.34 -131.13 6 0 1
0.73 14.35 0 -67.23 5 0 1
-4.69 8.57 24.27 -309.74 9 2 0
3.11 10.75 -48.55 -50.82 3 0 1
0.86 15.2 0.73 47.41 5 0 1
-11.21 13.33 -34 -369.86 10 0 0
-0.23 -0.03 -102.15 110.51 7 0 1
-0.33 9.36 272.19 -24.36 8 0 1
-5.61 100 -94.89 31.85 10 0 0
-0.19 5.47 -38.36 -50.19 6 0 1
0.74 8.07 -5.47 50.09 6 0 1
-5.91 -18.23 -951.11 57.34 9 0 0
-2.01 5.99 396.44 42.35 6 0 1
1.9 8.77 -49.71 -53.56 7 0 1
-11.4 5.57 -42.36 2.26 9 0 0
9.78 20.43 22.84 34.61 5 0 1
4.05 14.52 -52.99 -48.17 6 0 1
-12.05 -2.43 -160.22 -177.48 10 0 0
6.5 14.47 -18.25 28.53 3 0 1
2.63 12.07 3.13 224.81 6 0 1
-1.74 3.29 -43.77 -1601.44 9 3 0
0.27 -1.01 -118.66 -94.51 9 0 1
0.26 9.3 10.29 -132.6 6 0 1
-3.83 -2.34 -125.1 -142.15 6 0 0
-0.33 9.36 272.19 -24.36 8 0 1
-8.86 25.18 29.31 18.42 7 0 1
-1.9 7.97 207.03 -128.86 8 0 0
0.73 14.35 0 -67.23 5 0 1
2.05 3.99 -68.34 -131.13 6 0 1
2.29 15.66 -14.89 -24.02 5 0 0
0.33 9.07 6.08 -19.98 6 0 1
0.29 13.72 689.79 117.49 6 0 1
-27.99 6.43 -60.04 -15272.03 10 2 0
1.9 8.77 -49.71 -53.56 7 0 1
0.07 14.11 -7.87 178.5 7 0 1
-21.1 16.08 -23.44 -2686.34 9 0 0
9.4 19.04 -2.74 11.06 4 0 1
6.95 11.54 36.56 53.24 4 0 1
-23.51 7.39 23.31 -155.29 8 6 0
1.65 11 263.39 120.71 5 0 1
3.26 12.4 39.14 1806.34 6 0 1
-0.2 13.58 -16.87 -97.18 8 5 0
9.4 19.04 -2.74 11.06 4 0 1
0.82 20.92 30.78 59.47 5 0 1
-3.93 9.06 18.53 -1153.33 10 0 0
1.6 8.34 60.64 257.58 7 0 1
-3.35 1.7 -74.91 -217.91 7 0 1
-10.76 6.24 16.23 -221.14 10 0 0
9.82 17.49 -9.26 -53.57 5 0 1
1.84 5.66 7.26 -26.04 9 0 1
1.23 9.95 -7.19 -74.09 5 1 0
11.53 23.88 17.4 35.92 3 0 1
3.71 18.67 -28.65 -37.32 4 0 1
-22.62 -5.58 -150.28 -217.77 9 0 0
3.75 18.38 -3.5 19.14 6 0 1
0.83 23.69 -1.18 -79.52 6 0 1
7.01 35.21 3.01 -3.87 5 0 0
4.94 25.39 -30.9 -42.45 2 0 1
0.83 23.69 -1.18 -79.52 6 0 1
-13.23 2 -87.55 -52.61 10 0 0
3.75 18.38 -3.5 19.14 6 0 1
0.83 23.69 -1.18 -79.52 6 0 1
-0.55 10.5 -3.12 -184.86 9 0 0
0.67 10.75 -13.25 -71.05 5 0 1
1.58 8.81 -16.63 47.63 5 0 1
-6.81 -4.39 -148.74 -4639.23 8 0 0
0.6 12.04 -24.77 -82.12 7 0 1
0.67 10.75 -13.25 -71.05 5 0 1
0.97 8.86 35.19 -64.56 10 1 0
0.74 0.33 -97.31 -50.44 9 0 1
4.32 23.64 115.67 158.93 6 0 1
-0.35 5.52 -38.9 -43.59 6 0 0
18.77 21.24 70.69 213.78 5 0 1
3.24 8.15 -23.34 -29.07 5 0 1
0.18 10.01 -15.56 -82.76 5 0 0
0.6 12.04 -24.77 -82.12 7 0 1
1.58 8.81 -16.63 47.63 5 0 1
-3.45 -0.95 -126.29 -50.41 9 0 0
0.8 10.22 -53.07 -77.37 5 0 1
0.55 7.6 -51.53 -91.71 8 0 1
-6.44 2.01 -61.36 -199.43 10 0 0
1.96 12.34 18.85 -6.92 3 0 1
0 17.15 48.18 100.28 9 0 1
8.15 17.44 -19.84 497.17 10 2 0
-1.35 11.25 -34.31 -464.82 6 0 1
2 14.1 -26.74 -73.15 5 0 1
2.61 8.84 -14.93 -68.16 5 2 0
10.13 13.78 20.29 4.35 2 0 1
9.6 7.14 14.44 71.97 3 0 1
-10.79 -13.05 8.29 -21.44 7 0 0
1.4 30.21 -16.22 -47.81 3 0 1
4.31 19.86 10.42 7.25 4 0 1
-14.21 -4.6 -268.95 -61.2 7 0 0
-11.58 5.32 653 32.06 7 0 1
12.3 13.86 -2.71 -29.84 5 0 1
-48.86 -0.29 -126.34 -76.73 10 0 0
3.97 9.21 -44.05 -66.77 6 0 1
5.94 10.9 -11.1 -51.3 5 0 1
-17.76 15.48 -35.12 -535.39 7 0 0
1.4 30.21 -16.22 -47.81 3 0 1
18.11 29.29 -14.81 -42.33 3 0 1
-1.88 0.58 371.14 71.68 8 0 0
3.74 31.32 -29.26 -18.48 7 0 1
7.12 23.44 -5.04 -5.85 4 0 1
-82.48 -3.44 -251.13 -336.18 9 0 0
12.4 13.4 74.63 463.66 3 0 1
-4.94 11.82 7.5 -156.64 3 1 1
-8.19 -14.03 -270.09 -58.22 8 0 0
-1.53 5.99 32.02 67.99 4 0 1
3.99 22.05 108.86 169.04 5 0 1
-78.39 6.11 -72.05 -1831.99 10 1 0
10.14 41.46 -24.61 -24.72 7 0 1
8.79 23.73 75.89 97.87 5 0 1
-52.33 -1.08 -120.7 -359.6 10 0 0
6 23.17 45.78 22.32 6 0 1
-0.8 4.75 -38.12 -108.25 5 0 1
-4.21 10.76 -22.84 -761.79 7 0 0
1.63 24.41 59.22 2597.98 6 0 1
4.14 16.34 -6.9 -29.69 4 0 1
-3.06 11.1 -5.34 -95.63 7 0 0
15.45 19.54 77 114.63 3 0 1
1.33 13.54 -13.37 -9.4 5 0 1
-1.22 27.46 -24.06 -491.9 10 10 0
0.5 15.15 42.14 413.46 6 0 1
2.51 9.69 -40.47 -70.48 6 0 1
-14.06 64.54 -38.98 -881.36 10 0 0
5.26 45.49 5.86 58.66 4 0 1
1.48 82.08 8.9 21.19 5 0 1
-2.69 11.16 13.22 89.63 9 2 0
-1.51 16.49 18.28 -190.98 9 0 1
3.72 31.63 41.32 1404.3 4 0 1
-90.57 23.43 -28.73 -3899.68 10 3 0
13.36 20.05 107.41 213.94 7 0 1
4.41 18.59 11.08 156.87 6 0 1
-9.57 12.83 36.14 17.88 10 4 0
5.05 91.28 -0.45 3.36 5 0 1
-0.83 20.98 -22.46 -148.87 6 0 1
-14.61 0.2 -92.89 56.24 8 0 0
4.75 27.37 -0.39 -42.92 5 0 1
4.11 9.02 -3 5.82 4 0 1
-9.99 2.16 -69.68 -512.26 9 0 0
-12.44 11.99 0.27 -139.13 7 0 1
6.1 18.05 41.2 39.99 5 0 1
-7.56 -3.54 -121.19 -81.83 9 0 0
3.98 12.43 0.29 -27.95 4 0 1
1.76 12.42 31.47 -45.64 6 0 1
-4.26 3.99 -43.13 -6096.46 10 0 0
3.16 18.11 11.05 73.39 5 0 1
2.54 13.65 25.38 34.8 6 0 1
126.61 -33.48 -206.6 221.24 10 0 0
-3.61 -6.13 -49.71 -54.52 7 0 1
2.63 12.07 3.13 224.81 6 0 1
-17.46 2.53 -71.83 -524.78 9 0 0
-2.01 5.99 396.44 42.35 6 0 1
4.05 14.52 -52.99 -48.17 6 0 1
-5.12 11.95 -14.64 -1380.43 10 3 0
0.29 13.72 689.79 117.49 6 0 1
0.86 6.62 647.42 -48.73 6 0 1
-29.99 3.57 -50.86 45.34 10 0 0
11.61 16.04 39.32 182.8 5 0 1
12.9 8.2 -17.84 113.1 6 0 1
-14.64 -5.48 -232.59 14.51 8 0 0
12.62 15.49 -5.19 -11.24 6 0 1
14.18 29.38 3.07 -9.58 6 0 1
17.15 17.04 0.4 213.03 5 7 0
12.25 26.81 4.38 35.03 5 0 1
5.11 23.76 -2.31 -12.48 4 0 1
-74.86 4.05 -73.43 -431.64 10 0 0
15.47 68.66 14.49 2.35 4 0 1
1.31 9.21 7.98 -57.97 6 0 1
-1.91 6.58 -26.15 -179.17 8 0 0
0.8 10.22 -53.07 -77.37 5 0 1
0.55 7.6 -51.53 -91.71 8 0 1
-7.7 3.72 -43.83 -8.05 10 0 0
0 17.15 48.18 100.28 9 0 1
1.96 12.34 18.85 -6.92 3 0 1
-17.56 7.34 -67.84 -6347.57 10 0 0
11.48 82.82 61.64 91.78 6 0 1
0.13 13.37 12.03 -86.4 7 0 1
-13.51 23.98 13.12 -8.19 9 2 0
0.47 22.13 33.29 -33.64 6 0 1
38.11 68.2 12.91 -2.83 5 0 1
-39.95 22.37 -25.11 -766.53 10 0 0
-1.03 87.61 178.9 104.31 7 3 1
-15.86 40.02 -34.32 3.35 7 0 1
-14.17 6.54 -11.38 -588 10 0 0
4.52 15.78 -37.81 -28.57 6 0 1
13.18 43.66 -8.15 2.73 6 0 1
-36.04 -49.94 -728.54 -6401.43 8 0 0
2.49 18.41 -19.18 -90.88 4 0 1
9.36 22.66 -14.66 -16.67 3 0 1
-19.33 -33.64 -22.01 56.85 6 0 0
1.42 9.67 51.56 157.68 3 0 1
-4.96 2.17 427.12 -18.29 6 0 1
-6.09 5.33 -30.31 -103.78 7 0 0
13.09 12.49 12.82 22.62 5 0 1
13.89 26.83 140.51 629.3 6 0 1
-7.79 9.37 115.07 47.13 10 0 0
1.32 31.91 -38.97 -97.94 7 0 1
4.65 31.12 15.22 251.46 6 0 1
-175.79 2.81 -83.18 -1068.06 10 0 0
-0.63 6.63 3.96 -136.77 8 1 1
4.49 17.55 -10.92 -9.95 5 0 1
-60.5 -11.72 -137.3 -47.52 9 0 0
-3.56 46.43 -8.58 -45.86 7 0 1
3.42 77.81 1.09 -30.62 4 0 1
2 20.39 20.16 -13.52 10 0 0
3.72 31.63 41.32 1404.3 4 0 1
4.56 14.7 -9.16 35.57 3 0 1
-22.98 2.97 -69.61 -341.84 8 1 0
3.67 11 -8.86 -21.02 7 0 1
3.65 34.15 -18.51 -61.79 5 0 1
-15.74 29.07 -56.53 -396.8 9 1 0
4.28 20.05 35.21 10.38 5 0 1
-0.89 11.03 920.73 29.8 8 0 1
-3.97 4.48 -36.95 -175.25 10 3 0
4.03 58.39 14.7 8.94 6 0 1
3.86 18.59 -5.45 -67.56 6 0 1
-16.36 18.63 11.29 -434.7 9 3 0
8.35 20.95 28.67 57.16 5 0 1
6.9 13.71 -21.94 -36.05 6 0 1
-55.84 8.6 -82.3 -1354.3 10 1 0
-4.6 18.7 12.8 -148.26 6 0 1
-4.41 -0.6 -89.79 -185.4 7 0 1
-28.76 -3.56 -191.68 18.83 8 0 0
10.82 18.44 29.13 51.05 7 0 1
17.39 40.03 66.22 202.89 7 1 1
20.47 14.5 150.4 237.44 9 2 0
10.44 61.72 13.64 -13.25 4 0 1
18.74 21.77 80.89 136.69 3 0 1
14.9 26 84.04 209.46 10 0 0
2.63 12.07 3.13 224.81 6 0 1
-6.25 7.19 -57.39 -301.21 8 0 1
4.49 7.27 -9.6 1293.72 9 1 0
13.36 20.05 107.41 213.94 7 0 1
4.41 18.59 11.08 156.87 6 0 1
-26.48 -2.12 -120.43 -984.43 8 0 0
2.37 5.96 119.84 -99.16 5 0 1
0 17.15 48.18 100.28 9 0 1
-7.75 14.79 -6.76 -339.86 10 3 0
-3.22 17.9 -20.21 -109.53 6 0 1
3.41 20.1 -10.41 -50.35 5 1 1
-3.54 12.81 -56.62 -127.07 10 2 0
17.69 15.42 20.61 23.52 2 0 1
10.13 13.78 20.29 4.35 2 0 1
-0.03 16.12 3.35 -95.25 8 0 0
3.41 20.1 -10.41 -50.35 5 1 1
2.99 34.78 18.48 25.64 4 0 1
  2 Comments
Jan on 18 Oct 2015
Edited: Jan on 18 Oct 2015
Please format your code properly (as explained in the documentation of the forum and as discussed hundreds of times in other threads). Do you see that the currently provided code is not readable?
Posting the relevant part of the code would allow us to concentrate on the problem without needing an hour to understand your code. The complete error message would reveal more details.
satheeshkumar satheeshkumar M
Edited: Walter Roberson on 2 Nov 2015
The relevant part of the code:
% TXY;
% Input No. of Normalized Data
% Or use mapminmax;
TrainOb=228 % No. of Training data
% LenghtofTrain=length(OP)
The complete error message is as follows:
TrainOb =
228
??? Index exceeds matrix dimensions.


Answers (1)

Walter Roberson on 2 Nov 2015
At the MATLAB command prompt, give the command
dbstop if error
and then run your program again. When it stops with the error, give the command
dbstack
and show us the output of that.
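For reference, a minimal sketch of that debugging session (command names only; yourscript is a placeholder for the actual script name, and the output depends on where the failure occurs):
dbstop if error      % pause in debug mode at the line that throws the error
yourscript           % re-run the program (placeholder script name)
% ... after MATLAB stops at the failing line:
dbstack              % prints the file name and line number of the failure
size(TXY)            % optionally inspect the loaded matrix from the debug prompt
dbquit               % leave debug mode when finished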
  2 Comments
satheeshkumar satheeshkumar M
Even when using dbstop if error, the same output appears: TrainOb = 228 ??? Index exceeds matrix dimensions. After running dbstack, there was no output.
Walter Roberson on 3 Nov 2015
Edit your file in the MATLAB editor, and at the line
if la == 1
click on the line. Now click on "Run to Cursor". When it stops, click on "Step" repeatedly until the program fails. When it does, tell us which line it failed on.
I cannot test this myself as I do not have the Neural Network toolbox.
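A command-line sketch of the same procedure, assuming the script is saved as grnn_foa.m (a placeholder name) and that the if la == 1 statement sits at line 44 of that file (the real line number will differ):
dbstop in grnn_foa at 44   % equivalent of clicking the line and choosing "Run to Cursor"
dbstop if error            % also stop automatically if an error is thrown while stepping
grnn_foa                   % run the script; execution pauses at the breakpoint
dbstep                     % repeat to execute one line at a time until the failure appears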
