Here is what I did!!
%% row to column
x = X'; % input (always stays the same)
t = yte'; % target; change according to tr, te and Pn
%%
k = 10; % number of folds
[ I, N ] = size(x) % no. of inputs, no. of samples
[ O, N ] = size(t) % [ 1 94 ]
rng('default') % Or substitute your lucky number
ind0 = randperm(N);
% ind0 = 1:N; % For debugging
M = floor(N/k) % 9 = length(valind) = length(tstind)
Ntrn = N-2*M % 76 = length(trnind)
Ntrneq = Ntrn*O % 76 No. of training equations
H = 17 % No. of hidden nodes (fitnet default is 10)
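% Optional sanity check: a single-hidden-layer fitnet has
% Nw = (I+1)*H + (H+1)*O unknown weights and biases; keeping Nw well below
% Ntrneq (the no. of training equations) reduces the risk of overfitting.
Nw = (I+1)*H + (H+1)*O % No. of unknown weights and biases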
%%
net = fitnet(H);
net.divideFcn = 'divideind'; % use the explicit fold indices assigned below
%%
for i = 1:k
rngstate(i) = rng; % save RNG state so this fold's initialization is reproducible
net = configure(net,x,t); % re-initialize weights and biases for this fold
valind = 1 + M*(i-1) : M*i; % contiguous block of M positions for validation
if i==k % last fold: test block wraps around to the start of ind0
tstind = 1:M;
trnind = [ M+1:M*(k-1) , M*k+1:N ];
else
tstind = valind + M; % test block immediately follows the validation block
trnind = [ 1:valind(1)-1 , tstind(end)+1:N ]; % remaining positions are for training
end
trnInd = ind0(trnind); % Note upper & lower case "i"
valInd = ind0(valind);
tstInd = ind0(tstind);
net.divideParam.trainInd = trnInd;
net.divideParam.valInd = valInd;
net.divideParam.testInd = tstInd;
[ net, tr, Ytest ] = train( net, x, t ); % 3rd output of train is the network output for all of x
stopcrit{i,1} = tr.stop; % why training stopped
bestepoch(i,1) = tr.best_epoch;
% e = gsubtract(t,Ytest); % error = target - prediction, if needed
MSE(i,1) = perform(net, t, Ytest); % net.performFcn is 'mse' for fitnet
MAE(i,1) = mae(net, t, Ytest); % mean absolute error for comparison
trOut = Ytest(tr.trainInd); % training output (predicted)
trTarg = t(tr.trainInd); % training target (actual)
vOut = Ytest(tr.valInd); % validation output
vTarg = t(tr.valInd); % validation target
tsOut = Ytest(tr.testInd); % test output
tsTarg = t(tr.testInd); % test target
R2_Train(i,1)= regression(trTarg, trOut)^2;
R2_Val(i,1)= regression(vTarg, vOut)^2;
R2_Test(i,1)= regression(tsTarg, tsOut)^2;
R2_all(i,1)= regression(t,Ytest)^2;
% connection-weight products for input sensitivity analysis
W1 = net.IW{1,1}; % H-by-I input-to-hidden weights
W2 = net.LW{2,1}; % O-by-H hidden-to-output weights
u = abs(W1.*W2'); % elementwise product (implicit expansion, O = 1 here)
us = sum(u,1); % sum over hidden nodes -> 1-by-I
Ri(i,:) = us./sum(us) % relative importance of each input, kept per fold
end
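
To summarize the folds afterwards, something like the following works (a minimal sketch that only assumes the per-fold arrays filled in above):

% cross-validation summary over the k folds
Result = [ MSE MAE R2_Train R2_Val R2_Test R2_all ] % one row per fold
MeanResult = mean(Result,1)
StdResult = std(Result,0,1)
[ bestR2test, ibest ] = max(R2_Test) % fold with the best test R^2
stopcrit{ibest} % stopping criterion of that fold
MeanRi = mean(Ri,1) % average relative importance of each input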