Is there a worked example for Bayesian optimization with a classical neural network?

I noticed that there is an example for "Deep Learning Using Bayesian Optimization" (linked below), but I would like to see an example for a classical neural network.

Accepted Answer

MathWorks Support Team on 22 Jun 2023
Edited: MathWorks Support Team on 23 Jun 2023
Below is a worked example script that uses Bayesian optimization (bayesopt) to tune the hidden layer sizes of a classical shallow neural network created with fitnet.
  • This example was written to be very similar in structure to the example for "Deep Learning Using Bayesian Optimization" (linked in the question).
  • Note that this script is meant to be illustrative and may contain errors.
----------------------------------------------------------------------------------------------------------------------------------------------------------
%% Input-Output Fitting with a Neural Network and Bayesian Optimization
%% Prepare Data
simplefitInputs = [0 0.0498 0.0996 0.1550 0.2103 0.2657 0.3210 0.3825 ...
    0.4440 0.5123 0.5807 0.6566 0.7409 0.8347 0.9388 1.0674 1.2102 1.3690 ...
    1.5453 1.7041 1.8469 1.9898 2.1326 2.2755 2.4183 2.5612 2.7041 2.8469 ...
    2.9898 3.1326 3.2755 3.4342 3.5929 3.7693 3.9457 4.1220 4.2984 4.4748 ...
    4.6511 4.8275 4.9862 5.1450 5.3037 5.4466 5.5894 5.7323 5.8910 6.0674 ...
    6.2437 6.3866 6.5295 6.6452 6.7389 6.8233 6.8992 6.9675 7.0290 7.0905 ...
    7.1458 7.2012 7.2565 7.3119 7.3617 7.4115 7.4613 7.5167 7.5720 7.6273 ...
    7.6827 7.7442 7.8057 7.8740 7.9499 8.0343 8.1384 8.2813 8.4577 8.6005 ...
    8.7162 8.8100 8.8943 8.9702 9.0461 9.1145 9.1828 9.2511 9.3195 9.3878 ...
    9.4637 9.5396 9.6240 9.7177 9.8334 9.9763];
simplefitTargets = [5.0472 5.3578 5.6632 5.9955 6.3195 6.6343 6.9389 ...
    7.2645 7.5753 7.9020 8.2078 8.5216 8.8366 9.1432 9.4289 9.7007 9.8995 ...
    10.0000 9.9786 9.8589 9.6876 9.4722 9.2283 8.9701 8.7099 8.4579 8.2217 ...
    8.0065 7.8153 7.6494 7.5084 7.3793 7.2770 7.1912 7.1319 7.0972 7.0866 ...
    7.1014 7.1440 7.2169 7.3100 7.4287 7.5699 7.7102 7.8544 7.9901 8.1120 ...
    8.1811 8.1424 8.0056 7.7556 7.4618 7.1617 6.8445 6.5222 6.2041 5.8970 ...
    5.5721 5.2664 4.9500 4.6250 4.2937 3.9920 3.6889 3.3863 3.0529 2.7252 ...
    2.4056 2.0968 1.7695 1.4619 1.1469 0.8345 0.5391 0.2564 0.0263 0 0.1787 ...
    0.4413 0.7207 1.0154 1.3092 1.6244 1.9214 2.2266 2.5356 2.8438 3.1469 ...
    3.4723 3.7799 4.0938 4.3986 4.6956 4.9132];
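% Note (not part of the original example): these values appear to correspond
% to the "simplefit" sample dataset shipped with the Deep Learning Toolbox.
% If the sample datasets are installed, the same data can presumably be
% loaded directly with:
% [simplefitInputs,simplefitTargets] = simplefit_dataset;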
%% Choose Variables to Optimize
minHiddenLayerSize = 10;
maxHiddenLayerSize = 20;
hiddenLayerSizeRange = [minHiddenLayerSize maxHiddenLayerSize];
optimVars = [
    optimizableVariable('Layer1Size',hiddenLayerSizeRange,'Type','integer')
    optimizableVariable('Layer2Size',hiddenLayerSizeRange,'Type','integer')];
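% Optional, illustrative extension (not part of the original example): other
% hyperparameters could be exposed to bayesopt in the same way. For instance,
% a categorical variable for the training function could be added, provided
% valErrorFun below is updated to read optVars.TrainFcn:
% optimVars(end+1) = optimizableVariable('TrainFcn', ...
%     {'trainlm','trainbr','trainscg'},'Type','categorical');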
%% Perform Bayesian Optimization
ObjFcn = makeObjFcn(simplefitInputs, simplefitTargets);
BayesObject = bayesopt(ObjFcn,optimVars,...
    'MaxObjectiveEvaluations',30,...
    'MaxTime',8*60*60,...
    'IsObjectiveDeterministic',false,...
    'UseParallel',false);
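% The returned BayesianOptimization object can be inspected directly, for
% example (illustrative):
%   BayesObject.MinObjective      % lowest validation error observed
%   BayesObject.XAtMinObjective   % hyperparameters that produced it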
%% Evaluate Final Network
bestIdx = BayesObject.IndexOfMinimumTrace(end);
fileName = BayesObject.UserDataTrace{bestIdx};
load(fileName);
YPredicted = net(simplefitInputs);
testError = perform(net,simplefitTargets,YPredicted);
testError
valError
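% Optional, illustrative visualization (not part of the original example):
% compare the selected network's outputs against the targets
plotregression(simplefitTargets,YPredicted)
figure
plot(simplefitInputs,simplefitTargets,'o',simplefitInputs,YPredicted,'x')
legend('Targets','Network outputs')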
%% etc.
% ...
%% Objective Function for Optimization
function ObjFcn = makeObjFcn(XTrain,YTrain)
    ObjFcn = @valErrorFun;
    function [valError,cons,fileName] = valErrorFun(optVars)
        % Solve an Input-Output Fitting problem with a Neural Network
        % Choose a Training Function
        % For a list of all training functions type: help nntrain
        % 'trainlm' is usually fastest.
        % 'trainbr' takes longer but may be better for challenging problems.
        % 'trainscg' uses less memory. Suitable in low memory situations.
        trainFcn = 'trainlm';  % Levenberg-Marquardt backpropagation.
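        % (Illustrative note: if a categorical 'TrainFcn' variable were added
        % to optimVars above, this line could instead read:
        % trainFcn = char(optVars.TrainFcn);)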
        % Create a Fitting Network
        layer1_size = optVars.Layer1Size;
        layer2_size = optVars.Layer2Size;
        hiddenLayerSizes = [layer1_size layer2_size];
        net = fitnet(hiddenLayerSizes,trainFcn);
        % Setup Division of Data for Training, Validation, Testing
        net.divideParam.trainRatio = 70/100;
        net.divideParam.valRatio = 15/100;
        net.divideParam.testRatio = 15/100;
        % Train the Network
        net.trainParam.showWindow = false;
        net.trainParam.showCommandLine = false;
        [net,tr] = train(net,XTrain,YTrain);
        % Evaluate the Network on the validation subset chosen by the data
        % division above (tr.valInd contains the validation sample indices)
        YPredicted = net(XTrain);
        valError = perform(net,YTrain(tr.valInd),YPredicted(tr.valInd));
        
        fileName = num2str(valError) + ".mat";
        save(fileName,'net','valError')
        cons = [];
    end
end
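----------------------------------------------------------------------------------------------------------------------------------------------------------
Once the optimization has finished, a natural follow-up is to retrain a network on all of the data using the best hyperparameters found. The following is a minimal sketch, not part of the original example, to be run separately after the script above (it assumes the BayesObject and data variables created there):
bestVars = bestPoint(BayesObject);
finalNet = fitnet([bestVars.Layer1Size bestVars.Layer2Size],'trainlm');
finalNet = train(finalNet,simplefitInputs,simplefitTargets);
finalError = perform(finalNet,simplefitTargets,finalNet(simplefitInputs))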
