
Commit 35e0a4c

Added radialBasisFunc and ReLU for regModel (#619)
* Added the ReLU activation func.
* Added the radialBasisFunc for regModel.
1 parent 1b56851 commit 35e0a4c

2 files changed (+75 -2)


src/adjoint/DARegression/DARegression.C (+69 -2)
@@ -49,6 +49,18 @@ DARegression::DARegression(
     {
         regSubDict.readEntry<labelList>("hiddenLayerNeurons", hiddenLayerNeurons_);
         regSubDict.readEntry<word>("activationFunction", activationFunction_);
+        if (activationFunction_ == "ReLU")
+        {
+            leakyCoeff_ = regSubDict.lookupOrDefault<scalar>("leakyCoeff", 0.0);
+        }
+    }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        nRBFs_ = regSubDict.getLabel("nRBFs");
+    }
+    else
+    {
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
     }
 
     // initialize parameters and give it large values
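For reference, this is roughly the sub-dictionary shape the constructor above expects. The key names (modelType, hiddenLayerNeurons, activationFunction, leakyCoeff, nRBFs) are taken from the reads in this hunk; the sub-dictionary name and the values are illustrative assumptions, written in OpenFOAM dictionary syntax:

regressionModel // hypothetical name; the constructor only sees regSubDict
{
    modelType          neuralNetwork; // or radialBasisFunction
    hiddenLayerNeurons (20 20);
    activationFunction ReLU;          // sigmoid, tanh, or ReLU
    leakyCoeff         0.01;          // optional; defaults to 0.0 (plain ReLU)

    // for modelType radialBasisFunction, nRBFs is required instead:
    // nRBFs           10;
}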
@@ -323,9 +335,16 @@ label DARegression::compute()
                 {
                     layerVals[layerI][neuronI] = (1 - exp(-2 * layerVals[layerI][neuronI])) / (1 + exp(-2 * layerVals[layerI][neuronI]));
                 }
+                else if (activationFunction_ == "ReLU")
+                {
+                    if (layerVals[layerI][neuronI] < 0)
+                    {
+                        layerVals[layerI][neuronI] = leakyCoeff_ * layerVals[layerI][neuronI];
+                    }
+                }
                 else
                 {
-                    FatalErrorIn("") << "activationFunction not valid. Options are: sigmoid and tanh" << abort(FatalError);
+                    FatalErrorIn("") << "activationFunction not valid. Options are: sigmoid, tanh, and ReLU" << abort(FatalError);
                 }
             }
         }
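The new branch gives the network a leaky ReLU: negative activations are scaled by leakyCoeff_ while positive ones pass through unchanged, so leakyCoeff_ = 0.0 (the default) recovers the plain ReLU. A minimal standalone sketch of the same rule, with double standing in for OpenFOAM's scalar:

#include <iostream>

// Leaky ReLU as applied in the hunk above: only the negative branch changes.
double leakyReLU(double x, double leakyCoeff = 0.0)
{
    return (x < 0) ? leakyCoeff * x : x;
}

int main()
{
    std::cout << leakyReLU(2.0, 0.01) << "\n";  // prints 2 (passthrough)
    std::cout << leakyReLU(-2.0, 0.01) << "\n"; // prints -0.02 (scaled)
    return 0;
}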
@@ -350,9 +369,44 @@ label DARegression::compute()
 
         outputField.correctBoundaryConditions();
     }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        List<List<scalar>> inputFields;
+        inputFields.setSize(inputNames_.size());
+
+        this->calcInput(inputFields);
+
+        label nInputs = inputNames_.size();
+
+        // increment of the parameters for each RBF basis
+        label dP = 2 * nInputs + 1;
+
+        forAll(mesh_.cells(), cellI)
+        {
+            scalar outputVal = 0.0;
+            for (label i = 0; i < nRBFs_; i++)
+            {
+                scalar expCoeff = 0.0;
+                for (label j = 0; j < nInputs; j++)
+                {
+                    scalar A = (inputFields[j][cellI] - parameters_[dP * i + 2 * j]) * (inputFields[j][cellI] - parameters_[dP * i + 2 * j]);
+                    scalar B = 2 * parameters_[dP * i + 2 * j + 1] * parameters_[dP * i + 2 * j + 1];
+                    expCoeff += A / B;
+                }
+                outputVal += parameters_[(dP + 1) * i + dP] * exp(-expCoeff);
+            }
+
+            outputField[cellI] = outputScale_ * (outputVal + outputShift_);
+        }
+
+        // check if the output values are valid otherwise fix/bound them
+        fail = this->checkOutput(outputField);
+
+        outputField.correctBoundaryConditions();
+    }
     else
     {
-        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork" << abort(FatalError);
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
     }
 
     return fail;
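Per cell, the model sums Gaussian bases: basis i contributes weight_i * exp(-sum_j (x_j - mean_ij)^2 / (2 * std_ij^2)), and the sum is then shifted and scaled into the output field. A standalone sketch of that evaluation for one cell, assuming the packing implied by nParameters() below (each basis stores nInputs (mean, std) pairs followed by one weight, dP values in total) and using plain C++ containers in place of the OpenFOAM types:

#include <cmath>
#include <vector>

// Gaussian RBF evaluation for one cell's input vector x.
// params is packed per basis as mean_0, std_0, ..., mean_{n-1}, std_{n-1}, weight.
double rbfOutput(
    const std::vector<double>& x,      // one value per input feature
    const std::vector<double>& params, // size nRBFs * (2 * x.size() + 1)
    int nRBFs,
    double outputShift,
    double outputScale)
{
    const int nInputs = static_cast<int>(x.size());
    const int dP = 2 * nInputs + 1; // parameters per RBF basis
    double outputVal = 0.0;
    for (int i = 0; i < nRBFs; i++)
    {
        double expCoeff = 0.0;
        for (int j = 0; j < nInputs; j++)
        {
            const double mean = params[dP * i + 2 * j];
            const double sigma = params[dP * i + 2 * j + 1];
            expCoeff += (x[j] - mean) * (x[j] - mean) / (2.0 * sigma * sigma);
        }
        // the weight is the last of basis i's dP parameters
        outputVal += params[dP * i + 2 * nInputs] * std::exp(-expCoeff);
    }
    return outputScale * (outputVal + outputShift);
}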
@@ -397,6 +451,19 @@ label DARegression::nParameters()
 
         return nParameters;
     }
+    else if (modelType_ == "radialBasisFunction")
+    {
+        label nInputs = inputNames_.size();
+
+        // each RBF has a weight, nInputs mean, and nInputs std
+        label nParameters = nRBFs_ * (2 * nInputs + 1);
+
+        return nParameters;
+    }
+    else
+    {
+        FatalErrorIn("") << "modelType_: " << modelType_ << " not supported. Options are: neuralNetwork and radialBasisFunction" << abort(FatalError);
+    }
 }
 
 label DARegression::checkOutput(volScalarField& outputField)
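As a quick check of the count: with nInputs = 3 and nRBFs = 10, each basis carries 2 * 3 + 1 = 7 parameters (3 means, 3 standard deviations, and 1 weight), so nParameters = 10 * 7 = 70.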

src/adjoint/DARegression/DARegression.H (+6)
@@ -81,6 +81,9 @@ protected:
     /// neural network activation function
     word activationFunction_;
 
+    /// if the ReLU activation function is used, we can prescribe a potentially leaky coefficient
+    scalar leakyCoeff_ = 0.0;
+
     /// the upper bound for the output
     scalar outputUpperBound_;
 
@@ -93,6 +96,9 @@ protected:
     /// default output values
     scalar defaultOutputValue_;
 
+    /// number of radial basis functions
+    label nRBFs_;
+
 public:
     /// Constructors
     DARegression(
