Min/max search and sensitivity from design

In this example we evaluate the minimum and maximum values of the output variable of interest from a sample, and then compute the gradient of the limit state function defining that output variable at a particular point.

[12]:
from __future__ import print_function
import openturns as ot
import math as m
[13]:
# Create the marginal distributions of the four input parameters
# (presumably E: Young's modulus, F: load, L: length, I: inertia of a
# beam model — see the symbolic formula below; TODO confirm units)
dist_E = ot.Beta(0.93, 3.2, 2.8e7, 4.8e7)
dist_F = ot.LogNormalMuSigma(30000, 9000, 15000).getDistribution()
dist_L = ot.Uniform(250, 260)
dist_I = ot.Beta(2.5, 4.0, 3.1e2, 4.5e2)
# Assemble the joint input distribution from the marginals
# (ComposedDistribution with no copula argument treats them as independent)
marginals = [dist_E, dist_F, dist_L, dist_I]
distribution = ot.ComposedDistribution(marginals)
[14]:
# Fix the random generator seed so the drawn sample — and therefore the
# min/max points and the gradient evaluation below — is reproducible
# under Restart Kernel -> Run All
ot.RandomGenerator.SetSeed(0)

# Sample inputs: draw 100 points from the joint input distribution
sampleX = distribution.getSample(100)
[15]:
# Create the model as a symbolic function of the four inputs:
# y = F*L^3 / (3*E*I)
# (this is the classic cantilever-beam deflection formula — TODO confirm
# the intended physical interpretation and units)
model = ot.SymbolicFunction(['E', 'F', 'L', 'I'], ['F*L^3/(3*E*I)'])
[16]:
# Evaluate outputs: apply the model to every input point of the sample
sampleY = model(sampleX)
[26]:
# Locate the extreme output values and the input points that produced them.
# Sample.find(value) returns the index of the matching row, which is then
# used to look up the corresponding input point in sampleX.
minY, maxY = sampleY.getMin(), sampleY.getMax()
minX = sampleX[sampleY.find(minY)]
maxX = sampleX[sampleY.find(maxY)]
print('min: y=', minY, ' with x=', minX)
print('max: y=', maxY, ' with x=', maxX)
min: y= [5.9225]  with x= [4.18618e+07,20181.7,251.839,433.395]
max: y= [31.3208]  with x= [2.9115e+07,62844,255.189,381.749]
[31]:
# Get sensitivity at min: gradient of the model with respect to the four
# inputs (E, F, L, I), evaluated at the input point that yielded the
# minimum output — the last expression displays the gradient matrix
model.gradient(minX)
[31]:

[[ -1.41477e-07 ]
[ 0.000293459 ]
[ 0.070551 ]
[ -0.0136654 ]]