Coverage for /opt/hostedtoolcache/Python/3.11.10/x64/lib/python3.11/site-packages/pysagas/optimisation/optimiser.py: 0%

39 statements  

« prev     ^ index     » next       coverage.py v7.6.4, created at 2024-10-30 04:27 +0000

1import os 

2import numpy as np 

3from pysagas import banner 

4from numpy.typing import ArrayLike 

5from typing import Union, Optional, Dict, Any 

6from pyoptsparse import Optimizer, Optimization 

7 

8 

class ShapeOpt:
    """PySAGAS Shape Optimisation wrapper for pyOptSparse."""

    def __init__(
        self,
        optimiser: Optimizer,
        working_dir: str,
        optimiser_options: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Initialise PySAGAS Shape Optimiser.

        Parameters
        ----------
        optimiser : Optimizer
            The pyoptsparse Optimizer of choice. Note this is instantiated
            here, so pass the class itself rather than an instance.

        working_dir : str
            The name of the working directory. It is created if it does not
            already exist.

        optimiser_options : dict, optional
            The options to pass to the optimiser. The default is None.
        """
        # NOTE: opt_problem is only declared here — this class never assigns
        # it, so it must be attached (e.g. by a subclass) before calling any
        # of the add_* methods or run().
        self.opt_problem: Optimization

        # Pass an empty dict rather than None so the optimiser's options
        # handling never has to iterate over None.
        self.optimiser = optimiser(options=optimiser_options or {})

        # Prepare working directory. makedirs with exist_ok handles nested
        # paths and the race where the directory appears between a check
        # and the creation call.
        self.working_dir = working_dir
        os.makedirs(working_dir, exist_ok=True)

    def add_variables(
        self,
        parameters_dict: Optional[dict] = None,
        name: Optional[str] = None,
        n: Optional[int] = None,
        vartype: Optional[str] = "c",
        initial: Optional[ArrayLike] = None,
        lower_bounds: Optional[Union[ArrayLike, Dict]] = None,
        upper_bounds: Optional[Union[ArrayLike, Dict]] = None,
        **kwargs,
    ):
        """Adds variables to the optimisation problem.

        Parameters
        ----------
        parameters_dict : dict, optional
            The dictionary of nominal parameter values. This can be provided
            instead of adding each parameter individually with the 'name'
            argument. Each parameter becomes its own variable group of
            size 1.

        name : str, optional
            Name of variable group. This name should be unique across all
            the design variable groups. Ignored when 'parameters_dict' is
            provided.

        n : int, optional
            Number of design variables in this variable group.

        vartype : str, optional
            String representing the type of variable. Suitable values for
            type are: 'c' for continuous variables, 'i' for integer values
            and 'd' for discrete selection. The default is 'c'.

        initial : scalar or array, optional
            Starting value for design variables. If it is a scalar, the same
            value is applied to all 'n' variables. Otherwise, it must be an
            iterable object with length equal to 'n'.

        lower_bounds : scalar, array or dict, optional
            Lower bound of variables. When 'parameters_dict' is given, this
            must be a dict keyed by parameter name; parameters missing from
            it are left unbounded. Otherwise, scalar/array usage follows
            pyoptsparse's addVarGroup convention.

        upper_bounds : scalar, array or dict, optional
            Upper bound of variables. Scalar/array/dict usage is the same
            as for 'lower_bounds'.
        """

        def check_bounds(
            parameters: Dict[str, float], bounds: Dict[str, float], sign: int
        ):
            # Validate that each bounded parameter's nominal value sits on
            # the correct side of its bound (sign=+1 for lower bounds,
            # sign=-1 for upper bounds).
            for p, v in parameters.items():
                # Get bound for this parameter; parameters without an
                # explicit bound are unbounded and skipped (previously this
                # raised a TypeError on v - None).
                b = bounds.get(p)
                if b is None:
                    continue
                if np.sign(v - b) != np.sign(sign) and np.sign(v - b) != 0:
                    bound = ">" if np.sign(sign) < 0 else "<"
                    raise Exception(
                        f"Invalid bounds on {p}: " + f"{v:.5f} {bound} {b:.5f}."
                    )

        if parameters_dict:
            # Check bounds
            if lower_bounds is None:
                lower_bounds = {}
            else:
                # Make sure bounds are valid
                check_bounds(parameters_dict, lower_bounds, 1)

            if upper_bounds is None:
                upper_bounds = {}
            else:
                # Make sure bounds are valid
                check_bounds(parameters_dict, upper_bounds, -1)

            # Unpack parameters dict: one variable group of size 1 per
            # parameter, bounded only where a bound was supplied.
            for param, value in parameters_dict.items():
                self.opt_problem.addVarGroup(
                    name=param,
                    nVars=1,
                    varType=vartype,
                    value=value,
                    lower=lower_bounds.get(param),
                    upper=upper_bounds.get(param),
                    **kwargs,
                )

        else:
            self.opt_problem.addVarGroup(
                name=name,
                nVars=n,
                varType=vartype,
                value=initial,
                lower=lower_bounds,
                upper=upper_bounds,
                **kwargs,
            )

    def add_constraints(
        self,
        name: str,
        n: Optional[int] = 1,
        lower: Optional[Union[ArrayLike, Dict]] = None,
        upper: Optional[Union[ArrayLike, Dict]] = None,
        scale: Optional[float] = 1,
    ):
        """Adds constraints to the optimisation problem.

        Parameters
        -----------
        name : str
            The name key of the constraint being added. This must appear
            in the dictionary returned by the objective and Jacobian callback
            returns.

        n : int, optional
            The number of constraints in this group. The default is 1.

        lower : scalar or array, optional
            The lower bound(s) for the constraint. If it is a scalar,
            it is applied to all n constraints. If it is an array,
            the array must be the same length as n.

        upper : scalar or array, optional
            The upper bound(s) for the constraint. If it is a scalar,
            it is applied to all n constraints. If it is an array,
            the array must be the same length as n.

        scale : scalar or array, optional
            A scaling factor for the constraint. It is generally
            advisable to have most optimization constraints around the
            same order of magnitude. The default is 1.
        """
        self.opt_problem.addConGroup(
            name=name,
            nCon=n,
            lower=lower,
            upper=upper,
            scale=scale,
            linear=None,
            wrt=None,
            jac=None,
        )

    # Backwards-compatible alias for the original (misspelt) method name,
    # kept so existing callers do not break.
    add_constriants = add_constraints

    def add_objective(self, name: str):
        """Adds an objective to the optimisation problem.

        Parameters
        -----------
        name : str
            The name key of the objective being added. This must appear
            in the dictionary returned by the objective and Jacobian callback
            returns.
        """
        self.opt_problem.addObj(name)

    def run(self, hotstart_file: str = None):
        """Run ShapeOpt.

        Parameters
        -----------
        hotstart_file : str, optional
            The filepath to the history file, used to hot start the
            optimiser.
        """
        # Print banner
        banner()
        print("\033[4mPySAGAS Shape Optimisation\033[0m".center(50, " "))

        # Run optimiser and retain the solution on the instance.
        # NOTE(review): the history file is written to the current working
        # directory, not self.working_dir — confirm whether that is intended.
        self.sol = self.optimiser(
            self.opt_problem,
            storeHistory="history.hst",
            hotStart=hotstart_file,
        )