From fbf6ad4d2fe3ff592bf485395c269b6c8f55a151 Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 28 Mar 2023 16:48:51 +0200 Subject: [PATCH 001/110] changed child bookkeeping to use a dict instead of list --- pygeo/parameterization/DVGeo.py | 208 ++++++++++++++--------------- tests/reg_tests/test_DVGeometry.py | 4 +- 2 files changed, 106 insertions(+), 106 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index bde454d4..118ff1dc 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -118,7 +118,7 @@ def __init__(self, fileName, *args, isComplex=False, child=False, faceFreeze=Non # Flags to determine if this DVGeometry is a parent or child self.isChild = child - self.children = [] + self.children = OrderedDict() self.iChild = None self.masks = None self.finalized = False @@ -788,13 +788,13 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): self.FFD._updateVolumeCoef() # Now embed into the children: - for child in self.children: + for child in self.children.values(): child.addPointSet(points, ptName, origConfig, **kwargs) self.FFD.calcdPtdCoef(ptName) self.updated[ptName] = False - def addChild(self, childDVGeo): + def addChild(self, childDVGeo, childName=None): """Embed a child FFD into this object. An FFD child is a 'sub' FFD that is fully contained within @@ -823,18 +823,22 @@ def addChild(self, childDVGeo): iChild = len(self.children) childDVGeo.iChild = iChild - self.FFD.attachPoints(childDVGeo.FFD.coef, "child%d_coef" % (iChild)) - self.FFD.calcdPtdCoef("child%d_coef" % (iChild)) + # check if a custom name is provided, if not, we will use the old naming scheme based on the iChild + if childName is None: + childName = f"child{iChild:d}" + + self.FFD.attachPoints(childDVGeo.FFD.coef, f"{childName}_coef") + self.FFD.calcdPtdCoef(f"{childName}_coef") # We must finalize the Child here since we need the ref axis # coefficients if len(childDVGeo.axis) > 0: childDVGeo._finalizeAxis() - self.FFD.attachPoints(childDVGeo.refAxis.coef, "child%d_axis" % (iChild)) - self.FFD.calcdPtdCoef("child%d_axis" % (iChild)) + self.FFD.attachPoints(childDVGeo.refAxis.coef, f"{childName}_axis") + self.FFD.calcdPtdCoef(f"{childName}_axis") # Add the child to the parent and return - self.children.append(childDVGeo) + self.children[childName] = childDVGeo def addGlobalDV(self, dvName, value, func, lower=None, upper=None, scale=1.0, config=None): """ @@ -1598,7 +1602,7 @@ def setDesignVars(self, dvDict): # Now call setValues on the children. This way the # variables will be set on the children - for child in self.children: + for child in self.children.values(): child.setDesignVars(dvDict) def zeroJacobians(self, ptSetNames): @@ -1644,7 +1648,7 @@ def getValues(self): # Now call getValues on the children. 
This way the # returned dictionary will include the variables from # the children - for child in self.children: + for child in self.children.values(): childdvDict = child.getValues() dvDict.update(childdvDict) @@ -1848,17 +1852,17 @@ def update(self, ptSetName, childDelta=True, config=None): self.FFD.coef = self.origFFDCoef.copy() self._setInitialValues() - for iChild in range(len(self.children)): - if len(self.children[iChild].axis) > 0: - self.children[iChild]._finalize() - refaxis_ptSetName = "child%d_axis" % (iChild) + for childName, child in self.children.items(): + if len(child.axis) > 0: + child._finalize() + refaxis_ptSetName = f"{childName}_axis" if refaxis_ptSetName not in self.FFD.embeddedVolumes: - self.FFD.attachPoints(self.children[iChild].refAxis.coef, refaxis_ptSetName) - self.FFD.calcdPtdCoef("child%d_axis" % (iChild)) + self.FFD.attachPoints(child.refAxis.coef, refaxis_ptSetName) + self.FFD.calcdPtdCoef(f"{childName}_axis") else: - for iChild in range(len(self.children)): - if len(self.children[iChild].axis) > 0: - refaxis_ptSetName = "child%d_axis" % (iChild) + for childName, child in self.children.items(): + if len(child.axis) > 0: + refaxis_ptSetName = f"{childName}_axis" if refaxis_ptSetName not in self.FFD.embeddedVolumes: raise Error( f"refaxis {refaxis_ptSetName} cannot be added to child FFD after child is appended to parent" @@ -1946,10 +1950,9 @@ def update(self, ptSetName, childDelta=True, config=None): # Now loop over the children set the FFD and refAxis control # points as evaluated from the parent - for iChild in range(len(self.children)): - child = self.children[iChild] + for childName, child in self.children.items(): child._finalize() - self.applyToChild(iChild) + self.applyToChild(childName) if self.complex: # need to propagate the sensitivity to the children Xfinal here to do this @@ -1957,8 +1960,8 @@ def update(self, ptSetName, childDelta=True, config=None): child._complexifyCoef() child.FFD.coef = child.FFD.coef.astype("D") - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef if dXrefdCoef is not None: for ii in range(3): @@ -1987,16 +1990,16 @@ def update(self, ptSetName, childDelta=True, config=None): Xfinal = self.coordXfer[ptSetName](Xfinal, mode="fwd", applyDisplacement=True) return Xfinal - def applyToChild(self, iChild): + def applyToChild(self, childName): """ This function is used to apply the changes in the parent FFD to the child FFD points and child reference axis points. 
""" - child = self.children[iChild] + child = self.children[childName] # Set FFD points and reference axis points from parent - child.FFD.coef = self.FFD.getAttachedPoints("child%d_coef" % (iChild)) - child.coef = self.FFD.getAttachedPoints("child%d_axis" % (iChild)) + child.FFD.coef = self.FFD.getAttachedPoints(f"{childName}_coef") + child.coef = self.FFD.getAttachedPoints(f"{childName}_axis") # Update the reference axes on the child child.refAxis.coef = child.coef.copy() @@ -2071,8 +2074,8 @@ def convertSensitivityToDict(self, dIdx, out1D=False, useCompositeNames=False): i += dv.nVal # Add in child portion - for iChild in range(len(self.children)): - childdIdx = self.children[iChild].convertSensitivityToDict( + for child in self.children.values(): + childdIdx = child.convertSensitivityToDict( dIdx, out1D=out1D, useCompositeNames=useCompositeNames ) # update the total sensitivities with the derivatives from the child @@ -2137,8 +2140,8 @@ def convertDictToSensitivity(self, dIdxDict): i += dv.nVal # Note: not sure if this works with (multiple) sibling child FFDs - for iChild in range(len(self.children)): - childdIdx = self.children[iChild].convertDictToSensitivity(dIdxDict) + for child in self.children.values(): + childdIdx = child.convertDictToSensitivity(dIdxDict) # update the total sensitivities with the derivatives from the child dIdx += childdIdx return dIdx @@ -2167,8 +2170,8 @@ def getVarNames(self, pyOptSparse=False): names = [self.DVComposite.name] # Call the children recursively - for iChild in range(len(self.children)): - names.extend(self.children[iChild].getVarNames()) + for child in self.children.values(): + names.extend(child.getVarNames()) return names @@ -2530,15 +2533,15 @@ def computeTotalJacobian(self, ptSetName, config=None): self.JT[ptSetName].sort_indices() # Add in child portion - for iChild in range(len(self.children)): + for childName, child in self.children.items(): # Reset control points on child for child link derivatives - self.applyToChild(iChild) - self.children[iChild].computeTotalJacobian(ptSetName, config=config) + self.applyToChild(childName) + child.computeTotalJacobian(ptSetName, config=config) if self.JT[ptSetName] is not None: - self.JT[ptSetName] = self.JT[ptSetName] + self.children[iChild].JT[ptSetName] + self.JT[ptSetName] = self.JT[ptSetName] + child.JT[ptSetName] else: - self.JT[ptSetName] = self.children[iChild].JT[ptSetName] + self.JT[ptSetName] = child.JT[ptSetName] else: self.JT[ptSetName] = None @@ -2641,15 +2644,14 @@ def computeTotalJacobianCS(self, ptSetName, config=None): DVLocalCount += 1 self.DV_listLocal[key].value[j] = refVal - for iChild in range(len(self.children)): - child = self.children[iChild] + for childName, child in self.children.items(): child._finalize() # In the updates applied previously, the FFD points on the children # will have been set as deltas. 
We need to set them as absolute # coordinates based on the changes in the parent before moving down # to the next level - self.applyToChild(iChild) + self.applyToChild(childName) # Now get jacobian from child and add to parent jacobian child.computeTotalJacobianCS(ptSetName, config=config) @@ -2768,7 +2770,7 @@ def addVariablesPyOpt( ) # Add variables from the children - for child in self.children: + for child in self.children.values(): child.addVariablesPyOpt( optProb, globalVars, localVars, sectionlocalVars, spanwiselocalVars, ignoreVars, freezeVars ) @@ -2818,9 +2820,9 @@ def writeRefAxes(self, fileName): if not len(self.axis) == 0: self.refAxis.writeTecplot(gFileName, orig=True, curves=True, coef=True) # Write children axes: - for iChild in range(len(self.children)): - cFileName = fileName + f"_child{iChild:03d}.dat" - self.children[iChild].refAxis.writeTecplot(cFileName, orig=True, curves=True, coef=True) + for childName, child in self.children.items(): + cFileName = fileName + f"_{childName}.dat" + child.refAxis.writeTecplot(cFileName, orig=True, curves=True, coef=True) def writeLinks(self, fileName): """Write the links attaching the control points to the reference axes @@ -2983,7 +2985,7 @@ def getFlattenedChildren(self): Return a flattened list of all DVGeo objects in the family hierarchy. """ flatChildren = [self] - for child in self.children: + for child in self.children.values(): flatChildren += child.getFlattenedChildren() return flatChildren @@ -3338,7 +3340,7 @@ def _getNDVGlobal(self): for key in self.DV_listGlobal: nDV += self.DV_listGlobal[key].nVal - for child in self.children: + for child in self.children.values(): nDV += child._getNDVGlobal() return nDV @@ -3351,7 +3353,7 @@ def _getNDVLocal(self): for key in self.DV_listLocal: nDV += self.DV_listLocal[key].nVal - for child in self.children: + for child in self.children.values(): nDV += child._getNDVLocal() return nDV @@ -3364,7 +3366,7 @@ def _getNDVSectionLocal(self): for key in self.DV_listSectionLocal: nDV += self.DV_listSectionLocal[key].nVal - for child in self.children: + for child in self.children.values(): nDV += child._getNDVSectionLocal() return nDV @@ -3377,7 +3379,7 @@ def _getNDVSpanwiseLocal(self): for key in self.DV_listSpanwiseLocal: nDV += self.DV_listSpanwiseLocal[key].nVal - for child in self.children: + for child in self.children.values(): nDV += child._getNDVSpanwiseLocal() return nDV @@ -3466,7 +3468,7 @@ def _getDVOffsets(self): nDVSW = self._getNDVSpanwiseLocalSelf() # Set the total number of global and local DVs into any children of this parent - for child in self.children: + for child in self.children.values(): # now get the numbers for the current parent child child.nDV_T = self.nDV_T @@ -3511,11 +3513,11 @@ def _update_deriv(self, iDV=0, oneoverh=1.0 / 1e-40, config=None, localDV=False) new_pts[:, 2] = self.FFD.coef[self.ptAttachInd, 2] # set the forward effect of the global design vars in each child - for iChild in range(len(self.children)): + for childName, child in self.children.items(): # get the derivative of the child axis and control points wrt the parent # control points - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef # create a vector with the derivative of the parent control points wrt the # parent global variables @@ -3541,11 +3543,11 @@ def 
_update_deriv(self, iDV=0, oneoverh=1.0 / 1e-40, config=None, localDV=False) dCcdXdv[1::3] = dCcdCoef.dot(tmp[:, 1]) dCcdXdv[2::3] = dCcdCoef.dot(tmp[:, 2]) if localDV and self._getNDVLocalSelf(): - self.children[iChild].dXrefdXdvl[:, iDV] += dXrefdXdv - self.children[iChild].dCcdXdvl[:, iDV] += dCcdXdv + child.dXrefdXdvl[:, iDV] += dXrefdXdv + child.dCcdXdvl[:, iDV] += dCcdXdv elif self._getNDVGlobalSelf(): - self.children[iChild].dXrefdXdvg[:, iDV] += dXrefdXdv.real - self.children[iChild].dCcdXdvg[:, iDV] += dCcdXdv.real + child.dXrefdXdvg[:, iDV] += dXrefdXdv.real + child.dCcdXdvg[:, iDV] += dCcdXdv.real return new_pts def _update_deriv_cs(self, ptSetName, config=None): @@ -3620,18 +3622,17 @@ def _update_deriv_cs(self, ptSetName, config=None): Xfinal[:, ii] += imag_j * dPtdCoef.dot(imag_part[:, ii]) # now do the same for the children - for iChild in range(len(self.children)): + for childName, child in self.children.items(): # first, update the coef. to their new locations - child = self.children[iChild] child._finalize() - self.applyToChild(iChild) + self.applyToChild(childName) # now cast forward the complex part of the derivative child._complexifyCoef() child.FFD.coef = child.FFD.coef.astype("D") - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef if dXrefdCoef is not None: for ii in range(3): @@ -3801,15 +3802,14 @@ def computeTotalJacobianFD(self, ptSetName, config=None): DVLocalCount += 1 self.DV_listLocal[key].value[j] = refVal - for iChild in range(len(self.children)): - child = self.children[iChild] + for childName, child in self.children.items(): child._finalize() # In the updates applied previously, the FFD points on the children # will have been set as deltas. 
We need to set them as absolute # coordinates based on the changes in the parent before moving down # to the next level - self.applyToChild(iChild) + self.applyToChild(childName) # Now get jacobian from child and add to parent jacobian child.computeTotalJacobianFD(ptSetName, config=config) @@ -3834,14 +3834,14 @@ def _attachedPtJacobian(self, config): # Create the storage arrays for the information that must be # passed to the children - for iChild in range(len(self.children)): - N = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].N + for childName, child in self.children.items(): + N = self.FFD.embeddedVolumes[f"{childName}_axis"].N # Derivative of reference axis points wrt global DVs at this level - self.children[iChild].dXrefdXdvg = np.zeros((N * 3, self.nDV_T)) + child.dXrefdXdvg = np.zeros((N * 3, self.nDV_T)) - N = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].N + N = self.FFD.embeddedVolumes[f"{childName}_coef"].N # derivative of the control points wrt the global DVs at this level - self.children[iChild].dCcdXdvg = np.zeros((N * 3, self.nDV_T)) + child.dCcdXdvg = np.zeros((N * 3, self.nDV_T)) # We need to save the reference state so that we can always start # from the same place when calling _update_deriv @@ -3907,12 +3907,12 @@ def _spanwiselocalDVJacobian(self, config=None): # Create the storage arrays for the information that must be # passed to the children - for iChild in range(len(self.children)): - N = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].N - self.children[iChild].dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) + for childName, child in self.children.items(): + N = self.FFD.embeddedVolumes[f"{childName}_axis"].N + child.dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) - N = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].N - self.children[iChild].dCcdXdvl = np.zeros((N * 3, self.nDV_T)) + N = self.FFD.embeddedVolumes[f"{childName}_coef"].N + child.dCcdXdvl = np.zeros((N * 3, self.nDV_T)) iDVSpanwiseLocal = self.nDVSW_count for key in self.DV_listSpanwiseLocal: @@ -3943,11 +3943,11 @@ def _spanwiselocalDVJacobian(self, config=None): # for each node effected by the dv_SWLocal[j] Jacobian[irow, iDVSpanwiseLocal] = 1.0 - for iChild in range(len(self.children)): + for childName, child in self.children.items(): # Get derivatives of child ref axis and FFD control # points w.r.t. 
parent's FFD control points - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef # derivative of Change in the FFD coef due to DVs # same as Jacobian above, but differnt ordering @@ -3969,8 +3969,8 @@ def _spanwiselocalDVJacobian(self, config=None): # TODO: the += here is to allow recursion check this with multiple nesting # levels - self.children[iChild].dXrefdXdvl[:, iDVSpanwiseLocal] += dXrefdXdvl - self.children[iChild].dCcdXdvl[:, iDVSpanwiseLocal] += dCcdXdvl + child.dXrefdXdvl[:, iDVSpanwiseLocal] += dXrefdXdvl + child.dCcdXdvl[:, iDVSpanwiseLocal] += dCcdXdvl iDVSpanwiseLocal += 1 else: @@ -3999,12 +3999,12 @@ def _sectionlocalDVJacobian(self, config=None): # Create the storage arrays for the information that must be # passed to the children - for iChild in range(len(self.children)): - N = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].N - self.children[iChild].dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) + for childName, child in self.children.items(): + N = self.FFD.embeddedVolumes[f"{childName}_axis"].N + child.dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) - N = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].N - self.children[iChild].dCcdXdvl = np.zeros((N * 3, self.nDV_T)) + N = self.FFD.embeddedVolumes[f"{childName}_coef"].N + child.dCcdXdvl = np.zeros((N * 3, self.nDV_T)) iDVSectionLocal = self.nDVSL_count for key in self.DV_listSectionLocal: @@ -4027,9 +4027,9 @@ def _sectionlocalDVJacobian(self, config=None): # rows = range(coef*3,(coef+1)*3) # Jacobian[rows, iDVSectionLocal] += R.dot(T.dot(inFrame)) Jacobian[coef * 3 : (coef + 1) * 3, iDVSectionLocal] += R.dot(T.dot(inFrame)) - for iChild in range(len(self.children)): - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + for childName, child in self.children.items(): + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef tmp = np.zeros(self.FFD.coef.shape, dtype="d") @@ -4048,8 +4048,8 @@ def _sectionlocalDVJacobian(self, config=None): # TODO: the += here is to allow recursion check this with multiple nesting # levels - self.children[iChild].dXrefdXdvl[:, iDVSectionLocal] += dXrefdXdvl - self.children[iChild].dCcdXdvl[:, iDVSectionLocal] += dCcdXdvl + child.dXrefdXdvl[:, iDVSectionLocal] += dXrefdXdvl + child.dCcdXdvl[:, iDVSectionLocal] += dCcdXdvl iDVSectionLocal += 1 else: iDVSectionLocal += self.DV_listSectionLocal[key].nVal @@ -4077,12 +4077,12 @@ def _localDVJacobian(self, config=None): # Create the storage arrays for the information that must be # passed to the children - for iChild in range(len(self.children)): - N = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].N - self.children[iChild].dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) + for childName, child in self.children.items(): + N = self.FFD.embeddedVolumes[f"{childName}_axis"].N + child.dXrefdXdvl = np.zeros((N * 3, self.nDV_T)) - N = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].N - self.children[iChild].dCcdXdvl = np.zeros((N * 3, self.nDV_T)) + N = self.FFD.embeddedVolumes[f"{childName}_coef"].N + child.dCcdXdvl = np.zeros((N * 3, self.nDV_T)) iDVLocal = self.nDVL_count for key in self.DV_listLocal: @@ -4099,11 +4099,11 @@ def 
_localDVJacobian(self, config=None): irow = pt_dv[0] * 3 + pt_dv[1] Jacobian[irow, iDVLocal] = 1.0 - for iChild in range(len(self.children)): + for childName, child in self.children.items(): # Get derivatives of child ref axis and FFD control # points w.r.t. parent's FFD control points - dXrefdCoef = self.FFD.embeddedVolumes["child%d_axis" % (iChild)].dPtdCoef - dCcdCoef = self.FFD.embeddedVolumes["child%d_coef" % (iChild)].dPtdCoef + dXrefdCoef = self.FFD.embeddedVolumes[f"{childName}_axis"].dPtdCoef + dCcdCoef = self.FFD.embeddedVolumes[f"{childName}_coef"].dPtdCoef tmp = np.zeros(self.FFD.coef.shape, dtype="d") @@ -4122,8 +4122,8 @@ def _localDVJacobian(self, config=None): # TODO: the += here is to allow recursion check this with multiple nesting # levels - self.children[iChild].dXrefdXdvl[:, iDVLocal] += dXrefdXdvl - self.children[iChild].dCcdXdvl[:, iDVLocal] += dCcdXdvl + child.dXrefdXdvl[:, iDVLocal] += dXrefdXdvl + child.dCcdXdvl[:, iDVLocal] += dCcdXdvl iDVLocal += 1 else: iDVLocal += self.DV_listLocal[key].nVal @@ -4285,8 +4285,8 @@ def _writeVols(self, handle, vol_counter, solutionTime): vol_counter += 1 # Write children volumes: - for iChild in range(len(self.children)): - vol_counter += self.children[iChild]._writeVols(handle, vol_counter, solutionTime) + for child in self.children.values(): + vol_counter += child._writeVols(handle, vol_counter, solutionTime) return vol_counter diff --git a/tests/reg_tests/test_DVGeometry.py b/tests/reg_tests/test_DVGeometry.py index 19a5e703..1873a79c 100644 --- a/tests/reg_tests/test_DVGeometry.py +++ b/tests/reg_tests/test_DVGeometry.py @@ -1091,11 +1091,11 @@ def test_writeRefAxes(self): # Check that files were written self.assertTrue(os.path.isfile(axesPath + "_parent.dat")) - self.assertTrue(os.path.isfile(axesPath + "_child000.dat")) + self.assertTrue(os.path.isfile(axesPath + "_child0.dat")) # Delete axis files os.remove(axesPath + "_parent.dat") - os.remove(axesPath + "_child000.dat") + os.remove(axesPath + "_child0.dat") def train_ffdSplineOrder(self, train=True, refDeriv=True): self.test_ffdSplineOrder(train=train, refDeriv=refDeriv) From 4aed9bb03278e3b4d2f8cd78bde96420a57bdb3e Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Wed, 29 Mar 2023 09:23:34 +0200 Subject: [PATCH 002/110] added active child dvgeo handling --- pygeo/parameterization/DVGeo.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 118ff1dc..42de57ec 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -658,7 +658,7 @@ def addRefAxis( return nAxis - def addPointSet(self, points, ptName, origConfig=True, coordXfer=None, **kwargs): + def addPointSet(self, points, ptName, origConfig=True, coordXfer=None, activeChildren=None, **kwargs): """ Add a set of coordinates to DVGeometry @@ -752,6 +752,11 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): # compNames is only needed for DVGeometryMulti, so remove it if passed kwargs.pop("compNames", None) + # check if we want a custom subset of child DVGeos + if activeChildren is None: + # take it all + activeChildren = list(self.children.keys()) + # save this name so that we can zero out the jacobians properly self.ptSetNames.append(ptName) self.zeroJacobians([ptName]) @@ -789,7 +794,11 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): # Now embed into the children: for child in self.children.values(): - child.addPointSet(points, ptName, 
origConfig, **kwargs) + # only add to the active children for this pointset. + # when we are getting the points back from children, + # we will check if the ptsetname is already added to the child + if child in activeChildren: + child.addPointSet(points, ptName, origConfig, **kwargs) self.FFD.calcdPtdCoef(ptName) self.updated[ptName] = False @@ -818,6 +827,10 @@ def addChild(self, childDVGeo, childName=None): if childDVGeo.isChild is False: raise Error("Trying to add a child FFD that has NOT been " "created as a child. This operation is illegal.") + # check if this custom name has already been used + if childName in self.children: + raise Error(f"Another child DVGeo has already been added with the name {childName}. Change the name of one of the child FFDs with the same name and try again.") + # Extract the coef from the child FFD and ref axis and embed # them into the parent and compute their derivatives iChild = len(self.children) @@ -1973,7 +1986,10 @@ def update(self, ptSetName, childDelta=True, config=None): child.refAxis.coef = child.coef.copy() child.refAxis._updateCurveCoef() - Xfinal += child.update(ptSetName, childDelta=True, config=config) + if ptSetName in child.points: + # only get this child's contribution if it is active for this pointset + # we don't skip the other computations for consistency + Xfinal += child.update(ptSetName, childDelta=True, config=config) self._unComplexifyCoef() From 9bf213669a7a24dadb3afc8fc3613ab2b11d2b60 Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Wed, 29 Mar 2023 09:55:32 +0200 Subject: [PATCH 003/110] fix bug with dict keys --- pygeo/parameterization/DVGeo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 42de57ec..6173d228 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -793,11 +793,11 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): self.FFD._updateVolumeCoef() # Now embed into the children: - for child in self.children.values(): + for childName, child in self.children.items(): # only add to the active children for this pointset. # when we are getting the points back from children, # we will check if the ptsetname is already added to the child - if child in activeChildren: + if childName in activeChildren: child.addPointSet(points, ptName, origConfig, **kwargs) self.FFD.calcdPtdCoef(ptName) From e7d2e2341917db2a61e817376fd9409c4fc4d13e Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Wed, 5 Apr 2023 00:15:35 +0300 Subject: [PATCH 004/110] added derivatives. 
may not work --- pygeo/parameterization/DVGeo.py | 49 +++++++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 6173d228..e94b7573 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2552,12 +2552,14 @@ def computeTotalJacobian(self, ptSetName, config=None): for childName, child in self.children.items(): # Reset control points on child for child link derivatives self.applyToChild(childName) - child.computeTotalJacobian(ptSetName, config=config) - if self.JT[ptSetName] is not None: - self.JT[ptSetName] = self.JT[ptSetName] + child.JT[ptSetName] - else: - self.JT[ptSetName] = child.JT[ptSetName] + if ptSetName in child.points: + child.computeTotalJacobian(ptSetName, config=config) + + if self.JT[ptSetName] is not None: + self.JT[ptSetName] = self.JT[ptSetName] + child.JT[ptSetName] + else: + self.JT[ptSetName] = child.JT[ptSetName] else: self.JT[ptSetName] = None @@ -4109,11 +4111,28 @@ def _localDVJacobian(self, config=None): ): self.DV_listLocal[key](self.FFD.coef, config) + # figure out if this is a regular local DV or if its a shapeFunc DV + if hasattr(self.DV_listLocal[key], "shapes"): + shapeFunc = True + else: + shapeFunc = False + nVal = self.DV_listLocal[key].nVal for j in range(nVal): - pt_dv = self.DV_listLocal[key].coefList[j] - irow = pt_dv[0] * 3 + pt_dv[1] - Jacobian[irow, iDVLocal] = 1.0 + if shapeFunc: + # get the current shape + shape = self.DV_listLocal[key].shapes[j] + + # loop over entries in shape and set values in jac + for coefInd, direction in shape.items(): + # set the 3 coordinates + for jj in range(3): + irow = coefInd * 3 + jj + Jacobian[irow, iDVLocal] = direction[jj] + else: + pt_dv = self.DV_listLocal[key].coefList[j] + irow = pt_dv[0] * 3 + pt_dv[1] + Jacobian[irow, iDVLocal] = 1.0 for childName, child in self.children.items(): # Get derivatives of child ref axis and FFD control @@ -4123,7 +4142,19 @@ def _localDVJacobian(self, config=None): tmp = np.zeros(self.FFD.coef.shape, dtype="d") - tmp[pt_dv[0], pt_dv[1]] = 1.0 + if shapeFunc: + # get the current shape + shape = self.DV_listLocal[key].shapes[j] + + # loop over entries in shape and set values in jac + for coefInd, direction in shape.items(): + # set the 3 coordinates + for jj in range(3): + irow = coefInd * 3 + jj + tmp[irow, iDVLocal] = direction[jj] + + else: + tmp[pt_dv[0], pt_dv[1]] = 1.0 dXrefdXdvl = np.zeros((dXrefdCoef.shape[0] * 3), "d") dCcdXdvl = np.zeros((dCcdCoef.shape[0] * 3), "d") From 569cde492ea58c8a1aeb75d52a0fd533ef1a60ba Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 11 Apr 2023 08:43:03 +0200 Subject: [PATCH 005/110] undo shape func related changes --- pygeo/parameterization/DVGeo.py | 37 ++++----------------------------- 1 file changed, 4 insertions(+), 33 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index e94b7573..3f465995 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -4111,28 +4111,11 @@ def _localDVJacobian(self, config=None): ): self.DV_listLocal[key](self.FFD.coef, config) - # figure out if this is a regular local DV or if its a shapeFunc DV - if hasattr(self.DV_listLocal[key], "shapes"): - shapeFunc = True - else: - shapeFunc = False - nVal = self.DV_listLocal[key].nVal for j in range(nVal): - if shapeFunc: - # get the current shape - shape = self.DV_listLocal[key].shapes[j] - - # loop over entries in shape and set values in jac - for 
coefInd, direction in shape.items(): - # set the 3 coordinates - for jj in range(3): - irow = coefInd * 3 + jj - Jacobian[irow, iDVLocal] = direction[jj] - else: - pt_dv = self.DV_listLocal[key].coefList[j] - irow = pt_dv[0] * 3 + pt_dv[1] - Jacobian[irow, iDVLocal] = 1.0 + pt_dv = self.DV_listLocal[key].coefList[j] + irow = pt_dv[0] * 3 + pt_dv[1] + Jacobian[irow, iDVLocal] = 1.0 for childName, child in self.children.items(): # Get derivatives of child ref axis and FFD control @@ -4142,19 +4125,7 @@ def _localDVJacobian(self, config=None): tmp = np.zeros(self.FFD.coef.shape, dtype="d") - if shapeFunc: - # get the current shape - shape = self.DV_listLocal[key].shapes[j] - - # loop over entries in shape and set values in jac - for coefInd, direction in shape.items(): - # set the 3 coordinates - for jj in range(3): - irow = coefInd * 3 + jj - tmp[irow, iDVLocal] = direction[jj] - - else: - tmp[pt_dv[0], pt_dv[1]] = 1.0 + tmp[pt_dv[0], pt_dv[1]] = 1.0 dXrefdXdvl = np.zeros((dXrefdCoef.shape[0] * 3), "d") dCcdXdvl = np.zeros((dCcdCoef.shape[0] * 3), "d") From 93a49d4929e0e9e0b1b76df03988bd9bb776007f Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 16 May 2023 23:06:50 -0400 Subject: [PATCH 006/110] black --- pygeo/parameterization/DVGeo.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 3f465995..1b494ad3 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -829,7 +829,9 @@ def addChild(self, childDVGeo, childName=None): # check if this custom name has already been used if childName in self.children: - raise Error(f"Another child DVGeo has already been added with the name {childName}. Change the name of one of the child FFDs with the same name and try again.") + raise Error( + f"Another child DVGeo has already been added with the name {childName}. Change the name of one of the child FFDs with the same name and try again." 
+ ) # Extract the coef from the child FFD and ref axis and embed # them into the parent and compute their derivatives @@ -2091,9 +2093,7 @@ def convertSensitivityToDict(self, dIdx, out1D=False, useCompositeNames=False): # Add in child portion for child in self.children.values(): - childdIdx = child.convertSensitivityToDict( - dIdx, out1D=out1D, useCompositeNames=useCompositeNames - ) + childdIdx = child.convertSensitivityToDict(dIdx, out1D=out1D, useCompositeNames=useCompositeNames) # update the total sensitivities with the derivatives from the child for key in childdIdx: if key in dIdxDict.keys(): From df2dbc5fa20ab5a7182e196df5c7529886e3a25f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Sat, 8 Jul 2023 16:54:41 -0400 Subject: [PATCH 007/110] start dvgeomulti mods --- pygeo/parameterization/DVGeoMulti.py | 97 +++++++++++++++++++++++----- 1 file changed, 81 insertions(+), 16 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index eeec1d16..6c2eba5c 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -6,6 +6,8 @@ from mpi4py import MPI import numpy as np from scipy import sparse +from scipy.spatial.distance import cdist +from copy import deepcopy try: # External modules @@ -48,18 +50,20 @@ class DVGeometryMulti: """ - def __init__(self, comm=MPI.COMM_WORLD, checkDVs=True, debug=False, isComplex=False): + def __init__(self, comm=MPI.COMM_WORLD, fillet=False, checkDVs=True, debug=False, isComplex=False): # Check to make sure pySurf is installed before initializing - if not pysurfInstalled: - raise ImportError("pySurf is not installed and is required to use DVGeometryMulti.") + if not pysurfInstalled and not fillet: + raise ImportError("pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode.") self.compNames = [] self.comps = OrderedDict() self.DVGeoDict = OrderedDict() self.points = OrderedDict() - self.comm = comm self.updated = {} self.intersectComps = [] + + self.comm = comm + self.fillet = fillet self.checkDVs = checkDVs self.debug = debug self.complex = isComplex @@ -72,7 +76,7 @@ def __init__(self, comm=MPI.COMM_WORLD, checkDVs=True, debug=False, isComplex=Fa self.dtype = float self.adtAPI = adtAPI.adtapi - def addComponent(self, comp, DVGeo, triMesh=None, scale=1.0, bbox=None, pointSetKwargs=None): + def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, bbox=None, pointSetKwargs=None): """ Method to add components to the DVGeometryMulti object. @@ -81,12 +85,17 @@ def addComponent(self, comp, DVGeo, triMesh=None, scale=1.0, bbox=None, pointSet comp : str The name of the component. - DVGeo : DVGeometry + DVGeo : DVGeometry, optional The DVGeometry object defining the component FFD. + This is needed in all cases except when the component is a fillet. triMesh : str, optional Path to the triangulated mesh file for this component. + points: str, optional + Path to the .dat file of points for this component. + This unstructured data is only valid for a fillet component or its adjacent surfaces. + scale : float, optional A multiplicative scaling factor applied to the triangulated mesh coordinates. Useful for when the scales of the triangulated and CFD meshes do not match. @@ -100,6 +109,14 @@ def addComponent(self, comp, DVGeo, triMesh=None, scale=1.0, bbox=None, pointSet Keyword arguments to be passed to the component addPointSet call for the triangulated mesh. 
""" + if DVGeo is None and self.fillet is False: + raise Error("DVGeo must be assigned for non-fillet DVGeoMulti components") + + if self.fillet is False and points is not None: + raise Error("Unstructured point data is only valid for fillet DVGeoMulti") + + # if self.fillet is True and triMesh is not None: + # this should work with a triangulated surface it just isn't necessary # Assign mutable defaults if bbox is None: @@ -107,21 +124,35 @@ def addComponent(self, comp, DVGeo, triMesh=None, scale=1.0, bbox=None, pointSet if pointSetKwargs is None: pointSetKwargs = {} - if triMesh is not None: - # We also need to read the triMesh and save the points - nodes, triConn, triConnStack, barsConn = self._readCGNSFile(triMesh) + # we have a fillet so no structured surfaces are necessary + if self.fillet: + # save unstructured point data + surfPts = self._readDATFile(points, surf=True) # scale the nodes - nodes *= scale + surfPts *= scale # add these points to the corresponding dvgeo - DVGeo.addPointSet(nodes, "triMesh", **pointSetKwargs) + if DVGeo is not None: + DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) + + # we have a standard intersection group which has structured surfaces else: - # the user has not provided a triangulated surface mesh for this file - nodes = None - triConn = None - triConnStack = None - barsConn = None + if triMesh is not None: + # We also need to read the triMesh and save the points + nodes, triConn, triConnStack, barsConn = self._readCGNSFile(triMesh) + + # scale the nodes + nodes *= scale + + # add these points to the corresponding dvgeo + DVGeo.addPointSet(nodes, "triMesh", **pointSetKwargs) + else: + # the user has not provided a triangulated surface mesh for this file + nodes = None + triConn = None + triConnStack = None + barsConn = None # we will need the bounding box information later on, so save this here xMin, xMax = DVGeo.FFD.getBounds() @@ -865,6 +896,26 @@ def _readCGNSFile(self, filename): return nodes, triConn, triConnStack, barsConn + def _readDATFile(self, filename, surf=True): + if surf: + surfFile = open(filename, "r") + nElem = int(surfFile.readline()) + surfPts = np.loadtxt(filename, skiprows=1, max_rows=nElem) + + points = surfPts[surfPts[:, 0].argsort()] + + else: + curves = [] + for f in filename: + curvePts = np.loadtxt(f, skiprows=1) + curves.append(curvePts) + + points = curves[0] + for i in range(1, len(filename)): + points = np.vstack((points, points[i])) + + return points + def _computeTotalJacobian(self, ptSetName): """ This routine computes the total jacobian. 
It takes the jacobians @@ -935,6 +986,20 @@ def updateTriMesh(self): self.nodes = self.DVGeo.update("triMesh") +class filletComp: + def __init__(self, name, nodes, xMin, xMax, DVGeo=None): + self.name = name + self.nodes = nodes + + self.xMin = xMin + self.xMax = xMax + + if DVGeo is not None: + self.dvDict = {} + + def updatePoints(self): + self.nodes = self.DVGeo.update("name") + class PointSet: def __init__(self, points, comm): self.points = points From 02f46b1bfd31270d7af8d6e8460608189265bf6a Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 12 Jul 2023 13:30:34 -0400 Subject: [PATCH 008/110] fillet intersection class --- pygeo/parameterization/DVGeoMulti.py | 140 ++++++++++++++++----------- 1 file changed, 81 insertions(+), 59 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 6c2eba5c..1b5af4f5 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -114,7 +114,13 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b if self.fillet is False and points is not None: raise Error("Unstructured point data is only valid for fillet DVGeoMulti") - + + # determine whether this component is a fillet or a normal surface + if DVGeo is None: + filletComp = True + else: + filletComp = False + # if self.fillet is True and triMesh is not None: # this should work with a triangulated surface it just isn't necessary @@ -136,6 +142,11 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b if DVGeo is not None: DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) + nodes = surfPts + triConn = None + triConnStack = None + barsConn = None + # we have a standard intersection group which has structured surfaces else: if triMesh is not None: @@ -172,7 +183,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b xMax[2] = bbox["zmax"] # initialize the component object - self.comps[comp] = component(comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax) + self.comps[comp] = component(comp, DVGeo, nodes, filletComp, triConn, triConnStack, barsConn, xMin, xMax) # add the name to the list self.compNames.append(comp) @@ -184,6 +195,7 @@ def addIntersection( self, compA, compB, + filletComp=None, dStarA=0.2, dStarB=0.2, featureCurves=None, @@ -274,6 +286,9 @@ def addIntersection( """ + if filletComp is not None and not self.fillet: + print("no") + # Assign mutable defaults if featureCurves is None: featureCurves = [] @@ -289,6 +304,7 @@ def addIntersection( CompIntersection( compA, compB, + filletComp, dStarA, dStarB, featureCurves, @@ -328,7 +344,7 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, or when getting the derivatives of the coordinates. compNames : list, optional A list of component names that this point set should be added to. - To ease bookkeepping, an empty point set with ptName will be added to components not in this list. + To ease bookkeeping, an empty point set with ptName will be added to components not in this list. If a list is not provided, this point set is added to all components. comm : MPI.IntraComm, optional Comm that is associated with the added point set. 
Does not @@ -961,10 +977,11 @@ def _computeTotalJacobian(self, ptSetName): class component: - def __init__(self, name, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax): + def __init__(self, name, DVGeo, fillet, nodes, triConn, triConnStack, barsConn, xMin, xMax): # save the info self.name = name self.DVGeo = DVGeo + self.fillet = fillet self.nodes = nodes self.triConn = triConn self.triConnStack = triConnStack @@ -982,23 +999,14 @@ def __init__(self, name, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xM self.triMesh = True def updateTriMesh(self): - # update the triangulated surface mesh - self.nodes = self.DVGeo.update("triMesh") - - -class filletComp: - def __init__(self, name, nodes, xMin, xMax, DVGeo=None): - self.name = name - self.nodes = nodes + if self.fillet: + pointset = self.name + else: + pointset = "trimesh" - self.xMin = xMin - self.xMax = xMax + # update the triangulated surface mesh + self.nodes = self.DVGeo.update(pointset) - if DVGeo is not None: - self.dvDict = {} - - def updatePoints(self): - self.nodes = self.DVGeo.update("name") class PointSet: def __init__(self, points, comm): @@ -1009,7 +1017,48 @@ def __init__(self, points, comm): self.comm = comm -class CompIntersection: +class Intersection: + def __init__(self, dtype, compA, compB): + self.dtype = dtype + self.compA = compA + self.compB = compB + + def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): + """ + This function warps points using the displacements from curve projections. + + pts0: The original surface point coordinates. + ptsNew: Updated surface pt coordinates. We will add the warped delta to these inplace. + indices: Indices of the points that we will use for this operation. + curvePtCoords: Original coordinates of points on curves. + delta: Displacements of the points on curves after projecting them. + + """ + + # Return if curvePtCoords is empty + if not np.any(curvePtCoords): + return + + for j in indices: + # point coordinates with the baseline design + # this is the point we will warp + ptCoords = pts0[j] + + # Vectorized point-based warping + rr = ptCoords - curvePtCoords + LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) + LdefoDist3 = LdefoDist**3 + Wi = LdefoDist3 + den = np.sum(Wi) + interp = np.zeros(3, dtype=self.dtype) + for iDim in range(3): + interp[iDim] = np.sum(Wi * delta[:, iDim]) / den + + # finally, update the coord in place + ptsNew[j] = ptsNew[j] + interp + + +class CompIntersection(Intersection): def __init__( self, compA, @@ -1040,6 +1089,11 @@ def __init__( See the documentation for ``addIntersection`` in DVGeometryMulti for the API. 
""" + # names of compA and compB must be provided + componentA = DVGeo.comps[compA] + componentB = DVGeo.comps[compB] + + super.__init__(dtype, componentA, componentB) # same communicator with DVGeo self.comm = DVGeo.comm @@ -1057,7 +1111,6 @@ def __init__( self.debug = debug # Set real or complex Fortran APIs - self.dtype = dtype if dtype == float: self.adtAPI = adtAPI.adtapi self.curveSearchAPI = curveSearchAPI.curvesearchapi @@ -1106,10 +1159,6 @@ def __init__( self.surfIdxA = {} self.surfIdxB = {} - # names of compA and compB must be provided - self.compA = DVGeo.comps[compA] - self.compB = DVGeo.comps[compB] - self.dStarA = dStarA self.dStarB = dStarB self.points = OrderedDict() @@ -2242,40 +2291,6 @@ def _commCurveProj(self, pts, indices, comm): return nptsg, sizes, curvePtCoords - def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): - """ - This function warps points using the displacements from curve projections. - - pts0: The original surface point coordinates. - ptsNew: Updated surface pt coordinates. We will add the warped delta to these inplace. - indices: Indices of the points that we will use for this operation. - curvePtCoords: Original coordinates of points on curves. - delta: Displacements of the points on curves after projecting them. - - """ - - # Return if curvePtCoords is empty - if not np.any(curvePtCoords): - return - - for j in indices: - # point coordinates with the baseline design - # this is the point we will warp - ptCoords = pts0[j] - - # Vectorized point-based warping - rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) - LdefoDist3 = LdefoDist**3 - Wi = LdefoDist3 - den = np.sum(Wi) - interp = np.zeros(3, dtype=self.dtype) - for iDim in range(3): - interp[iDim] = np.sum(Wi * delta[:, iDim]) / den - - # finally, update the coord in place - ptsNew[j] = ptsNew[j] + interp - def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # seeds for delta deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3)) @@ -3250,3 +3265,10 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): self.projData[ptSetName][comp]["surfaceInd"][surface] = surfaceInd # Initialize a data dictionary for this surface self.projData[ptSetName][surface] = {} + + +class FilletIntersection(Intersection): + def __init__(self, compA, compB, filletComp): + self.compA = compA + self.compB = compB + self.filletComp = filletComp From a2260f0d967e1187f39f6bce0f58cdac71181706 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 12 Jul 2023 13:31:16 -0400 Subject: [PATCH 009/110] common warp --- pygeo/parameterization/DVGeoMulti.py | 62 ++++++++++++++-------------- 1 file changed, 32 insertions(+), 30 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 1b5af4f5..c82736e2 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1057,6 +1057,36 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): # finally, update the coord in place ptsNew[j] = ptsNew[j] + interp + def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): + # seeds for delta + deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3)) + + # Return zeros if curvePtCoords is empty + if not np.any(curvePtCoords): + return deltaBar + + for k in range(dIdPt.shape[0]): + for j in indices: + # point coordinates with the baseline design + # this is the point we will warp + ptCoords = pts0[j] + + # local 
seed for 3 coords + localVal = dIdPt[k, j] + + # Vectorized point-based warping + rr = ptCoords - curvePtCoords + LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) + LdefoDist3 = LdefoDist**3 + Wi = LdefoDist3 + den = np.sum(Wi) + + for iDim in range(3): + deltaBar[k, :, iDim] += Wi * localVal[iDim] / den + + # return the seeds for the delta vector + return deltaBar + class CompIntersection(Intersection): def __init__( @@ -2291,36 +2321,6 @@ def _commCurveProj(self, pts, indices, comm): return nptsg, sizes, curvePtCoords - def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): - # seeds for delta - deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3)) - - # Return zeros if curvePtCoords is empty - if not np.any(curvePtCoords): - return deltaBar - - for k in range(dIdPt.shape[0]): - for j in indices: - # point coordinates with the baseline design - # this is the point we will warp - ptCoords = pts0[j] - - # local seed for 3 coords - localVal = dIdPt[k, j] - - # Vectorized point-based warping - rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) - LdefoDist3 = LdefoDist**3 - Wi = LdefoDist3 - den = np.sum(Wi) - - for iDim in range(3): - deltaBar[k, :, iDim] += Wi * localVal[iDim] / den - - # return the seeds for the delta vector - return deltaBar - def _projectToComponent(self, pts, comp, projDict, surface=None): # We build an ADT for this component using pySurf # Set bounding box for new tree @@ -3272,3 +3272,5 @@ def __init__(self, compA, compB, filletComp): self.compA = compA self.compB = compB self.filletComp = filletComp + + # def project(self): From 8b37009ef3c523a1ec58b59c9f833caedccd92fc Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 12 Jul 2023 17:41:01 -0400 Subject: [PATCH 010/110] --- pygeo/parameterization/DVGeoMulti.py | 34 ++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index c82736e2..51a17816 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1023,6 +1023,14 @@ def __init__(self, dtype, compA, compB): self.compA = compA self.compB = compB + def setSurface(self, comm): + """This set the new udpated surface on which we need to compute the new intersection curve""" + + # get the updated surface coordinates + self._getUpdatedCoords() + + self.seam = self._getIntersectionSeam(comm) + def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): """ This function warps points using the displacements from curve projections. 
@@ -1299,14 +1307,6 @@ def __init__( self.seam0 = self._getIntersectionSeam(self.comm, firstCall=True) self.seam = self.seam0.copy() - def setSurface(self, comm): - """This set the new udpated surface on which we need to compute the new intersection curve""" - - # get the updated surface coordinates - self._getUpdatedCoords() - - self.seam = self._getIntersectionSeam(comm) - def addPointSet(self, pts, ptSetName, compMap, comm): # Figure out which points this intersection object has to deal with @@ -3273,4 +3273,20 @@ def __init__(self, compA, compB, filletComp): self.compB = compB self.filletComp = filletComp - # def project(self): + def addPointSet(self, pts, ptSetName, comm): + nPt = len(pts) + + def update(self, ptSetName, delta): + pts = self.points[ptSetName].pts + + return delta + + def project(self): + self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) + + def _getUpdatedCoords(self): + self.compA.updatePoints() + self.compB.updatePoints() + + def _getIntersectionSeam(self, comm): + nPt = pts.shape[0] From 6ca059f9cdbca13b870019891fe1354a96fb4fbf Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 13 Jul 2023 10:28:35 -0400 Subject: [PATCH 011/110] fillet component class --- pygeo/parameterization/DVGeoMulti.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 51a17816..1e421957 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1008,6 +1008,14 @@ def updateTriMesh(self): self.nodes = self.DVGeo.update(pointset) +class Fillet: + def __init__(self, nodes, compA, compB): + self.name = "fillet" + self.nodes = nodes + self.compA = compA + self.compB = compB + + class PointSet: def __init__(self, points, comm): self.points = points From da851b6b942d6b93ccffadd30e407125a9ac988a Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 13 Jul 2023 17:19:42 -0400 Subject: [PATCH 012/110] new component class --- pygeo/parameterization/DVGeoMulti.py | 59 ++++++++++++++++++++++------ 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 1e421957..54e86675 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1007,13 +1007,18 @@ def updateTriMesh(self): # update the triangulated surface mesh self.nodes = self.DVGeo.update(pointset) +class Comp: + def __init__(self, name, fillet, surfPts, curvePts, DVGeo=None, tol=1e-3): + self.name = name + self.fillet = fillet + self.DVGeo = DVGeo + self.surfPts = surfPts + self.surfPtsOrig = deepcopy(surfPts) + self.curvePts = curvePts -class Fillet: - def __init__(self, nodes, compA, compB): - self.name = "fillet" - self.nodes = nodes - self.compA = compA - self.compB = compB + self.intersectPts = [] + self.intersectInd = [] + self.adjacentComponents = [] class PointSet: @@ -3281,20 +3286,52 @@ def __init__(self, compA, compB, filletComp): self.compB = compB self.filletComp = filletComp - def addPointSet(self, pts, ptSetName, comm): - nPt = len(pts) + def findIntersection(self, surf, curve): # TODO fix this function + nPtSurf = surf.shape[0] + minSurfCurveDist = -np.ones(nPtSurf) + intersectPts = [] + intersectInd = [] + + # check each point in surf + for i, surfPt in enumerate(surf): + surfPt = surf[i] + + # calculate distances between this surface point and the whole curve + ptSurfCurveDist = cdist(surfPt.reshape(1, 3), curve) + + # find minimum of these distances and save it + 
dist2ClosestPt = min(ptSurfCurveDist[0]) + minSurfCurveDist[i] = dist2ClosestPt + + # keep this as an intersection point if it is within tolerance + if dist2ClosestPt < self.tol: + intersectPts.append(surfPt) + intersectInd.append(i) + + intersectPts = np.asarray(intersectPts) + + return intersectPts, intersectInd def update(self, ptSetName, delta): pts = self.points[ptSetName].pts + return delta def project(self): + newCurveCoords = np.vstack((self.compA.intersectPts, self.compB.intersectPts)) + curvePtCoords = np.vstack((self.compA.intPtsOrig, self.compB.intPtsOrig)) + delta = newCurveCoords - curvePtCoords + + ptsNew = deepcopy(self.fillet.surfPtOrig) + ptsNew[self.fillet.compAIntInd] = self.compA.surf[self.intersectInd] + ptsNew[self.fillet.compBIntInd] = self.compB.surf[self.intersectInd] + + pts0 = self.fillet.surfPtOrig + indices = self.indices + self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) def _getUpdatedCoords(self): self.compA.updatePoints() self.compB.updatePoints() - - def _getIntersectionSeam(self, comm): - nPt = pts.shape[0] From de245fbe88ba6f88807edbf0e063b635f8fabce0 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 17 Jul 2023 09:56:14 -0400 Subject: [PATCH 013/110] fillet interface for intersection curve --- pygeo/parameterization/DVGeoMulti.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 54e86675..577047ab 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -324,6 +324,24 @@ def addIntersection( ) ) + def addCurve(self, compName, curveFiles): + if not self.fillet: + print("no") + + curvePts = self._readDATFile(curveFiles, surf=False) + + comp = self.comps[compName] + fillet = self.comps["fillet"] + intersection = self.intersectComps[fillet.intersection] + + filletIntCurve, filletIntInd = intersection.findIntersection(fillet.pts, curvePts) + compIntCurve, compIntInd = intersection.findIntersection(comp.pts, curvePts) + + fillet.intCurve = filletIntCurve + fillet.intInd = filletIntInd + comp.intCurve = compIntCurve + comp.intInd = compIntInd + def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" return self.DVGeoDict @@ -3281,7 +3299,7 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp): + def __init__(self, compA, compB, filletComp, ): self.compA = compA self.compB = compB self.filletComp = filletComp From b72f727d1b1922138bb92d7608529046afadd6e0 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 17 Jul 2023 11:51:19 -0400 Subject: [PATCH 014/110] can add component --- pygeo/parameterization/DVGeoMulti.py | 50 +++++++++++++++++----------- 1 file changed, 30 insertions(+), 20 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 577047ab..83a50086 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -137,12 +137,12 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b # scale the nodes surfPts *= scale + nodes = surfPts # add these points to the corresponding dvgeo if DVGeo is not None: DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) - nodes = surfPts triConn = None triConnStack = None barsConn = None @@ -165,22 +165,27 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b 
triConnStack = None barsConn = None - # we will need the bounding box information later on, so save this here - xMin, xMax = DVGeo.FFD.getBounds() - - # also we might want to modify the bounding box if the user specified any coordinates - if "xmin" in bbox: - xMin[0] = bbox["xmin"] - if "ymin" in bbox: - xMin[1] = bbox["ymin"] - if "zmin" in bbox: - xMin[2] = bbox["zmin"] - if "xmax" in bbox: - xMax[0] = bbox["xmax"] - if "ymax" in bbox: - xMax[1] = bbox["ymax"] - if "zmax" in bbox: - xMax[2] = bbox["zmax"] + if DVGeo is not None: + # we will need the bounding box information later on, so save this here + xMin, xMax = DVGeo.FFD.getBounds() + + # also we might want to modify the bounding box if the user specified any coordinates + if "xmin" in bbox: + xMin[0] = bbox["xmin"] + if "ymin" in bbox: + xMin[1] = bbox["ymin"] + if "zmin" in bbox: + xMin[2] = bbox["zmin"] + if "xmax" in bbox: + xMax[0] = bbox["xmax"] + if "ymax" in bbox: + xMax[1] = bbox["ymax"] + if "zmax" in bbox: + xMax[2] = bbox["zmax"] + + else: + xMin = 3 * [0] + xMax = 3 * [0] # initialize the component object self.comps[comp] = component(comp, DVGeo, nodes, filletComp, triConn, triConnStack, barsConn, xMin, xMax) @@ -327,7 +332,7 @@ def addIntersection( def addCurve(self, compName, curveFiles): if not self.fillet: print("no") - + curvePts = self._readDATFile(curveFiles, surf=False) comp = self.comps[compName] @@ -1025,6 +1030,7 @@ def updateTriMesh(self): # update the triangulated surface mesh self.nodes = self.DVGeo.update(pointset) + class Comp: def __init__(self, name, fillet, surfPts, curvePts, DVGeo=None, tol=1e-3): self.name = name @@ -3299,7 +3305,12 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp, ): + def __init__( + self, + compA, + compB, + filletComp, + ): self.compA = compA self.compB = compB self.filletComp = filletComp @@ -3333,7 +3344,6 @@ def findIntersection(self, surf, curve): # TODO fix this function def update(self, ptSetName, delta): pts = self.points[ptSetName].pts - return delta def project(self): From 5d29cb3d4d7ebb35cb7fb3bdcd2ec95aa333ae9c Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 17 Jul 2023 15:15:09 -0400 Subject: [PATCH 015/110] clean up inits --- pygeo/parameterization/DVGeoMulti.py | 63 +++++++++++++++------------- 1 file changed, 35 insertions(+), 28 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 83a50086..a7af011f 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -165,6 +165,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConnStack = None barsConn = None + # get bounding box information if this component has a DVGeo (is not a fillet) if DVGeo is not None: # we will need the bounding box information later on, so save this here xMin, xMax = DVGeo.FFD.getBounds() @@ -183,6 +184,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b if "zmax" in bbox: xMax[2] = bbox["zmax"] + # dummy bounding box for fillet component else: xMin = 3 * [0] xMax = 3 * [0] @@ -291,22 +293,26 @@ def addIntersection( """ - if filletComp is not None and not self.fillet: - print("no") + # initialize a fillet intersection object + if self.fillet: + if filletComp is None: + print("no") - # Assign mutable defaults - if featureCurves is None: - featureCurves = [] - if curveEpsDict is None: - curveEpsDict = {} - if 
trackSurfaces is None: - trackSurfaces = {} - if excludeSurfaces is None: - excludeSurfaces = {} - - # just initialize the intersection object - self.intersectComps.append( - CompIntersection( + inter = FilletIntersection(compA, compB, filletComp, self, self.dtype) + + # initialize a standard intersection object + else: + # Assign mutable defaults + if featureCurves is None: + featureCurves = [] + if curveEpsDict is None: + curveEpsDict = {} + if trackSurfaces is None: + trackSurfaces = {} + if excludeSurfaces is None: + excludeSurfaces = {} + + inter = CompIntersection( compA, compB, filletComp, @@ -327,7 +333,8 @@ def addIntersection( self.debug, self.dtype, ) - ) + + self.intersectComps.append(inter) def addCurve(self, compName, curveFiles): if not self.fillet: @@ -1055,10 +1062,15 @@ def __init__(self, points, comm): class Intersection: - def __init__(self, dtype, compA, compB): + def __init__(self, dtype, compA, compB, DVGeo): self.dtype = dtype - self.compA = compA - self.compB = compB + + componentA = DVGeo.comps[compA] + componentB = DVGeo.comps[compB] + + self.compA = componentA + self.compB = componentB + self.DVGeo = DVGeo def setSurface(self, comm): """This set the new udpated surface on which we need to compute the new intersection curve""" @@ -3305,15 +3317,10 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__( - self, - compA, - compB, - filletComp, - ): - self.compA = compA - self.compB = compB - self.filletComp = filletComp + def __init__(self, compA, compB, filletComp, DVGeo, dtype): + super().__init__(compA, compB, DVGeo, dtype) + + self.filletComp = DVGeo.comps[filletComp] def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] From 56c89ce0110a31f85b0899d3c36f0bb85774a491 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 17 Jul 2023 15:52:47 -0400 Subject: [PATCH 016/110] can add curves to components in intersection --- pygeo/parameterization/DVGeoMulti.py | 45 ++++++++++++---------------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index a7af011f..cf1664b6 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -146,6 +146,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConn = None triConnStack = None barsConn = None + component = Comp(comp, filletComp, nodes, DVGeo) # we have a standard intersection group which has structured surfaces else: @@ -165,8 +166,6 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConnStack = None barsConn = None - # get bounding box information if this component has a DVGeo (is not a fillet) - if DVGeo is not None: # we will need the bounding box information later on, so save this here xMin, xMax = DVGeo.FFD.getBounds() @@ -184,13 +183,10 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b if "zmax" in bbox: xMax[2] = bbox["zmax"] - # dummy bounding box for fillet component - else: - xMin = 3 * [0] - xMax = 3 * [0] + component = component(comp, DVGeo, nodes, filletComp, triConn, triConnStack, barsConn, xMin, xMax) # initialize the component object - self.comps[comp] = component(comp, DVGeo, nodes, filletComp, triConn, triConnStack, barsConn, xMin, xMax) + self.comps[comp] = component # add the name to the list self.compNames.append(comp) @@ -298,7 +294,7 @@ def addIntersection( 
if filletComp is None: print("no") - inter = FilletIntersection(compA, compB, filletComp, self, self.dtype) + inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype) # initialize a standard intersection object else: @@ -315,7 +311,6 @@ def addIntersection( inter = CompIntersection( compA, compB, - filletComp, dStarA, dStarB, featureCurves, @@ -344,10 +339,10 @@ def addCurve(self, compName, curveFiles): comp = self.comps[compName] fillet = self.comps["fillet"] - intersection = self.intersectComps[fillet.intersection] + intersection = fillet.intersection - filletIntCurve, filletIntInd = intersection.findIntersection(fillet.pts, curvePts) - compIntCurve, compIntInd = intersection.findIntersection(comp.pts, curvePts) + filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) + compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) fillet.intCurve = filletIntCurve fillet.intInd = filletIntInd @@ -1045,8 +1040,8 @@ def __init__(self, name, fillet, surfPts, curvePts, DVGeo=None, tol=1e-3): self.DVGeo = DVGeo self.surfPts = surfPts self.surfPtsOrig = deepcopy(surfPts) - self.curvePts = curvePts + self.intersection = None self.intersectPts = [] self.intersectInd = [] self.adjacentComponents = [] @@ -1062,15 +1057,18 @@ def __init__(self, points, comm): class Intersection: - def __init__(self, dtype, compA, compB, DVGeo): - self.dtype = dtype - + def __init__(self, compA, compB, distTol, DVGeo, dtype): componentA = DVGeo.comps[compA] componentB = DVGeo.comps[compB] self.compA = componentA self.compB = componentB self.DVGeo = DVGeo + self.dtype = dtype + self.distTol = distTol + + # same communicator with DVGeo + self.comm = DVGeo.comm def setSurface(self, comm): """This set the new udpated surface on which we need to compute the new intersection curve""" @@ -1176,14 +1174,8 @@ def __init__( See the documentation for ``addIntersection`` in DVGeometryMulti for the API. 
""" - # names of compA and compB must be provided - componentA = DVGeo.comps[compA] - componentB = DVGeo.comps[compB] - super.__init__(dtype, componentA, componentB) - - # same communicator with DVGeo - self.comm = DVGeo.comm + super.__init__(compA, compB, distTol, DVGeo, dtype) # define epsilon as a small value to prevent division by zero in the inverse distance computation self.eps = 1e-20 @@ -3317,10 +3309,11 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp, DVGeo, dtype): - super().__init__(compA, compB, DVGeo, dtype) + def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype): + super().__init__(compA, compB, distTol, DVGeo, dtype) self.filletComp = DVGeo.comps[filletComp] + self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] @@ -3340,7 +3333,7 @@ def findIntersection(self, surf, curve): # TODO fix this function minSurfCurveDist[i] = dist2ClosestPt # keep this as an intersection point if it is within tolerance - if dist2ClosestPt < self.tol: + if dist2ClosestPt < self.distTol: intersectPts.append(surfPt) intersectInd.append(i) From f9ce47fd769051ea51f06c25ff6310053f72aaef Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 17 Jul 2023 16:55:26 -0400 Subject: [PATCH 017/110] distinguish between intersection curves on fillet --- pygeo/parameterization/DVGeoMulti.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index cf1664b6..625ed7ff 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -344,10 +344,10 @@ def addCurve(self, compName, curveFiles): filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) - fillet.intCurve = filletIntCurve - fillet.intInd = filletIntInd - comp.intCurve = compIntCurve - comp.intInd = compIntInd + fillet.intersectPts.update({compName: filletIntCurve}) + fillet.intersectInd.update({compName: filletIntInd}) + comp.intersectPts.update({"fillet": compIntCurve}) + comp.intersectInd.update({"fillet": compIntInd}) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -953,7 +953,7 @@ def _readDATFile(self, filename, surf=True): points = curves[0] for i in range(1, len(filename)): - points = np.vstack((points, points[i])) + points = np.vstack((points, curves[i])) return points @@ -1042,9 +1042,8 @@ def __init__(self, name, fillet, surfPts, curvePts, DVGeo=None, tol=1e-3): self.surfPtsOrig = deepcopy(surfPts) self.intersection = None - self.intersectPts = [] - self.intersectInd = [] - self.adjacentComponents = [] + self.intersectPts = {} + self.intersectInd = {} class PointSet: From 2da39cd9b3c9258b8586ef03c331cb8b47dc687b Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 18 Jul 2023 12:50:53 -0400 Subject: [PATCH 018/110] add pointset to multi for fillet intersection component --- pygeo/parameterization/DVGeoMulti.py | 99 +++++++++++++++++----------- 1 file changed, 61 insertions(+), 38 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 625ed7ff..1c016ea1 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -8,6 +8,8 @@ 
from scipy import sparse from scipy.spatial.distance import cdist from copy import deepcopy +from pyspline.utils import closeTecplot, openTecplot, writeTecplot1D + try: # External modules @@ -146,7 +148,6 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConn = None triConnStack = None barsConn = None - component = Comp(comp, filletComp, nodes, DVGeo) # we have a standard intersection group which has structured surfaces else: @@ -166,6 +167,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConnStack = None barsConn = None + if not filletComp: # we will need the bounding box information later on, so save this here xMin, xMax = DVGeo.FFD.getBounds() @@ -182,8 +184,13 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b xMax[1] = bbox["ymax"] if "zmax" in bbox: xMax[2] = bbox["zmax"] + else: + xMin = xMax = 3 * [0] - component = component(comp, DVGeo, nodes, filletComp, triConn, triConnStack, barsConn, xMin, xMax) + if self.fillet: + component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax) + else: + component = component(comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax) # initialize the component object self.comps[comp] = component @@ -387,35 +394,36 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, # before we do anything, we need to create surface ADTs # for which the user provided triangulated meshes - for comp in compNames: - # check if we have a trimesh for this component - if self.comps[comp].triMesh: - # Now we build the ADT using pySurf - # Set bounding box for new tree - BBox = np.zeros((2, 3)) - useBBox = False - - # dummy connectivity data for quad elements since we have all tris - quadConn = np.zeros((0, 4)) - - # Compute set of nodal normals by taking the average normal of all - # elements surrounding the node. This allows the meshing algorithms, - # for instance, to march in an average direction near kinks. - nodal_normals = self.adtAPI.adtcomputenodalnormals( - self.comps[comp].nodes.T, self.comps[comp].triConnStack.T, quadConn.T - ) - self.comps[comp].nodal_normals = nodal_normals.T - - # Create new tree (the tree itself is stored in Fortran level) - self.adtAPI.adtbuildsurfaceadt( - self.comps[comp].nodes.T, - self.comps[comp].triConnStack.T, - quadConn.T, - BBox.T, - useBBox, - MPI.COMM_SELF.py2f(), - comp, - ) + if not self.fillet: + for comp in compNames: + # check if we have a trimesh for this component + if self.comps[comp].triMesh: + # Now we build the ADT using pySurf + # Set bounding box for new tree + BBox = np.zeros((2, 3)) + useBBox = False + + # dummy connectivity data for quad elements since we have all tris + quadConn = np.zeros((0, 4)) + + # Compute set of nodal normals by taking the average normal of all + # elements surrounding the node. This allows the meshing algorithms, + # for instance, to march in an average direction near kinks. 
+ nodal_normals = self.adtAPI.adtcomputenodalnormals( + self.comps[comp].nodes.T, self.comps[comp].triConnStack.T, quadConn.T + ) + self.comps[comp].nodal_normals = nodal_normals.T + + # Create new tree (the tree itself is stored in Fortran level) + self.adtAPI.adtbuildsurfaceadt( + self.comps[comp].nodes.T, + self.comps[comp].triConnStack.T, + quadConn.T, + BBox.T, + useBBox, + MPI.COMM_SELF.py2f(), + comp, + ) # create the pointset class self.points[ptName] = PointSet(points, comm=comm) @@ -464,6 +472,8 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, # set a high initial distance dMin2 = 1e10 + # TODO need to skip this or have some alternate version of assigning a point that's in 2 + # loop over the components for comp in compNames: # check if this component is in the projList @@ -511,7 +521,7 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, ) # using the mapping array, add the pointsets to respective DVGeo objects - for comp in self.compNames: + for comp in compNames: compMap = self.points[ptName].compMap[comp] self.comps[comp].DVGeo.addPointSet(points[compMap], ptName, **kwargs) @@ -522,9 +532,10 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, IC.addPointSet(points, ptName, self.points[ptName].compMap, comm) # finally, we can deallocate the ADTs - for comp in compNames: - if self.comps[comp].triMesh: - self.adtAPI.adtdeallocateadts(comp) + if not self.fillet: + for comp in compNames: + if self.comps[comp].triMesh: + self.adtAPI.adtdeallocateadts(comp) # mark this pointset as up to date self.updated[ptName] = False @@ -888,6 +899,10 @@ def getLocalIndex(self, iVol, comp): DVGeo = self.comps[comp].DVGeo return DVGeo.FFD.topo.lIndex[iVol].copy() + def writeCompSurf(self, compName, fileName): + comp = self.comps[compName] + comp.writeSurf(fileName) + # ---------------------------------------------------------------------- # THE REMAINDER OF THE FUNCTIONS NEED NOT BE CALLED BY THE USER # ---------------------------------------------------------------------- @@ -1002,11 +1017,10 @@ def _computeTotalJacobian(self, ptSetName): class component: - def __init__(self, name, DVGeo, fillet, nodes, triConn, triConnStack, barsConn, xMin, xMax): + def __init__(self, name, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax): # save the info self.name = name self.DVGeo = DVGeo - self.fillet = fillet self.nodes = nodes self.triConn = triConn self.triConnStack = triConnStack @@ -1034,17 +1048,26 @@ def updateTriMesh(self): class Comp: - def __init__(self, name, fillet, surfPts, curvePts, DVGeo=None, tol=1e-3): + def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): self.name = name self.fillet = fillet self.DVGeo = DVGeo self.surfPts = surfPts self.surfPtsOrig = deepcopy(surfPts) + self.xMin = xMin + self.xMax = xMax + self.intersection = None self.intersectPts = {} self.intersectInd = {} + def writeSurf(self, fileName): + fileName = f"{fileName}_{self.name}_surf.dat" + f = openTecplot(fileName, 3) + writeTecplot1D(f, self.name, self.surfPts) + closeTecplot(f) + class PointSet: def __init__(self, points, comm): From 98ae0efa87384a4a66bdd6c0ba44799bb50187de Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 18 Jul 2023 16:49:03 -0400 Subject: [PATCH 019/110] reworking component --- pygeo/parameterization/DVGeoMulti.py | 116 +++++++++++++++------------ 1 file changed, 64 insertions(+), 52 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py 
b/pygeo/parameterization/DVGeoMulti.py index 1c016ea1..5f6e76b9 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -52,20 +52,23 @@ class DVGeometryMulti: """ - def __init__(self, comm=MPI.COMM_WORLD, fillet=False, checkDVs=True, debug=False, isComplex=False): + def __init__(self, comm=MPI.COMM_WORLD, filletIntersection=False, checkDVs=True, debug=False, isComplex=False): # Check to make sure pySurf is installed before initializing - if not pysurfInstalled and not fillet: + if not pysurfInstalled and not filletIntersection: raise ImportError("pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode.") self.compNames = [] self.comps = OrderedDict() + # separate out fillets to avoid checking whether components in comps have a DVGeo everywhere + self.filletNames = [] + self.fillets = OrderedDict() self.DVGeoDict = OrderedDict() self.points = OrderedDict() self.updated = {} self.intersectComps = [] self.comm = comm - self.fillet = fillet + self.filletIntersection = filletIntersection self.checkDVs = checkDVs self.debug = debug self.complex = isComplex @@ -111,10 +114,10 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b Keyword arguments to be passed to the component addPointSet call for the triangulated mesh. """ - if DVGeo is None and self.fillet is False: + if DVGeo is None and self.filletIntersection is False: raise Error("DVGeo must be assigned for non-fillet DVGeoMulti components") - if self.fillet is False and points is not None: + if self.filletIntersection is False and points is not None: raise Error("Unstructured point data is only valid for fillet DVGeoMulti") # determine whether this component is a fillet or a normal surface @@ -123,7 +126,7 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b else: filletComp = False - # if self.fillet is True and triMesh is not None: + # if self.filletIntersection is True and triMesh is not None: # this should work with a triangulated surface it just isn't necessary # Assign mutable defaults @@ -132,8 +135,31 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b if pointSetKwargs is None: pointSetKwargs = {} + # fillets don't have a DVGeo to get a bounding box from and don't need it + if filletComp: + xMin = xMax = 3 * [0] + + # standard components need a bounding box to associate points with each FFD + else: + # we will need the bounding box information later on, so save this here + xMin, xMax = DVGeo.FFD.getBounds() + + # also we might want to modify the bounding box if the user specified any coordinates + if "xmin" in bbox: + xMin[0] = bbox["xmin"] + if "ymin" in bbox: + xMin[1] = bbox["ymin"] + if "zmin" in bbox: + xMin[2] = bbox["zmin"] + if "xmax" in bbox: + xMax[0] = bbox["xmax"] + if "ymax" in bbox: + xMax[1] = bbox["ymax"] + if "zmax" in bbox: + xMax[2] = bbox["zmax"] + # we have a fillet so no structured surfaces are necessary - if self.fillet: + if self.filletIntersection: # save unstructured point data surfPts = self._readDATFile(points, surf=True) @@ -141,13 +167,13 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b surfPts *= scale nodes = surfPts - # add these points to the corresponding dvgeo - if DVGeo is not None: + # add these points to the corresponding dvgeo unless this component is a fillet + if not filletComp: DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) - triConn = None - triConnStack = None - barsConn = None + # 
initialize the component object + # a different class is used for fillets & their adjacent components + component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax) # we have a standard intersection group which has structured surfaces else: @@ -167,38 +193,19 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b triConnStack = None barsConn = None - if not filletComp: - # we will need the bounding box information later on, so save this here - xMin, xMax = DVGeo.FFD.getBounds() - - # also we might want to modify the bounding box if the user specified any coordinates - if "xmin" in bbox: - xMin[0] = bbox["xmin"] - if "ymin" in bbox: - xMin[1] = bbox["ymin"] - if "zmin" in bbox: - xMin[2] = bbox["zmin"] - if "xmax" in bbox: - xMax[0] = bbox["xmax"] - if "ymax" in bbox: - xMax[1] = bbox["ymax"] - if "zmax" in bbox: - xMax[2] = bbox["zmax"] - else: - xMin = xMax = 3 * [0] - - if self.fillet: - component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax) - else: + # initialize the component object component = component(comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax) - # initialize the component object - self.comps[comp] = component - - # add the name to the list - self.compNames.append(comp) + # add component object to the dictionary and list keeping track of components + # if this component is a fillet (no DVGeo) put in a separate list to avoid unnecessary checks for a DVGeo later + if filletComp: + self.fillets[comp] = component + self.filletNames.append(comp) + else: + self.comps[comp] = component + self.compNames.append(comp) - # also save the DVGeometry pointer in the dictionary we pass back + # also save the DVGeometry pointer in the dictionary we pass back (fillet entry will be None) self.DVGeoDict[comp] = DVGeo def addIntersection( @@ -297,7 +304,7 @@ def addIntersection( """ # initialize a fillet intersection object - if self.fillet: + if self.filletIntersection: if filletComp is None: print("no") @@ -339,7 +346,7 @@ def addIntersection( self.intersectComps.append(inter) def addCurve(self, compName, curveFiles): - if not self.fillet: + if not self.filletIntersection: print("no") curvePts = self._readDATFile(curveFiles, surf=False) @@ -394,7 +401,7 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, # before we do anything, we need to create surface ADTs # for which the user provided triangulated meshes - if not self.fillet: + if not self.filletIntersection: for comp in compNames: # check if we have a trimesh for this component if self.comps[comp].triMesh: @@ -524,6 +531,7 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, for comp in compNames: compMap = self.points[ptName].compMap[comp] self.comps[comp].DVGeo.addPointSet(points[compMap], ptName, **kwargs) + self.comps[comp].surfPtsName = ptName # check if this pointset will get the IC treatment if applyIC: @@ -532,7 +540,7 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, IC.addPointSet(points, ptName, self.points[ptName].compMap, comm) # finally, we can deallocate the ADTs - if not self.fillet: + if not self.filletIntersection: for comp in compNames: if self.comps[comp].triMesh: self.adtAPI.adtdeallocateadts(comp) @@ -1038,13 +1046,8 @@ def __init__(self, name, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xM self.triMesh = True def updateTriMesh(self): - if self.fillet: - pointset = self.name - else: - pointset = "trimesh" - # update the triangulated surface mesh - self.nodes = 
self.DVGeo.update(pointset) + self.nodes = self.DVGeo.update("trimesh") class Comp: @@ -1054,7 +1057,6 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): self.DVGeo = DVGeo self.surfPts = surfPts self.surfPtsOrig = deepcopy(surfPts) - self.xMin = xMin self.xMax = xMax @@ -1062,6 +1064,12 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): self.intersectPts = {} self.intersectInd = {} + def updateSurfPts(self): + if self.fillet: + print("no") + else: + self.surfPts = self.DVGeo.update("datPts") + def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" f = openTecplot(fileName, 3) @@ -3363,6 +3371,10 @@ def findIntersection(self, surf, curve): # TODO fix this function return intersectPts, intersectInd + def addPointSet(self, pts, ptSetName, compMap, comm): + # Save the affected indices and the factor in the little dictionary + self.points[ptSetName] = [pts.copy(), indices, factors, comm] + def update(self, ptSetName, delta): pts = self.points[ptSetName].pts From 9b8ae1a53405a90449ed9fde4ccb9912045189b1 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 20 Jul 2023 10:22:12 -0400 Subject: [PATCH 020/110] DV changes update all pointsets --- pygeo/parameterization/DVGeoMulti.py | 72 +++++++++++++++++++--------- 1 file changed, 50 insertions(+), 22 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 5f6e76b9..244041b2 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -60,7 +60,7 @@ def __init__(self, comm=MPI.COMM_WORLD, filletIntersection=False, checkDVs=True, self.compNames = [] self.comps = OrderedDict() # separate out fillets to avoid checking whether components in comps have a DVGeo everywhere - self.filletNames = [] + self.filletNames = [] self.fillets = OrderedDict() self.DVGeoDict = OrderedDict() self.points = OrderedDict() @@ -345,14 +345,14 @@ def addIntersection( self.intersectComps.append(inter) - def addCurve(self, compName, curveFiles): + def addCurve(self, compName, filletName, curveFiles): if not self.filletIntersection: print("no") curvePts = self._readDATFile(curveFiles, surf=False) comp = self.comps[compName] - fillet = self.comps["fillet"] + fillet = self.fillets[filletName] intersection = fillet.intersection filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) @@ -360,8 +360,8 @@ def addCurve(self, compName, curveFiles): fillet.intersectPts.update({compName: filletIntCurve}) fillet.intersectInd.update({compName: filletIntInd}) - comp.intersectPts.update({"fillet": compIntCurve}) - comp.intersectInd.update({"fillet": compIntInd}) + comp.intersectPts.update({filletName: compIntCurve}) + comp.intersectInd.update({filletName: compIntInd}) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -623,12 +623,13 @@ def update(self, ptSetName, config=None): newPts = np.zeros((self.points[ptSetName].nPts, 3), dtype=self.dtype) # we first need to update all points with their respective DVGeo objects - for comp in self.compNames: - ptsComp = self.comps[comp].DVGeo.update(ptSetName) + for compName, comp in self.comps.items(): + if ptSetName in comp.DVGeo.ptSetNames: # TODO make this work with old Multi + ptsComp = comp.DVGeo.update(ptSetName) - # now save this info with the pointset mapping - ptMap = self.points[ptSetName].compMap[comp] - newPts[ptMap] = ptsComp + # now save this info with the pointset mapping + ptMap = 
self.points[ptSetName].compMap[compName] + newPts[ptMap] = ptsComp # get the delta delta = newPts - self.points[ptSetName].points @@ -911,6 +912,29 @@ def writeCompSurf(self, compName, fileName): comp = self.comps[compName] comp.writeSurf(fileName) + def writePointSet(self, name, fileName, solutionTime=None): + """ + Write a given point set to a tecplot file + + Parameters + ---------- + name : str + The name of the point set to write to a file + + fileName : str + Filename for tecplot file. Should have no extension, an + extension will be added + SolutionTime : float + Solution time to write to the file. This could be a fictitious time to + make visualization easier in tecplot. + """ + + coords = self.update(name) + fileName = fileName + "_%s.dat" % name + f = openTecplot(fileName, 3) + writeTecplot1D(f, name, coords, solutionTime) + closeTecplot(f) + # ---------------------------------------------------------------------- # THE REMAINDER OF THE FUNCTIONS NEED NOT BE CALLED BY THE USER # ---------------------------------------------------------------------- @@ -1087,7 +1111,7 @@ def __init__(self, points, comm): class Intersection: - def __init__(self, compA, compB, distTol, DVGeo, dtype): + def __init__(self, compA, compB, distTol, DVGeo, dtype, project): componentA = DVGeo.comps[compA] componentB = DVGeo.comps[compB] @@ -1100,6 +1124,11 @@ def __init__(self, compA, compB, distTol, DVGeo, dtype): # same communicator with DVGeo self.comm = DVGeo.comm + self.points = OrderedDict() + + # flag to determine if we want to project nodes after intersection treatment + self.projectFlag = project + def setSurface(self, comm): """This set the new udpated surface on which we need to compute the new intersection curve""" @@ -1205,7 +1234,7 @@ def __init__( """ - super.__init__(compA, compB, distTol, DVGeo, dtype) + super.__init__(compA, compB, distTol, DVGeo, dtype, project) # define epsilon as a small value to prevent division by zero in the inverse distance computation self.eps = 1e-20 @@ -1270,7 +1299,6 @@ def __init__( self.dStarA = dStarA self.dStarB = dStarB - self.points = OrderedDict() # Make surface names lowercase self.trackSurfaces = {} @@ -1356,9 +1384,6 @@ def __init__( self.distTol = distTol - # flag to determine if we want to project nodes after intersection treatment - self.projectFlag = project - # create the dictionary if we are projecting. 
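# For orientation, a minimal sketch of the fillet workflow these patches are building,
# assuming the API as it stands at this point in the series. The component names, file
# names, and the distTol value are placeholders, and the fillet is assumed to be the
# component added without a DVGeo.
from pygeo import DVGeometry, DVGeometryMulti

DVGeoA = DVGeometry("compA_ffd.xyz")
DVGeoB = DVGeometry("compB_ffd.xyz")

DVGeoMul = DVGeometryMulti(filletIntersection=True)

# standard components carry their own DVGeo; the fillet is defined by point data only
DVGeoMul.addComponent("compA", DVGeo=DVGeoA, points="compA_surf.dat")
DVGeoMul.addComponent("compB", DVGeo=DVGeoB, points="compB_surf.dat")
DVGeoMul.addComponent("fillet", points="fillet_surf.dat")

# pair the two components through the fillet and mark the curves they share with it
DVGeoMul.addIntersection("compA", "compB", filletComp="fillet", distTol=1e-3)
DVGeoMul.addCurve("compA", "fillet", ["compA_fillet_curve.dat"])
DVGeoMul.addCurve("compB", "fillet", ["compB_fillet_curve.dat"])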
if project: self.projData = {} @@ -3339,10 +3364,10 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype): - super().__init__(compA, compB, distTol, DVGeo, dtype) + def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True): + super().__init__(compA, compB, distTol, DVGeo, dtype, project) - self.filletComp = DVGeo.comps[filletComp] + self.filletComp = DVGeo.fillets[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self def findIntersection(self, surf, curve): # TODO fix this function @@ -3373,7 +3398,7 @@ def findIntersection(self, surf, curve): # TODO fix this function def addPointSet(self, pts, ptSetName, compMap, comm): # Save the affected indices and the factor in the little dictionary - self.points[ptSetName] = [pts.copy(), indices, factors, comm] + self.points[ptSetName] = [pts.copy(), [], [], comm] def update(self, ptSetName, delta): pts = self.points[ptSetName].pts @@ -3394,6 +3419,9 @@ def project(self): self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) + def _getIntersectionSeam(self, comm): + pass + def _getUpdatedCoords(self): - self.compA.updatePoints() - self.compB.updatePoints() + self.compA.updateSurfPts() + self.compB.updateSurfPts() From 99b5a858ac6417e25a16e25482dd6e846245601d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 20 Jul 2023 14:57:02 -0400 Subject: [PATCH 021/110] warping fillet almost works probably --- pygeo/parameterization/DVGeoMulti.py | 51 ++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 244041b2..579ab419 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -358,11 +358,15 @@ def addCurve(self, compName, filletName, curveFiles): filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) - fillet.intersectPts.update({compName: filletIntCurve}) fillet.intersectInd.update({compName: filletIntInd}) - comp.intersectPts.update({filletName: compIntCurve}) comp.intersectInd.update({filletName: compIntInd}) + fillet.intersectPtsOrig.update({compName: filletIntCurve}) + comp.intersectPtsOrig.update({filletName: compIntCurve}) + + fillet.intersectPts.update({compName: filletIntCurve}) + comp.intersectPts.update({filletName: compIntCurve}) + def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" return self.DVGeoDict @@ -909,7 +913,10 @@ def getLocalIndex(self, iVol, comp): return DVGeo.FFD.topo.lIndex[iVol].copy() def writeCompSurf(self, compName, fileName): - comp = self.comps[compName] + if compName in self.compNames: + comp = self.comps[compName] + elif compName in self.filletNames: + comp = self.fillets[compName] comp.writeSurf(fileName) def writePointSet(self, name, fileName, solutionTime=None): @@ -1085,6 +1092,7 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): self.xMax = xMax self.intersection = None + self.intersectPtsOrig = {} self.intersectPts = {} self.intersectInd = {} @@ -3401,20 +3409,41 @@ def addPointSet(self, pts, ptSetName, compMap, comm): self.points[ptSetName] = [pts.copy(), [], [], comm] def update(self, ptSetName, delta): - pts = self.points[ptSetName].pts + n = self.filletComp.surfPtsOrig.shape[0] + 
indices = np.linspace(0, n - 1, n, dtype=int) + indices = np.delete( + indices, self.filletComp.intersectInd[self.compA.name] + self.filletComp.intersectInd[self.compB.name] + ) + self.indices = indices + + # make sure each component's subset of points on the intersection curve is up to date + self.compA.intersectPts[self.filletComp.name] = self.compA.surfPts[ + self.compA.intersectInd[self.filletComp.name] + ] + self.compB.intersectPts[self.filletComp.name] = self.compB.surfPts[ + self.compB.intersectInd[self.filletComp.name] + ] + # don't update the delta because we aren't remeshing return delta - def project(self): - newCurveCoords = np.vstack((self.compA.intersectPts, self.compB.intersectPts)) - curvePtCoords = np.vstack((self.compA.intPtsOrig, self.compB.intPtsOrig)) + def project(self, ptSetName, newPts): + # redo the delta because this is how the fillet was initially set up + # TODO maybe stop doing this + newCurveCoords = np.vstack( + (self.compA.intersectPts[self.filletComp.name], self.compB.intersectPts[self.filletComp.name]) + ) + curvePtCoords = np.vstack( + (self.compA.intersectPtsOrig[self.filletComp.name], self.compB.intersectPtsOrig[self.filletComp.name]) + ) delta = newCurveCoords - curvePtCoords - ptsNew = deepcopy(self.fillet.surfPtOrig) - ptsNew[self.fillet.compAIntInd] = self.compA.surf[self.intersectInd] - ptsNew[self.fillet.compBIntInd] = self.compB.surf[self.intersectInd] + # modify the intersection curves of the fillet + ptsNew = deepcopy(self.filletComp.surfPtsOrig) + ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[self.filletComp.name] + ptsNew[self.filletComp.intersectInd[self.compB.name]] = self.compB.intersectPts[self.filletComp.name] - pts0 = self.fillet.surfPtOrig + pts0 = self.filletComp.surfPtsOrig indices = self.indices self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) From 03e3cabf829f204a46253c4b01d7ed4ccb15ee86 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 20 Jul 2023 14:57:38 -0400 Subject: [PATCH 022/110] okay yeah that would help. fillet is WARPED now --- pygeo/parameterization/DVGeoMulti.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 579ab419..2f2f4cf4 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3448,6 +3448,8 @@ def project(self, ptSetName, newPts): self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) + self.filletComp.surfPts = ptsNew + def _getIntersectionSeam(self, comm): pass From bce13204d418bdc98c14ed106707e864e93f833c Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 29 Aug 2023 10:20:32 +0200 Subject: [PATCH 023/110] fixes for tests --- pygeo/constraints/DVCon.py | 36 +++++++++++---------------- pygeo/parameterization/DVGeo.py | 10 ++++---- tests/reg_tests/test_DVConstraints.py | 10 ++++---- 3 files changed, 25 insertions(+), 31 deletions(-) diff --git a/pygeo/constraints/DVCon.py b/pygeo/constraints/DVCon.py index 10c14b7e..f5d939fd 100644 --- a/pygeo/constraints/DVCon.py +++ b/pygeo/constraints/DVCon.py @@ -1842,7 +1842,7 @@ def addLeTeConstraints( indSetB=None, name=None, config=None, - childIdx=None, + childName=None, comp=None, DVGeoName="default", ): @@ -1911,10 +1911,8 @@ def addLeTeConstraints( The DVGeo configuration to apply this constraint to. Must be either None which will apply to *ALL* the local DV groups or a single string specifying a particular configuration. 
- childIdx : int - The zero-based index of the child FFD, if this constraint is being applied to a child FFD. - The index is defined by the order in which you add the child FFD to the parent. - For example, the first child FFD has an index of 0, the second an index of 1, and so on. + childName : str + Name of the child FFD, if this constraint is being applied to a child FFD. comp: str The component name if using DVGeometryMulti. @@ -1944,8 +1942,8 @@ def addLeTeConstraints( else: DVGeo = self.DVGeometries[DVGeoName].DVGeoDict[comp] - if childIdx is not None: - DVGeo = DVGeo.children[childIdx] + if childName is not None: + DVGeo = DVGeo.children[childName] # Now determine what type of specification we have: if volID is not None and faceID is not None: @@ -2041,7 +2039,7 @@ def addLinearConstraintsShape( upper=0, name=None, config=None, - childIdx=None, + childName=None, comp=None, DVGeoName="default", ): @@ -2095,10 +2093,8 @@ def addLinearConstraintsShape( The DVGeo configuration to apply this constraint to. Must be either None which will apply to *ALL* the local DV groups or a single string specifying a particular configuration. - childIdx : int - The zero-based index of the child FFD, if this constraint is being applied to a child FFD. - The index is defined by the order in which you add the child FFD to the parent. - For example, the first child FFD has an index of 0, the second an index of 1, and so on. + childName : str + Name of the child FFD, if this constraint is being applied to a child FFD. comp: str The component name if using DVGeometryMulti. @@ -2122,8 +2118,8 @@ def addLinearConstraintsShape( else: DVGeo = self.DVGeometries[DVGeoName].DVGeoDict[comp] - if childIdx is not None: - DVGeo = DVGeo.children[childIdx] + if childName is not None: + DVGeo = DVGeo.children[childName] if len(indSetA) != len(indSetB): raise Error("The length of the supplied indices are not " "the same length") @@ -3137,7 +3133,7 @@ def addCurvatureConstraint1D( ) def addMonotonicConstraints( - self, key, slope=1.0, name=None, start=0, stop=-1, config=None, childIdx=None, comp=None, DVGeoName="default" + self, key, slope=1.0, name=None, start=0, stop=-1, config=None, childName=None, comp=None, DVGeoName="default" ): """ Add monotonic constraints to a given design variable. @@ -3165,10 +3161,8 @@ def addMonotonicConstraints( The DVGeo configuration to apply this constraint to. Must be either None which will apply to *ALL* the local DV groups or a single string specifying a particular configuration. - childIdx : int - The zero-based index of the child FFD, if this constraint is being applied to a child FFD. - The index is defined by the order in which you add the child FFD to the parent. - For example, the first child FFD has an index of 0, the second an index of 1, and so on. + childName : str + Name of the child FFD, if this constraint is being applied to a child FFD. comp: str The component name if using DVGeometryMulti. 
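To make the renamed keyword concrete, a minimal sketch of how a constraint targets a child FFD after this change. The FFD file names and the "flap" label are placeholders, and the DVConstraints object is assumed to already be set up with this DVGeometry; the constraint calls themselves mirror the updated tests.

    from pygeo import DVGeometry, DVConstraints

    DVGeo = DVGeometry("parent_ffd.xyz")
    childFFD = DVGeometry("child_ffd.xyz", child=True)
    DVGeo.addChild(childFFD, childName="flap")

    DVCon = DVConstraints()
    DVCon.setDVGeo(DVGeo)

    # constraints now select the child FFD by name instead of by index
    DVCon.addLeTeConstraints(0, "iLow", childName="flap")
    # assumes a global DV named "twist" was added to the child DVGeo
    DVCon.addMonotonicConstraints("twist", childName="flap")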
@@ -3183,8 +3177,8 @@ def addMonotonicConstraints( else: DVGeo = self.DVGeometries[DVGeoName].DVGeoDict[comp] - if childIdx is not None: - DVGeo = DVGeo.children[childIdx] + if childName is not None: + DVGeo = DVGeo.children[childName] if name is None: conName = "%s_monotonic_constraint_%d" % (self.name, len(self.linearCon)) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 5048eb3a..68520c47 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2671,7 +2671,7 @@ def computeTotalJacobianCS(self, ptSetName, config=None): if self.nPts[ptSetName] is None: self.nPts[ptSetName] = len(self.update(ptSetName).flatten()) - for child in self.children: + for child in self.children.values(): child.nPts[ptSetName] = self.nPts[ptSetName] DVGlobalCount, DVLocalCount, DVSecLocCount, DVSpanLocCount = self._getDVOffsets() @@ -3826,7 +3826,7 @@ def computeTotalJacobianFD(self, ptSetName, config=None): if self.nPts[ptSetName] is None: self.nPts[ptSetName] = len(coords0.flatten()) - for child in self.children: + for child in self.children.values(): child.nPts[ptSetName] = self.nPts[ptSetName] DVGlobalCount, DVLocalCount, DVSecLocCount, DVSpanLocCount = self._getDVOffsets() @@ -4439,7 +4439,7 @@ def checkDerivatives(self, ptSetName): print("Computing Analytic Jacobian...") self.zeroJacobians(ptSetName) - for child in self.children: + for child in self.children.values(): child.zeroJacobians(ptSetName) self.computeTotalJacobian(ptSetName) @@ -4580,7 +4580,7 @@ def checkDerivatives(self, ptSetName): DVCountSpanLoc += 1 self.DV_listSpanwiseLocal[key].value[j] = refVal - for child in self.children: + for child in self.children.values(): child.checkDerivatives(ptSetName) def printDesignVariables(self): @@ -4602,7 +4602,7 @@ def printDesignVariables(self): for i in range(self.DV_listSectionLocal[dsl].nVal): print("%20.15f" % (self.DV_listSectionLocal[dsl].value[i])) - for child in self.children: + for child in self.children.values(): child.printDesignVariables() def sectionFrame(self, sectionIndex, sectionTransform, sectionLink, ivol=0, orient0=None, orient2="svd"): diff --git a/tests/reg_tests/test_DVConstraints.py b/tests/reg_tests/test_DVConstraints.py index 4599ba44..54cb2f57 100644 --- a/tests/reg_tests/test_DVConstraints.py +++ b/tests/reg_tests/test_DVConstraints.py @@ -474,8 +474,8 @@ def test_LeTe(self, train=False, refDeriv=False): DVGeo, DVCon = self.generate_dvgeo_dvcon("c172") if self.child: - DVCon.addLeTeConstraints(0, "iLow", childIdx=0) - DVCon.addLeTeConstraints(0, "iHigh", childIdx=0) + DVCon.addLeTeConstraints(0, "iLow", childName="child0") + DVCon.addLeTeConstraints(0, "iHigh", childName="child0") elif self.multi: DVCon.addLeTeConstraints(0, "iLow", comp="deforming") DVCon.addLeTeConstraints(0, "iHigh", comp="deforming") @@ -669,7 +669,7 @@ def test_linearConstraintShape(self, train=False, refDeriv=False): indSetB.append(lIndex[i, 0, 1]) if self.child: DVCon.addLinearConstraintsShape( - indSetA, indSetB, factorA=1.0, factorB=-1.0, lower=0, upper=0, childIdx=0 + indSetA, indSetB, factorA=1.0, factorB=-1.0, lower=0, upper=0, childName="child0" ) elif self.multi: DVCon.addLinearConstraintsShape( @@ -791,8 +791,8 @@ def test_monotonic(self, train=False, refDeriv=False): DVGeo, DVCon = self.generate_dvgeo_dvcon("c172") if self.child: - DVCon.addMonotonicConstraints("twist", childIdx=0) - DVCon.addMonotonicConstraints("twist", start=1, stop=2, childIdx=0) + DVCon.addMonotonicConstraints("twist", childName="child0") + 
DVCon.addMonotonicConstraints("twist", start=1, stop=2, childName="child0") elif self.multi: DVCon.addMonotonicConstraints("twist", comp="deforming") DVCon.addMonotonicConstraints("twist", start=1, stop=2, comp="deforming") From 57204dc08497b6234c53f05cce6b4a2a6a4eb499 Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 29 Aug 2023 10:35:27 +0200 Subject: [PATCH 024/110] added docstring --- pygeo/parameterization/DVGeo.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 68520c47..a3cdf1e6 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -743,7 +743,18 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): coords_new = np.dot(coords_new, rot_mat.T) return coords_new - + activeChildren : list + List of names of the child FFDs that should be used with this pointset. + For example, lets say there are 3 child FFDs with names a, b, and c. + When a pointset is added to this DVGeo object, it will always be added + to the parent. Then, if the activeChildren argument is none, the pointset + will also be added to all 3 child FFDs. If activeChildren argument is ["a", "b"], + then the pointset will only be added to the children named "a" and "b", and not "c". + If activeChildren argument is an empty dictionary, i.e. [], the pointset wont be added + to any of the child FFDs. When a pointset is added to a child FFD, the changes in the + child FFD is added to the displacement of the pointset. If it is not added to a child, + the changes from that child is not included in this pointset. This is useful to + control the effect of different child FFDs on different pointsets. """ # compNames is only needed for DVGeometryMulti, so remove it if passed From a37410f81d71876be7d9962a94c5a4889e1f296b Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 29 Aug 2023 12:00:34 +0200 Subject: [PATCH 025/110] change mphys wrapper to use child names instead of indices --- pygeo/mphys/mphys_dvgeo.py | 84 ++++++++++++++++++-------------------- 1 file changed, 39 insertions(+), 45 deletions(-) diff --git a/pygeo/mphys/mphys_dvgeo.py b/pygeo/mphys/mphys_dvgeo.py index ea533314..0a73f8ae 100644 --- a/pygeo/mphys/mphys_dvgeo.py +++ b/pygeo/mphys/mphys_dvgeo.py @@ -125,7 +125,7 @@ def compute(self, inputs, outputs): # next time the jacvec product routine is called self.update_jac = True - def nom_addChild(self, ffd_file, DVGeoName=None): + def nom_addChild(self, ffd_file, DVGeoName=None, childName=None): # if we have multiple DVGeos use the one specified by name DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) @@ -137,12 +137,12 @@ def nom_addChild(self, ffd_file, DVGeoName=None): # Add child FFD child_ffd = DVGeometry(ffd_file, child=True) - DVGeo.addChild(child_ffd) + DVGeo.addChild(child_ffd, childName=childName) # Embed points from parent if not already done for pointSet in DVGeo.points: - if pointSet not in DVGeo.children[-1].points: - DVGeo.children[-1].addPointSet(DVGeo.points[pointSet], pointSet) + if pointSet not in child_ffd.points: + child_ffd.addPointSet(DVGeo.points[pointSet], pointSet) def nom_add_discipline_coords(self, discipline, points=None, DVGeoName=None): # TODO remove one of these methods to keep only one method to add pointsets @@ -167,11 +167,11 @@ def nom_addPointSet(self, points, ptName, add_output=True, DVGeoName=None, **kwa self.omPtSetList.append(ptName) if isinstance(DVGeo, DVGeometry): - for i in range(len(DVGeo.children)): + for child in 
DVGeo.children.values(): # Embed points from parent if not already done for pointSet in DVGeo.points: - if pointSet not in DVGeo.children[i].points: - DVGeo.children[i].addPointSet(DVGeo.points[pointSet], pointSet) + if pointSet not in child.points: + child.addPointSet(DVGeo.points[pointSet], pointSet) if add_output: # add an output to the om component @@ -182,7 +182,7 @@ def nom_add_point_dict(self, point_dict): for k, v in point_dict.items(): self.nom_addPointSet(v, k) - def nom_getDVGeo(self, childIdx=None, DVGeoName=None): + def nom_getDVGeo(self, childName=None, DVGeoName=None): """ Gets the DVGeometry object held in the geometry component so DVGeo methods can be called directly on it @@ -191,8 +191,8 @@ def nom_getDVGeo(self, childIdx=None, DVGeoName=None): DVGeoName : string, optional The name of the DVGeo to return, necessary if there are multiple DVGeo objects - childIdx : int, optional - The zero-based index of the child FFD, if you want a child DVGeo returned + childName : str, optional + Name of the child FFD, if you want a child DVGeo returned Returns ------- @@ -206,12 +206,12 @@ def nom_getDVGeo(self, childIdx=None, DVGeoName=None): DVGeo = self.DVGeos["defaultDVGeo"] # return the top level DVGeo - if childIdx is None: + if childName is None: return DVGeo # return a child DVGeo else: - return DVGeo.children[childIdx] + return DVGeo.children[childName] def nom_getDVCon(self): """ @@ -228,7 +228,7 @@ def nom_getDVCon(self): Wrapper for DVGeo functions """ - def nom_addGlobalDV(self, dvName, value, func, childIdx=None, isComposite=False, DVGeoName=None): + def nom_addGlobalDV(self, dvName, value, func, childName=None, isComposite=False, DVGeoName=None): """ Add a global design variable to the DVGeo object. This is a wrapper for the DVGeo.addGlobalDV method. @@ -243,10 +243,8 @@ def nom_addGlobalDV(self, dvName, value, func, childIdx=None, isComposite=False, func : python function See :meth:`addGlobalDV <.DVGeometry.addGlobalDV>` - childIdx : int, optional - The zero-based index of the child FFD, if this DV is for a child FFD. - The index is defined by the order in which you add the child FFD to the parent. - For example, the first child FFD has an index of 0, the second an index of 1, and so on. + childName : str, optional + Name of the child FFD, if this DV is for a child FFD. isComposite : bool, optional Whether this DV is to be included in the composite DVs, by default False @@ -268,10 +266,10 @@ def nom_addGlobalDV(self, dvName, value, func, childIdx=None, isComposite=False, raise RuntimeError(f"Only FFD-based DVGeo objects can use global DVs, not type: {type(DVGeo).__name__}") # call the dvgeo object and add this dv - if childIdx is None: + if childName is None: DVGeo.addGlobalDV(dvName, value, func) else: - DVGeo.children[childIdx].addGlobalDV(dvName, value, func) + DVGeo.children[childName].addGlobalDV(dvName, value, func) # define the input # When composite DVs are used, input is not required for the default DVs. 
Now the composite DVs are @@ -280,12 +278,12 @@ def nom_addGlobalDV(self, dvName, value, func, childIdx=None, isComposite=False, self.add_input(dvName, distributed=False, shape=len(np.atleast_1d(value))) # call the dvgeo object and add this dv - if childIdx is None: + if childName is None: DVGeo.addGlobalDV(dvName, value, func) else: - DVGeo.children[childIdx].addGlobalDV(dvName, value, func) + DVGeo.children[childName].addGlobalDV(dvName, value, func) - def nom_addLocalDV(self, dvName, axis="y", pointSelect=None, childIdx=None, isComposite=False, DVGeoName=None): + def nom_addLocalDV(self, dvName, axis="y", pointSelect=None, childName=None, isComposite=False, DVGeoName=None): # if we have multiple DVGeos use the one specified by name DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) @@ -293,10 +291,10 @@ def nom_addLocalDV(self, dvName, axis="y", pointSelect=None, childIdx=None, isCo if not isinstance(DVGeo, DVGeometry): raise RuntimeError(f"Only FFD-based DVGeo objects can use local DVs, not type: {type(DVGeo).__name__}") - if childIdx is None: + if childName is None: nVal = DVGeo.addLocalDV(dvName, axis=axis, pointSelect=pointSelect) else: - nVal = DVGeo.children[childIdx].addLocalDV(dvName, axis=axis, pointSelect=pointSelect) + nVal = DVGeo.children[childName].addLocalDV(dvName, axis=axis, pointSelect=pointSelect) # define the input # When composite DVs are used, input is not required for the default DVs. Now the composite DVs are @@ -309,7 +307,7 @@ def nom_addLocalSectionDV( self, dvName, secIndex, - childIdx=None, + childName=None, axis=1, pointSelect=None, volList=None, @@ -331,10 +329,8 @@ def nom_addLocalSectionDV( secIndex : char or list of chars See wrapped - childIdx : int, optional - The zero-based index of the child FFD, if this DV is for a child FFD - The index is defined by the order in which you add the child FFD to the parent - For example, the first child FFD has an index of 0, the second an index of 1, and so on + childName : str, optional + Name of the child FFD, if this DV is for a child FFD. axis : int, optional See wrapped @@ -378,11 +374,11 @@ def nom_addLocalSectionDV( ) # add the DV to a normal DVGeo - if childIdx is None: + if childName is None: nVal = DVGeo.addLocalSectionDV(dvName, secIndex, axis, pointSelect, volList, orient0, orient2, config) # add the DV to a child DVGeo else: - nVal = DVGeo.children[childIdx].addLocalSectionDV( + nVal = DVGeo.children[childName].addLocalSectionDV( dvName, secIndex, axis, @@ -397,7 +393,7 @@ def nom_addLocalSectionDV( self.add_input(dvName, distributed=False, shape=nVal) return nVal - def nom_addShapeFunctionDV(self, dvName, shapes, childIdx=None, config=None, DVGeoName=None): + def nom_addShapeFunctionDV(self, dvName, shapes, childName=None, config=None, DVGeoName=None): """ Add one or more local shape function design variables to the DVGeometry object Wrapper for :meth:`addShapeFunctionDV <.DVGeometry.addShapeFunctionDV>` @@ -411,10 +407,8 @@ def nom_addShapeFunctionDV(self, dvName, shapes, childIdx=None, config=None, DVG shapes : list of dictionaries, or a single dictionary See wrapped - childIdx : int, optional - The zero-based index of the child FFD, if this DV is for a child FFD - The index is defined by the order in which you add the child FFD to the parent - For example, the first child FFD has an index of 0, the second an index of 1, and so on + childName : str, optional + Name of the child FFD, if this DV is for a child FFD. 
config : str or list, optional See wrapped @@ -443,11 +437,11 @@ def nom_addShapeFunctionDV(self, dvName, shapes, childIdx=None, config=None, DVG ) # add the DV to a normal DVGeo - if childIdx is None: + if childName is None: nVal = DVGeo.addShapeFunctionDV(dvName, shapes, config) # add the DV to a child DVGeo else: - nVal = DVGeo.children[childIdx].addShapeFunctionDV(dvName, shapes, config) + nVal = DVGeo.children[childName].addShapeFunctionDV(dvName, shapes, config) # define the input self.add_input(dvName, distributed=False, shape=nVal) @@ -507,7 +501,7 @@ def nom_addESPVariable(self, desmptr_name, isComposite=False, DVGeoName=None, ** if not isComposite: self.add_input(desmptr_name, distributed=False, shape=val.shape, val=val) - def nom_addRefAxis(self, childIdx=None, DVGeoName=None, **kwargs): + def nom_addRefAxis(self, childName=None, DVGeoName=None, **kwargs): # if we have multiple DVGeos use the one specified by name DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) @@ -516,11 +510,11 @@ def nom_addRefAxis(self, childIdx=None, DVGeoName=None, **kwargs): raise RuntimeError(f"Only FFD-based DVGeo objects can use reference axes, not type: {type(DVGeo).__name__}") # add ref axis to this DVGeo - if childIdx is None: + if childName is None: return DVGeo.addRefAxis(**kwargs) # add ref axis to the specified child else: - return DVGeo.children[childIdx].addRefAxis(**kwargs) + return DVGeo.children[childName].addRefAxis(**kwargs) """ Wrapper for DVCon functions @@ -616,8 +610,8 @@ def nom_addProjectedAreaConstraint(self, name, axis, scaled=True, surface_name=" self.DVCon.addProjectedAreaConstraint(axis, name=name, scaled=scaled, surfaceName=surface_name) self.add_output(name, distributed=False, val=1.0) - def nom_add_LETEConstraint(self, name, volID, faceID, topID=None, childIdx=None): - self.DVCon.addLeTeConstraints(volID, faceID, name=name, topID=topID, childIdx=childIdx) + def nom_add_LETEConstraint(self, name, volID, faceID, topID=None, childName=None): + self.DVCon.addLeTeConstraints(volID, faceID, name=name, topID=topID, childName=childName) # how many are there? 
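# A hedged sketch of the renamed keyword as used through the MPhys wrapper. Here
# "geometry" stands in for the OpenMDAO geometry component that exposes these nom_*
# methods, and the "flap" label and file name are placeholders, not values fixed by
# this patch.
geometry.nom_addChild("flap_ffd.xyz", childName="flap")

# child DVs and constraints refer to the same name
geometry.nom_addLocalDV("flap_shape", axis="y", childName="flap")
geometry.nom_add_LETEConstraint("flap_le", volID=0, faceID="iLow", childName="flap")

# the child DVGeo itself can be retrieved by name
childDVGeo = geometry.nom_getDVGeo(childName="flap")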
conobj = self.DVCon.linearCon[name] nCon = len(conobj.indSetA) @@ -632,9 +626,9 @@ def nom_addCurvatureConstraint1D(self, name, start, end, nPts, axis, **kwargs): self.DVCon.addCurvatureConstraint1D(start=start, end=end, nPts=nPts, axis=axis, name=name, **kwargs) self.add_output(name, distributed=False, val=1.0) - def nom_addLinearConstraintsShape(self, name, indSetA, indSetB, factorA, factorB, childIdx=None): + def nom_addLinearConstraintsShape(self, name, indSetA, indSetB, factorA, factorB, childName=None): self.DVCon.addLinearConstraintsShape( - indSetA=indSetA, indSetB=indSetB, factorA=factorA, factorB=factorB, name=name, childIdx=childIdx + indSetA=indSetA, indSetB=indSetB, factorA=factorA, factorB=factorB, name=name, childName=childName ) lSize = len(indSetA) self.add_output(name, distributed=False, val=np.zeros(lSize), shape=lSize) From 4584ce2d6e0289a0cfbfd3b83b826829c87710a4 Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 29 Aug 2023 12:49:37 +0200 Subject: [PATCH 026/110] fix for the name check --- pygeo/parameterization/DVGeo.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index a3cdf1e6..71c10c1e 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -835,7 +835,15 @@ def addChild(self, childDVGeo, childName=None): if childDVGeo.isChild is False: raise Error("Trying to add a child FFD that has NOT been " "created as a child. This operation is illegal.") - # check if this custom name has already been used + # set the index + iChild = len(self.children) + childDVGeo.iChild = iChild + + # check if a custom name is provided, if not, we will use the old naming scheme based on the iChild index + if childName is None: + childName = f"child{iChild:d}" + + # check if this child name has already been used if childName in self.children: raise Error( f"Another child DVGeo has already been added with the name {childName}. Change the name of one of the child FFDs with the same name and try again." 
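For reference, a short sketch of the naming behavior after this fix, with placeholder FFD file names: children added without an explicit childName keep the old index-based keys, named children are stored under the given key, and reusing a name now raises an error.

    from pygeo import DVGeometry

    parent = DVGeometry("parent_ffd.xyz")
    c1 = DVGeometry("child1_ffd.xyz", child=True)
    c2 = DVGeometry("child2_ffd.xyz", child=True)

    parent.addChild(c1)                       # stored under the default key "child0"
    parent.addChild(c2, childName="winglet")  # stored under "winglet"

    print(list(parent.children.keys()))       # ["child0", "winglet"]

    # adding another child with childName="winglet" would now raise the duplicate-name Error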
@@ -843,13 +851,6 @@ def addChild(self, childDVGeo, childName=None): # Extract the coef from the child FFD and ref axis and embed # them into the parent and compute their derivatives - iChild = len(self.children) - childDVGeo.iChild = iChild - - # check if a custom name is provided, if not, we will use the old naming scheme based on the iChild - if childName is None: - childName = f"child{iChild:d}" - self.FFD.attachPoints(childDVGeo.FFD.coef, f"{childName}_coef") self.FFD.calcdPtdCoef(f"{childName}_coef") From a1414982948f6981900fd393887e2adedde57a9f Mon Sep 17 00:00:00 2001 From: Anil Yildirim Date: Tue, 29 Aug 2023 12:49:53 +0200 Subject: [PATCH 027/110] added tests for active child argument --- tests/reg_tests/ref/test_active_children.ref | 302 +++++++++++++++++++ tests/reg_tests/test_DVGeometry.py | 66 ++++ 2 files changed, 368 insertions(+) create mode 100644 tests/reg_tests/ref/test_active_children.ref diff --git a/tests/reg_tests/ref/test_active_children.ref b/tests/reg_tests/ref/test_active_children.ref new file mode 100644 index 00000000..77cc25e1 --- /dev/null +++ b/tests/reg_tests/ref/test_active_children.ref @@ -0,0 +1,302 @@ +{ + "dIdx_testPoints1": { + "span1": { + "__ndarray__": [ + [ + 0.375 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.125 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + }, + "span2": { + "__ndarray__": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + } + }, + "dIdx_testPoints2": { + "span1": { + "__ndarray__": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + }, + "span2": { + "__ndarray__": [ + [ + 0.375 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.125 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + } + }, + "dIdx_testPointsAll": { + "span1": { + "__ndarray__": [ + [ + 0.375 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.125 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + }, + "span2": { + "__ndarray__": [ + [ + 0.375 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.125 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + } + }, + "dIdx_testPointsNone": { + "span1": { + "__ndarray__": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + }, + "span2": { + "__ndarray__": [ + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ], + [ + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 6, + 1 + ] + } + }, + "new_coords_testPoints1": { + "__ndarray__": [ + [ + 0.6250000000000002, + 0.0, + 0.0 + ], + [ + -0.12499999999999997, + 0.0, + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 2, + 3 + ] + }, + "new_coords_testPoints2": { + "__ndarray__": [ + [ + 1.0000000000000004, + 0.0, + 0.0 + ], + [ + 1.3877787807814457e-16, + 0.0, + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 2, + 3 + ] + }, + "new_coords_testPointsAll": { + "__ndarray__": [ + [ + 1.3750000000000004, + 0.0, + 0.0 + ], + [ + 0.12500000000000014, + 0.0, + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 2, + 3 + ] + }, + "new_coords_testPointsNone": { + "__ndarray__": [ + [ + 0.2500000000000001, + 0.0, + 0.0 + ], + [ + -0.24999999999999997, + 0.0, + 0.0 + ] + ], + "dtype": "float64", + "shape": [ + 2, + 3 + ] + } +} \ No newline at end of file diff 
--git a/tests/reg_tests/test_DVGeometry.py b/tests/reg_tests/test_DVGeometry.py index 464b3c26..afc83a4c 100644 --- a/tests/reg_tests/test_DVGeometry.py +++ b/tests/reg_tests/test_DVGeometry.py @@ -1400,6 +1400,72 @@ def test_shape_functions(self, train=False, refDeriv=False): handler.root_add_val("new_pts", new_pts, rtol=1e-10, atol=1e-10) + def train_active_children(self, train=True): + self.test_active_children(train=train) + + def test_active_children(self, train=False): + """ + Test active children option for adding pointsets + """ + + refFile = os.path.join(self.base_path, "ref/test_active_children.ref") + with BaseRegTest(refFile, train=train) as handler: + handler.root_print("Test shape function DVs") + + DVGeo, DVGeoChild1 = commonUtils.setupDVGeo(self.base_path) + _, DVGeoChild2 = commonUtils.setupDVGeo(self.base_path) + + # add design variables + DVGeoChild1.addGlobalDV(dvName="span1", value=0.5, func=commonUtils.spanX, lower=0.1, upper=10, scale=1) + DVGeoChild2.addGlobalDV(dvName="span2", value=0.5, func=commonUtils.spanX, lower=0.1, upper=10, scale=1) + DVGeo.addChild(DVGeoChild1, "child1") + DVGeo.addChild(DVGeoChild2, "child2") + + points = np.zeros([2, 3]) + points[0, :] = [0.25, 0, 0] + points[1, :] = [-0.25, 0, 0] + + # first, all children active + ptName = "testPointsAll" + DVGeo.addPointSet(points, ptName) + + # only the first child + ptName = "testPoints1" + DVGeo.addPointSet(points, ptName, activeChildren=["child1"]) + + # only second + ptName = "testPoints2" + DVGeo.addPointSet(points, ptName, activeChildren=["child2"]) + + # no children + ptName = "testPointsNone" + DVGeo.addPointSet(points, ptName, activeChildren=[]) + + nPt = points.size + ptNames = ["testPointsAll", "testPoints1", "testPoints2", "testPointsNone"] + for ptName in ptNames: + # test derivatives + dIdPt = np.zeros([nPt, 2, 3]) + dIdPt[0, 0, 0] = 1.0 + dIdPt[1, 0, 1] = 1.0 + dIdPt[2, 0, 2] = 1.0 + dIdPt[3, 1, 0] = 1.0 + dIdPt[4, 1, 1] = 1.0 + dIdPt[5, 1, 2] = 1.0 + dIdx = DVGeo.totalSensitivity(dIdPt, ptName) + + handler.root_add_dict(f"dIdx_{ptName}", dIdx, rtol=1e-10, atol=1e-10) + + # perturb the DV and test point coordinates + xDV = {"span1": np.array([2.0]), "span2": np.array([3.0])} + DVGeo.setDesignVars(xDV) + + for ptName in ptNames: + # testPoints were added in the commonUtils.testSensitivities call + new_pts = DVGeo.update(ptName) + + handler.root_add_val(f"new_coords_{ptName}", new_pts, rtol=1e-10, atol=1e-10) + if __name__ == "__main__": unittest.main() From 2f7d5f282998e1467c59c4f45c00409ba1bbb63e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 25 Sep 2023 14:38:24 -0400 Subject: [PATCH 028/110] families go to correct pointsets --- pygeo/parameterization/DVGeoMulti.py | 588 +++++++++++++++++++-------- 1 file changed, 419 insertions(+), 169 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 2f2f4cf4..61b7665e 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -52,10 +52,19 @@ class DVGeometryMulti: """ - def __init__(self, comm=MPI.COMM_WORLD, filletIntersection=False, checkDVs=True, debug=False, isComplex=False): + def __init__( + self, + comm=MPI.COMM_WORLD, + filletIntersection=False, + checkDVs=True, + debug=False, + isComplex=False, + ): # Check to make sure pySurf is installed before initializing if not pysurfInstalled and not filletIntersection: - raise ImportError("pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode.") + raise ImportError( 
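The unit seeds built in test_active_children above follow a regular pattern: row 3*i + j of dIdPt carries the seed for point i, coordinate j, so totalSensitivity returns one derivative row per seed. A compact equivalent, shown only as an illustration:

import numpy as np

nPts = 2                                  # two test points, as in the test above
dIdPt = np.zeros((3 * nPts, nPts, 3))     # one functional seed per (point, dimension) pair
for i in range(nPts):
    for j in range(3):
        dIdPt[3 * i + j, i, j] = 1.0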
+ "pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode." + ) self.compNames = [] self.comps = OrderedDict() @@ -81,7 +90,16 @@ def __init__(self, comm=MPI.COMM_WORLD, filletIntersection=False, checkDVs=True, self.dtype = float self.adtAPI = adtAPI.adtapi - def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, bbox=None, pointSetKwargs=None): + def addComponent( + self, + comp, + DVGeo=None, + triMesh=None, + points=None, + scale=1.0, + bbox=None, + pointSetKwargs=None, + ): """ Method to add components to the DVGeometryMulti object. @@ -160,16 +178,20 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b # we have a fillet so no structured surfaces are necessary if self.filletIntersection: - # save unstructured point data - surfPts = self._readDATFile(points, surf=True) + if points is not None: + # save unstructured point data + surfPts = self._readDATFile(points, surf=True) - # scale the nodes - surfPts *= scale - nodes = surfPts + # scale the nodes + surfPts *= scale + nodes = surfPts + + # add these points to the corresponding dvgeo unless this component is a fillet + if not filletComp: + DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) - # add these points to the corresponding dvgeo unless this component is a fillet - if not filletComp: - DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) + else: + nodes = None # initialize the component object # a different class is used for fillets & their adjacent components @@ -194,7 +216,9 @@ def addComponent(self, comp, DVGeo=None, triMesh=None, points=None, scale=1.0, b barsConn = None # initialize the component object - component = component(comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax) + component = component( + comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax + ) # add component object to the dictionary and list keeping track of components # if this component is a fillet (no DVGeo) put in a separate list to avoid unnecessary checks for a DVGeo later @@ -308,7 +332,9 @@ def addIntersection( if filletComp is None: print("no") - inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype) + inter = FilletIntersection( + compA, compB, filletComp, distTol, self, self.dtype + ) # initialize a standard intersection object else: @@ -355,7 +381,9 @@ def addCurve(self, compName, filletName, curveFiles): fillet = self.fillets[filletName] intersection = fillet.intersection - filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) + filletIntCurve, filletIntInd = intersection.findIntersection( + fillet.surfPts, curvePts + ) compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) fillet.intersectInd.update({compName: filletIntInd}) @@ -371,7 +399,16 @@ def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" return self.DVGeoDict - def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, **kwargs): + def addPointSet( + self, + points, + ptName, + familyName=None, + compNames=None, + comm=None, + applyIC=False, + **kwargs, + ): """ Add a set of coordinates to DVGeometryMulti. The is the main way that geometry, in the form of a coordinate list, is manipulated. @@ -421,7 +458,9 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, # elements surrounding the node. This allows the meshing algorithms, # for instance, to march in an average direction near kinks. 
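For the fillet-mode branch of addComponent above, the surface comes in as a plain point file that is read, scaled, and stored. The sketch below only illustrates that read-scale-store step with an in-memory x-y-z listing; the real _readDATFile parser is not shown in this hunk and likely handles more than this (headers, connectivity, surf/curve modes):

import io
import numpy as np

dat = io.StringIO("0.0 0.0 0.0\n1.0 0.0 0.0\n1.0 1.0 0.0\n")
surfPts = np.loadtxt(dat)   # (nPts, 3) array of surface nodes
surfPts *= 1.0              # the scale argument is applied before the points are stored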
nodal_normals = self.adtAPI.adtcomputenodalnormals( - self.comps[comp].nodes.T, self.comps[comp].triConnStack.T, quadConn.T + self.comps[comp].nodes.T, + self.comps[comp].triConnStack.T, + quadConn.T, ) self.comps[comp].nodal_normals = nodal_normals.T @@ -435,6 +474,8 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, MPI.COMM_SELF.py2f(), comp, ) + else: + compNames = [familyName] # create the pointset class self.points[ptName] = PointSet(points, comm=comm) @@ -444,98 +485,111 @@ def addPointSet(self, points, ptName, compNames=None, comm=None, applyIC=False, self.points[ptName].compMap[comp] = [] self.points[ptName].compMapFlat[comp] = [] - # we now need to create the component mapping information - for i in range(self.points[ptName].nPts): - # initial flags - inFFD = False - proj = False - projList = [] - - # loop over components and check if this point is in a single BBox - for comp in compNames: - # apply a small tolerance for the bounding box in case points are coincident with the FFD - boundTol = 1e-16 - xMin = self.comps[comp].xMin - xMax = self.comps[comp].xMax - xMin -= np.abs(xMin * boundTol) + boundTol - xMax += np.abs(xMax * boundTol) + boundTol - - # check if inside - if ( - xMin[0] < points[i, 0] < xMax[0] - and xMin[1] < points[i, 1] < xMax[1] - and xMin[2] < points[i, 2] < xMax[2] - ): - # add this component to the projection list - projList.append(comp) - - # this point was not inside any other FFD before - if not inFFD: - inFFD = True - inComp = comp - # this point was inside another FFD, so we need to project it... - else: - # set the projection flag - proj = True - - # project this point to components, we need to set inComp string - if proj: - # set a high initial distance - dMin2 = 1e10 - - # TODO need to skip this or have some alternate version of assigning a point that's in 2 - - # loop over the components + if not self.filletIntersection: + # we now need to create the component mapping information + for i in range(self.points[ptName].nPts): + # initial flags + inFFD = False + proj = False + projList = [] + + # loop over components and check if this point is in a single BBox for comp in compNames: - # check if this component is in the projList - if comp in projList: - # check if we have an ADT: - if self.comps[comp].triMesh: - # Initialize reference values (see explanation above) - numPts = 1 - dist2 = np.ones(numPts, dtype=self.dtype) * 1e10 - xyzProj = np.zeros((numPts, 3), dtype=self.dtype) - normProjNotNorm = np.zeros((numPts, 3), dtype=self.dtype) - - # Call projection function - _, _, _, _ = self.adtAPI.adtmindistancesearch( - points[i].T, comp, dist2, xyzProj.T, self.comps[comp].nodal_normals.T, normProjNotNorm.T - ) - - # if this is closer than the previous min, take this comp - if dist2 < dMin2: - dMin2 = dist2[0] - inComp = comp - + # apply a small tolerance for the bounding box in case points are coincident with the FFD + boundTol = 1e-16 + xMin = self.comps[comp].xMin + xMax = self.comps[comp].xMax + xMin -= np.abs(xMin * boundTol) + boundTol + xMax += np.abs(xMax * boundTol) + boundTol + + # check if inside + if ( + xMin[0] < points[i, 0] < xMax[0] + and xMin[1] < points[i, 1] < xMax[1] + and xMin[2] < points[i, 2] < xMax[2] + ): + # add this component to the projection list + projList.append(comp) + + # this point was not inside any other FFD before + if not inFFD: + inFFD = True + inComp = comp + # this point was inside another FFD, so we need to project it... 
else: - raise Error( - f"The point at (x, y, z) = ({points[i, 0]:.3f}, {points[i, 1]:.3f} {points[i, 2]:.3f})" - + f"in point set {ptName} is inside multiple FFDs but a triangulated mesh " - + f"for component {comp} is not provided to determine which component owns this point." - ) - - # this point was inside at least one FFD. If it was inside multiple, - # we projected it before to figure out which component it should belong to - if inFFD: - # we can add the point index to the list of points inComp owns - self.points[ptName].compMap[inComp].append(i) - - # also create a flattened version of the compMap - for j in range(3): - self.points[ptName].compMapFlat[inComp].append(3 * i + j) - - # this point is outside any FFD... - else: - raise Error( - f"The point at (x, y, z) = ({points[i, 0]:.3f}, {points[i, 1]:.3f} {points[i, 2]:.3f}) " - + f"in point set {ptName} is not inside any FFDs." - ) + # set the projection flag + proj = True + + # project this point to components, we need to set inComp string + if proj: + # set a high initial distance + dMin2 = 1e10 + + # TODO need to skip this or have some alternate version of assigning a point that's in 2 + + # loop over the components + for comp in compNames: + # check if this component is in the projList + if comp in projList: + # check if we have an ADT: + if self.comps[comp].triMesh: + # Initialize reference values (see explanation above) + numPts = 1 + dist2 = np.ones(numPts, dtype=self.dtype) * 1e10 + xyzProj = np.zeros((numPts, 3), dtype=self.dtype) + normProjNotNorm = np.zeros( + (numPts, 3), dtype=self.dtype + ) + + # Call projection function + _, _, _, _ = self.adtAPI.adtmindistancesearch( + points[i].T, + comp, + dist2, + xyzProj.T, + self.comps[comp].nodal_normals.T, + normProjNotNorm.T, + ) + + # if this is closer than the previous min, take this comp + if dist2 < dMin2: + dMin2 = dist2[0] + inComp = comp + + else: + raise Error( + f"The point at (x, y, z) = ({points[i, 0]:.3f}, {points[i, 1]:.3f} {points[i, 2]:.3f})" + + f"in point set {ptName} is inside multiple FFDs but a triangulated mesh " + + f"for component {comp} is not provided to determine which component owns this point." + ) + + # this point was inside at least one FFD. If it was inside multiple, + # we projected it before to figure out which component it should belong to + if inFFD: + # we can add the point index to the list of points inComp owns + self.points[ptName].compMap[inComp].append(i) + + # also create a flattened version of the compMap + for j in range(3): + self.points[ptName].compMapFlat[inComp].append(3 * i + j) + + # this point is outside any FFD... + else: + raise Error( + f"The point at (x, y, z) = ({points[i, 0]:.3f}, {points[i, 1]:.3f} {points[i, 2]:.3f}) " + + f"in point set {ptName} is not inside any FFDs." 
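The ownership logic above reduces to two steps: a point is a candidate for every component whose slightly inflated bounding box contains it, and when more than one box matches, the component whose triangulated surface projects closest wins. A self-contained sketch of that decision; the dist2 values here stand in for the squared distances the ADT projection would return:

import numpy as np

def owner(point, boxes, dist2):
    boundTol = 1e-16
    candidates = []
    for name, (xMin, xMax) in boxes.items():
        # inflate the box slightly so coincident points still count as inside
        lo = xMin - np.abs(xMin) * boundTol - boundTol
        hi = xMax + np.abs(xMax) * boundTol + boundTol
        if np.all(lo < point) and np.all(point < hi):
            candidates.append(name)
    if not candidates:
        raise ValueError("point is not inside any FFD")
    # when boxes overlap, the component with the closest triangulated surface wins
    return min(candidates, key=lambda name: dist2[name])

boxes = {
    "wing": (np.zeros(3), np.ones(3)),
    "fuse": (np.array([0.5, 0.0, 0.0]), np.array([2.0, 1.0, 1.0])),
}
print(owner(np.array([0.75, 0.5, 0.5]), boxes, {"wing": 0.1, "fuse": 0.4}))   # -> wing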
+ ) # using the mapping array, add the pointsets to respective DVGeo objects for comp in compNames: - compMap = self.points[ptName].compMap[comp] - self.comps[comp].DVGeo.addPointSet(points[compMap], ptName, **kwargs) - self.comps[comp].surfPtsName = ptName + if comp != "fillet": + compMap = self.points[ptName].compMap[comp] + self.comps[comp].DVGeo.addPointSet(points[compMap], ptName, **kwargs) + self.comps[comp].surfPtsName = ptName + self.comps[comp].surfPts = points + else: + self.fillets[comp].surfPtsName = ptName + self.fillets[comp].surfPts = points # check if this pointset will get the IC treatment if applyIC: @@ -765,7 +819,9 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): for IC in self.intersectComps: if IC.projectFlag and ptSetName in IC.points: # initialize the seed contribution to the intersection seam and feature curves from project_b - IC.seamBarProj[ptSetName] = np.zeros((N, IC.seam0.shape[0], IC.seam0.shape[1])) + IC.seamBarProj[ptSetName] = np.zeros( + (N, IC.seam0.shape[0], IC.seam0.shape[1]) + ) # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points @@ -819,7 +875,9 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we only do this if this component has at least one DV if nDVComp > 0: # this part of the sensitivity matrix is owned by this dvgeo - dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) + dIdxComp = DVGeo.convertSensitivityToDict( + dIdx[:, dvOffset : dvOffset + nDVComp] + ) for k, v in dIdxComp.items(): dIdxDict[k] = v @@ -955,7 +1013,9 @@ def _readCGNSFile(self, filename): if self.comm.rank == 0: print(f"Reading file {filename}") # use the default routine in tsurftools - nodes, sectionDict = tsurf_tools.getCGNSsections(filename, comm=MPI.COMM_SELF) + nodes, sectionDict = tsurf_tools.getCGNSsections( + filename, comm=MPI.COMM_SELF + ) print("Finished reading the cgns file") # Convert the nodes to complex if necessary @@ -969,13 +1029,17 @@ def _readCGNSFile(self, filename): if "triaConnF" in sectionDict[part].keys(): # this is a surface, read the tri connectivities triConn[part.lower()] = sectionDict[part]["triaConnF"] - triConnStack = np.vstack((triConnStack, sectionDict[part]["triaConnF"])) + triConnStack = np.vstack( + (triConnStack, sectionDict[part]["triaConnF"]) + ) if "barsConn" in sectionDict[part].keys(): # this is a curve, save the curve connectivity barsConn[part.lower()] = sectionDict[part]["barsConn"] - print(f"The {filename} mesh has {len(nodes)} nodes and {len(triConnStack)} elements.") + print( + f"The {filename} mesh has {len(nodes)} nodes and {len(triConnStack)} elements." 
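The dvOffset bookkeeping above slices the stacked sensitivity matrix column-wise, one block per component, before each block is converted to a per-DV dictionary. A standalone sketch with placeholder component names:

import numpy as np

dIdx = np.arange(12.0).reshape(2, 6)    # 2 functions of interest, 6 stacked design variables
nDV = {"comp1": 4, "comp2": 2}          # number of DVs owned by each component

dIdxDict, dvOffset = {}, 0
for comp, n in nDV.items():
    dIdxDict[comp] = dIdx[:, dvOffset:dvOffset + n]
    dvOffset += n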
+ ) else: # create these to recieve the data nodes = None @@ -1168,7 +1232,9 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): # Vectorized point-based warping rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) + LdefoDist = 1.0 / np.sqrt( + rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16 + ) LdefoDist3 = LdefoDist**3 Wi = LdefoDist3 den = np.sum(Wi) @@ -1198,7 +1264,9 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # Vectorized point-based warping rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) + LdefoDist = 1.0 / np.sqrt( + rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16 + ) LdefoDist3 = LdefoDist**3 Wi = LdefoDist3 den = np.sum(Wi) @@ -1316,7 +1384,9 @@ def __init__( self.excludeSurfaces = {} for k, v in excludeSurfaces.items(): if k.lower() in self.trackSurfaces: - raise Error(f"Surface {k} cannot be in both trackSurfaces and excludeSurfaces.") + raise Error( + f"Surface {k} cannot be in both trackSurfaces and excludeSurfaces." + ) self.excludeSurfaces[k.lower()] = v # Save anisotropy list @@ -1357,14 +1427,18 @@ def __init__( curveComp = self.compA self.curvesOnA.append(curveName) else: - raise Error(f"Curve {curveName} does not belong in {self.compA.name} or {self.compB.name}.") + raise Error( + f"Curve {curveName} does not belong in {self.compA.name} or {self.compB.name}." + ) # sort the feature curve newConn, newMap = tsurf_tools.FEsort(curveComp.barsConn[curveName].tolist()) # we only want to have a single curve if len(newConn) > 1: - raise Error(f"The curve {curveName} generated more than one curve with FESort.") + raise Error( + f"The curve {curveName} generated more than one curve with FESort." + ) # get the connectivity newConn = newConn[0] @@ -1382,7 +1456,10 @@ def __init__( msign = np.sign(marchDirs[ii]) # check if we need to flip - if msign * curveNodes[newConn[0][0]][mdir] > msign * curveNodes[newConn[0][1]][mdir]: + if ( + msign * curveNodes[newConn[0][0]][mdir] + > msign * curveNodes[newConn[0][1]][mdir] + ): # flip on both axes newConn = np.flip(newConn, axis=0) newConn = np.flip(newConn, axis=1) @@ -1433,7 +1510,13 @@ def addPointSet(self, pts, ptSetName, compMap, comm): elemIDs + 1 ) # (we need to do this separetely because Fortran will actively change elemIDs contents. 
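The _warpSurfPts weighting above is an inverse-distance-cubed scheme: each surface point is pulled by every curve point with a weight that decays as 1/d^3. The tail of the routine is not visible in this hunk, so the final weighted average of delta in the sketch below is an assumption:

import numpy as np

def warp_point(ptCoords, curvePtCoords, delta):
    rr = ptCoords - curvePtCoords
    LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16)
    Wi = LdefoDist**3
    den = np.sum(Wi)
    # assumed: the displacement is the weight-averaged curve displacement
    return np.sum(Wi[:, None] * delta, axis=0) / den

curve = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
delta = np.array([[0.0, 0.1, 0.0], [0.0, 0.3, 0.0]])
print(warp_point(np.array([0.2, 0.5, 0.0]), curve, delta))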
self.curveSearchAPI.mindistancecurve( - pts.T, self.nodes0.T, self.conn0.T + 1, xyzProj.T, tanProj.T, dist2, elemIDs + pts.T, + self.nodes0.T, + self.conn0.T + 1, + xyzProj.T, + tanProj.T, + dist2, + elemIDs, ) # Adjust indices back to Python standards @@ -1483,16 +1566,22 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # Associate points with the excluded surfaces for surface in self.excludeSurfaces: surfaceEps = self.excludeSurfaces[surface] - self.associatePointsToSurface(intersectPts, ptSetName, surface, surfaceEps) + self.associatePointsToSurface( + intersectPts, ptSetName, surface, surfaceEps + ) # Combine the excluded indices using a set to avoid duplicates excludeSet = set() for surface in self.excludeSurfaces: if surface in self.compA.triConn: # Pop this surface from the saved data - surfaceInd = self.projData[ptSetName]["compA"]["surfaceInd"].pop(surface) + surfaceInd = self.projData[ptSetName]["compA"][ + "surfaceInd" + ].pop(surface) elif surface in self.compB.triConn: - surfaceInd = self.projData[ptSetName]["compB"]["surfaceInd"].pop(surface) + surfaceInd = self.projData[ptSetName]["compB"][ + "surfaceInd" + ].pop(surface) excludeSet.update(surfaceInd) @@ -1545,11 +1634,15 @@ def addPointSet(self, pts, ptSetName, compMap, comm): elif surface in self.compB.triConn: compPoints = pts[indB] else: - raise Error(f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}.") + raise Error( + f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}." + ) # This proc has some points to project if len(compPoints) > 0: - self.associatePointsToSurface(compPoints, ptSetName, surface, surfaceEps) + self.associatePointsToSurface( + compPoints, ptSetName, surface, surfaceEps + ) # if we include the feature curves in the warping, we also need to project the added points to the intersection and feature curves and determine how the points map to the curves if self.incCurves: @@ -1579,7 +1672,13 @@ def addPointSet(self, pts, ptSetName, compMap, comm): elemIDs[:] = elemIDs + 1 # (we need to do this separetely because Fortran will actively change elemIDs contents. self.curveSearchAPI.mindistancecurve( - ptsToCurves.T, self.seam0.T, self.seamConn.T + 1, xyzProj.T, tanProj.T, dist2, elemIDs + ptsToCurves.T, + self.seam0.T, + self.seamConn.T + 1, + xyzProj.T, + tanProj.T, + dist2, + elemIDs, ) # Adjust indices back to Python standards @@ -1615,7 +1714,9 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # this returns a bool array of indices that satisfy the conditions # we check for elemIDs because we projected to all curves at once - curveBool = np.all([d < eps, elemIDs >= seamBeg, elemIDs < seamEnd], axis=0) + curveBool = np.all( + [d < eps, elemIDs >= seamBeg, elemIDs < seamEnd], axis=0 + ) # get the indices of the points mapped to this element idxs = np.nonzero(curveBool) @@ -1664,7 +1765,9 @@ def addPointSet(self, pts, ptSetName, compMap, comm): idxs = self.curveProjIdx[ptSetName][curveName] # call the utility function - nPtsTotal, nPtsProcs, curvePtCoords = self._commCurveProj(pts, idxs, comm) + nPtsTotal, nPtsProcs, curvePtCoords = self._commCurveProj( + pts, idxs, comm + ) # save the displacements and points self.curvePtCounts[ptSetName][curveName] = nPtsProcs @@ -1699,7 +1802,9 @@ def update(self, ptSetName, delta): # 1) the mesh to be output for visualization # 2) the optimization to continue after raising a fail flag if self.comm.rank == 0: - print("The intersection topology has changed. 
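The masking above assigns projected points to one feature curve at a time: a point belongs to a curve only if its projection distance is under eps and its projected element index lands in that curve's [seamBeg, seamEnd) range of the concatenated seam. A standalone sketch with made-up numbers:

import numpy as np

d = np.array([1e-4, 0.2, 1e-5, 1e-6])     # projection distances
elemIDs = np.array([3, 4, 9, 5])          # element hit by each projection
eps, seamBeg, seamEnd = 1e-3, 2, 8

curveBool = np.all([d < eps, elemIDs >= seamBeg, elemIDs < seamEnd], axis=0)
idxs = np.nonzero(curveBool)[0]           # -> array([0, 3])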
The intersection will not be updated.") + print( + "The intersection topology has changed. The intersection will not be updated." + ) return delta # Get the two end points for the line elements @@ -1771,7 +1876,9 @@ def update(self, ptSetName, delta): interp = np.zeros(3, dtype=self.dtype) for iDim in range(3): # numerator gets two integrals with the delta components - num = np.sum((dr1[:, iDim] - dr0[:, iDim]) * eval2 + dr0[:, iDim] * eval1) + num = np.sum( + (dr1[:, iDim] - dr0[:, iDim]) * eval2 + dr0[:, iDim] * eval1 + ) # final result interp[iDim] = num / den @@ -1927,7 +2034,10 @@ def project(self, ptSetName, newPts): if self.debug: tecplot_interface.write_tecplot_scatter( - f"{curveName}_warped_pts.plt", "intersection", ["X", "Y", "Z"], ptsOnCurve + f"{curveName}_warped_pts.plt", + "intersection", + ["X", "Y", "Z"], + ptsOnCurve, ) # conn of the current curve @@ -1940,7 +2050,9 @@ def project(self, ptSetName, newPts): nPoints = ptsOnCurve.shape[0] if self.debug: - print(f"[{self.comm.rank}] curveName: {curveName}, nPoints on the fwd pass: {nPoints}") + print( + f"[{self.comm.rank}] curveName: {curveName}, nPoints on the fwd pass: {nPoints}" + ) # Initialize references if user provided none dist2 = np.ones(nPoints, dtype=self.dtype) * 1e10 @@ -1957,7 +2069,13 @@ def project(self, ptSetName, newPts): elemIDs + 1 ) # (we need to do this separetely because Fortran will actively change elemIDs contents. curveMask = self.curveSearchAPI.mindistancecurve( - ptsOnCurve.T, self.seam.T, curveConn.T + 1, xyzProj.T, tanProj.T, dist2, elemIDs + ptsOnCurve.T, + self.seam.T, + curveConn.T + 1, + xyzProj.T, + tanProj.T, + dist2, + elemIDs, ) # Adjust indices back to Python standards @@ -1979,7 +2097,10 @@ def project(self, ptSetName, newPts): if self.debug: tecplot_interface.write_tecplot_scatter( - f"{curveName}_projected_pts.plt", curveName, ["X", "Y", "Z"], xyzProj + f"{curveName}_projected_pts.plt", + curveName, + ["X", "Y", "Z"], + xyzProj, ) # update the point coordinates on this processor. @@ -1990,7 +2111,9 @@ def project(self, ptSetName, newPts): # communicate the deltas if comm: sizes = self.curvePtCounts[ptSetName][curveName] - disp = np.array([np.sum(sizes[:i]) for i in range(comm.size)], dtype="intc") + disp = np.array( + [np.sum(sizes[:i]) for i in range(comm.size)], dtype="intc" + ) # save these for grad comp self.curveProjData[ptSetName][curveName]["sizes"] = sizes @@ -2017,7 +2140,9 @@ def project(self, ptSetName, newPts): deltaGlobal = deltaLocal # also save the sizes and disp stuff as if we have one proc - self.curveProjData[ptSetName][curveName]["sizes"] = self.curvePtCounts[ptSetName][curveName] + self.curveProjData[ptSetName][curveName]["sizes"] = self.curvePtCounts[ + ptSetName + ][curveName] self.curveProjData[ptSetName][curveName]["disp"] = [0] # we only add the deltaLocal to deltaA if this curve is on compA, @@ -2046,10 +2171,22 @@ def project(self, ptSetName, newPts): # using the deltas from the previous project to curve step if flagA: - self._warpSurfPts(self.points[ptSetName][0], newPts, self.surfIdxA[ptSetName], curvePtCoordsA, deltaA) + self._warpSurfPts( + self.points[ptSetName][0], + newPts, + self.surfIdxA[ptSetName], + curvePtCoordsA, + deltaA, + ) if flagB: - self._warpSurfPts(self.points[ptSetName][0], newPts, self.surfIdxB[ptSetName], curvePtCoordsB, deltaB) + self._warpSurfPts( + self.points[ptSetName][0], + newPts, + self.surfIdxB[ptSetName], + curvePtCoordsB, + deltaB, + ) # save some info for the sens. 
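The sizes/disp arrays above are the standard counts-and-displacements pair for the Allgatherv of curve deltas: each rank contributes sizes[r] points and writes them starting at offset disp[r], the cumulative sum of the preceding counts. A sketch of that bookkeeping without MPI:

import numpy as np

sizes = np.array([4, 0, 3, 5], dtype="intc")   # points projected to this curve on each proc
disp = np.array([np.sum(sizes[:i]) for i in range(len(sizes))], dtype="intc")
# disp -> [0, 4, 4, 7]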
computations self.curveProjData[ptSetName]["curvePtCoordsA"] = curvePtCoordsA @@ -2088,7 +2225,9 @@ def project(self, ptSetName, newPts): # Project remaining points to the component as a whole if indAComp: ptsA = newPts[indAComp] - newPts[indAComp] = self._projectToComponent(ptsA, self.compA, self.projData[ptSetName]["compA"]) + newPts[indAComp] = self._projectToComponent( + ptsA, self.compA, self.projData[ptSetName]["compA"] + ) # do the same for B if flagB: @@ -2105,7 +2244,9 @@ def project(self, ptSetName, newPts): if indBComp: ptsB = newPts[indBComp] - newPts[indBComp] = self._projectToComponent(ptsB, self.compB, self.projData[ptSetName]["compB"]) + newPts[indBComp] = self._projectToComponent( + ptsB, self.compB, self.projData[ptSetName]["compB"] + ) # Store component-wide indices for derivative computation self.projData[ptSetName]["compA"]["indAComp"] = indAComp @@ -2151,7 +2292,10 @@ def project_b(self, ptSetName, dIdpt, comm): # call the projection routine with the info # this returns the projected points and we use the same mapping to put them back in place dIdpt[:, indA], compSensA_temp = self._projectToComponent_b( - dIdptA, self.compA, self.projData[ptSetName][surface], surface=surface + dIdptA, + self.compA, + self.projData[ptSetName][surface], + surface=surface, ) # Accumulate triangulated mesh sensitivities @@ -2190,7 +2334,10 @@ def project_b(self, ptSetName, dIdpt, comm): indB = [self.projData[ptSetName]["compB"]["ind"][i] for i in surfaceInd] dIdptB = dIdpt[:, indB] dIdpt[:, indB], compSensB_temp = self._projectToComponent_b( - dIdptB, self.compB, self.projData[ptSetName][surface], surface=surface + dIdptB, + self.compB, + self.projData[ptSetName][surface], + surface=surface, ) for k, v in compSensB_temp.items(): @@ -2257,7 +2404,10 @@ def project_b(self, ptSetName, dIdpt, comm): # deltaA_b is the seed for the points projected to curves if flagA: deltaA_b_local = self._warpSurfPts_b( - dIdpt, self.points[ptSetName][0], self.surfIdxA[ptSetName], curvePtCoordsA + dIdpt, + self.points[ptSetName][0], + self.surfIdxA[ptSetName], + curvePtCoordsA, ) else: deltaA_b_local = np.zeros((N, nCurvePtCoordsAG, 3)) @@ -2265,7 +2415,10 @@ def project_b(self, ptSetName, dIdpt, comm): # do the same for comp B if flagB: deltaB_b_local = self._warpSurfPts_b( - dIdpt, self.points[ptSetName][0], self.surfIdxB[ptSetName], curvePtCoordsB + dIdpt, + self.points[ptSetName][0], + self.surfIdxB[ptSetName], + curvePtCoordsB, ) else: deltaB_b_local = np.zeros((N, nCurvePtCoordsBG, 3)) @@ -2448,12 +2601,20 @@ def _projectToComponent(self, pts, comp, projDict, surface=None): # Compute set of nodal normals by taking the average normal of all # elements surrounding the node. This allows the meshing algorithms, # for instance, to march in an average direction near kinks. 
- nodal_normals = self.adtAPI.adtcomputenodalnormals(comp.nodes.T, triConn.T, quadConn.T) + nodal_normals = self.adtAPI.adtcomputenodalnormals( + comp.nodes.T, triConn.T, quadConn.T + ) comp.nodal_normals = nodal_normals.T # Create new tree (the tree itself is stored in Fortran level) self.adtAPI.adtbuildsurfaceadt( - comp.nodes.T, triConn.T, quadConn.T, BBox.T, useBBox, MPI.COMM_SELF.py2f(), adtID + comp.nodes.T, + triConn.T, + quadConn.T, + BBox.T, + useBBox, + MPI.COMM_SELF.py2f(), + adtID, ) # project @@ -2463,7 +2624,9 @@ def _projectToComponent(self, pts, comp, projDict, surface=None): normProjNotNorm = np.zeros((numPts, 3), dtype=self.dtype) if self.debug: - print(f"[{self.comm.rank}] Projecting to component {comp.name}, pts.shape = {pts.shape}") + print( + f"[{self.comm.rank}] Projecting to component {comp.name}, pts.shape = {pts.shape}" + ) # Call projection function procID, elementType, elementID, uvw = self.adtAPI.adtmindistancesearch( @@ -2519,12 +2682,20 @@ def _projectToComponent_b(self, dIdpt, comp, projDict, surface=None): # Compute set of nodal normals by taking the average normal of all # elements surrounding the node. This allows the meshing algorithms, # for instance, to march in an average direction near kinks. - nodal_normals = self.adtAPI.adtcomputenodalnormals(comp.nodes.T, triConn.T, quadConn.T) + nodal_normals = self.adtAPI.adtcomputenodalnormals( + comp.nodes.T, triConn.T, quadConn.T + ) comp.nodal_normals = nodal_normals.T # Create new tree (the tree itself is stored in Fortran level) self.adtAPI.adtbuildsurfaceadt( - comp.nodes.T, triConn.T, quadConn.T, BBox.T, useBBox, MPI.COMM_SELF.py2f(), adtID + comp.nodes.T, + triConn.T, + quadConn.T, + BBox.T, + useBBox, + MPI.COMM_SELF.py2f(), + adtID, ) # also extract the projection data we have from the fwd pass @@ -2648,7 +2819,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): self.seamDict["parentTria"] = parentTria else: - raise Error(f"The components {self.compA.name} and {self.compB.name} do not intersect.") + raise Error( + f"The components {self.compA.name} and {self.compB.name} do not intersect." + ) # Release memory used by Fortran self.intersectionAPI.releasememory() @@ -2666,7 +2839,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): # we have multiple intersection curves but the user did not specify which direction to pick for i in range(len(newConn)): curvename = f"{self.compA.name}_{self.compB.name}_{i}" - tecplot_interface.writeTecplotFEdata(intNodes, newConn[i], curvename, curvename) + tecplot_interface.writeTecplotFEdata( + intNodes, newConn[i], curvename, curvename + ) raise Error( f"More than one intersection curve between comps {self.compA.name} and {self.compB.name}. " + "The curves are written as Tecplot files in the current directory. " @@ -2740,7 +2915,13 @@ def _getIntersectionSeam(self, comm, firstCall=False): elemIDs + 1 ) # (we need to do this separetely because Fortran will actively change elemIDs contents. 
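_projectToComponent above builds a surface ADT in Fortran and queries it for the closest point on the triangulated component. As a conceptual stand-in only, the brute-force search below projects onto the surface vertices; the real adtAPI search projects onto the triangles themselves and also returns element IDs, parametric coordinates, and normals:

import numpy as np

nodes = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])   # surface vertices
pts = np.array([[0.2, 0.1, 0.5]])                                       # points to project

dist2 = ((pts[:, None, :] - nodes[None, :, :]) ** 2).sum(axis=2)
nearest = dist2.argmin(axis=1)
xyzProj = nodes[nearest]   # crude projected coordinates, for illustration only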
curveMask = self.curveSearchAPI.mindistancecurve( - intNodesOrd.T, self.compB.nodes.T, curveConn.T + 1, xyzProj.T, tanProj.T, dist2, elemIDs + intNodesOrd.T, + self.compB.nodes.T, + curveConn.T + 1, + xyzProj.T, + tanProj.T, + dist2, + elemIDs, ) # Adjust indices back to Python standards @@ -2752,7 +2933,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): self.seamDict[curveName]["xyzProj"] = xyzProj.copy() self.seamDict[curveName]["tanProj"] = tanProj.copy() self.seamDict[curveName]["dist2"] = dist2.copy() - self.seamDict[curveName]["projPtIndx"] = seamConn[:, 0][np.argmin(dist2)].copy() + self.seamDict[curveName]["projPtIndx"] = seamConn[:, 0][ + np.argmin(dist2) + ].copy() # now, find the index of the smallest distance breakList.append(np.argmin(dist2)) @@ -2793,7 +2976,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): if ii == 1: # we need at least 2 features where the element number increases... # we need to reverse the order of our feature curves # and we will flip the elements too so keep track of this change - breakList = np.mod(seamConn.shape[0] - np.array(breakList), seamConn.shape[0]) + breakList = np.mod( + seamConn.shape[0] - np.array(breakList), seamConn.shape[0] + ) # and we need to invert the curves themselves seamConn = np.flip(seamConn, axis=0) @@ -2834,7 +3019,13 @@ def _getIntersectionSeam(self, comm, firstCall=False): # re-sample the curve (try linear for now), to get N number of nodes on it spaced linearly # Call Fortran code. Remember to adjust transposes and indices newCoor, newBarsConn = self.utilitiesAPI.remesh( - nNewNodes, coor.T, barsConn.T + 1, method, spacing, initialSpacing, finalSpacing + nNewNodes, + coor.T, + barsConn.T + 1, + method, + spacing, + initialSpacing, + finalSpacing, ) newCoor = newCoor.T newBarsConn = newBarsConn.T - 1 @@ -2866,7 +3057,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): # Output the intersection curve if self.comm.rank == 0 and self.debug: curvename = f"{self.compA.name}_{self.compB.name}_{self.counter}" - tecplot_interface.writeTecplotFEdata(intNodes, seamConn, curvename, curvename) + tecplot_interface.writeTecplotFEdata( + intNodes, seamConn, curvename, curvename + ) # we need to re-mesh feature curves if the user wants... if self.incCurves: @@ -2902,7 +3095,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): # save the original coordinate of the first point ptBegSave = self.compB.nodes[curveConn[elemBeg, 0]].copy() # and replace this with the starting point we want - self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[curveName].copy() + self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[ + curveName + ].copy() # compute the element lengths starting from elemBeg firstNodes = curveComp.nodes[curveConn[elemBeg:, 0]] @@ -2947,7 +3142,13 @@ def _getIntersectionSeam(self, comm, firstCall=False): elemIDs + 1 ) # (we need to do this separetely because Fortran will actively change elemIDs contents. 
curveMask = self.curveSearchAPI.mindistancecurve( - curvePts.T, self.nodes0.T, self.conn0.T + 1, xyzProj.T, tanProj.T, dist2, elemIDs + curvePts.T, + self.nodes0.T, + self.conn0.T + 1, + xyzProj.T, + tanProj.T, + dist2, + elemIDs, ) dNodes = np.sqrt(dist2) @@ -2972,7 +3173,9 @@ def _getIntersectionSeam(self, comm, firstCall=False): else: # figure out how many elements we need to go in this direction - elemEnd = (np.abs(cumDist - self.distFeature[curveName])).argmin() + elemBeg + elemEnd = ( + np.abs(cumDist - self.distFeature[curveName]) + ).argmin() + elemBeg # get the new connectivity data between the initial and final elements curveConnTrim = curveConn[elemBeg : elemEnd + 1] @@ -2989,7 +3192,13 @@ def _getIntersectionSeam(self, comm, firstCall=False): # now re-sample the curve (try linear for now), to get N number of nodes on it spaced linearly # Call Fortran code. Remember to adjust transposes and indices newCoor, newBarsConn = self.utilitiesAPI.remesh( - nNewNodes, coor.T, barsConn.T + 1, method, spacing, initialSpacing, finalSpacing + nNewNodes, + coor.T, + barsConn.T + 1, + method, + spacing, + initialSpacing, + finalSpacing, ) newCoor = newCoor.T newBarsConn = newBarsConn.T - 1 @@ -3018,7 +3227,13 @@ def _getIntersectionSeam(self, comm, firstCall=False): # now re-sample the curve (try linear for now), to get N number of nodes on it spaced linearly # Call Fortran code. Remember to adjust transposes and indices newCoor, newBarsConn = self.utilitiesAPI.remesh( - nNewNodesReverse, coor.T, barsConn.T + 1, method, spacing, initialSpacing, finalSpacing + nNewNodesReverse, + coor.T, + barsConn.T + 1, + method, + spacing, + initialSpacing, + finalSpacing, ) newCoor = newCoor.T newBarsConn = newBarsConn.T - 1 @@ -3044,14 +3259,19 @@ def _getIntersectionSeam(self, comm, firstCall=False): if firstCall: # save the beginning and end indices of these elements self.seamBeg[curveName] = ( - len(finalConn) + len(remeshedCurveConn) - (nNewNodes + nNewNodesReverse) + 2 + len(finalConn) + + len(remeshedCurveConn) + - (nNewNodes + nNewNodesReverse) + + 2 ) self.seamEnd[curveName] = len(finalConn) + len(remeshedCurveConn) # Output the feature curves if self.comm.rank == 0 and self.debug: curvename = f"featureCurves_{self.counter}" - tecplot_interface.writeTecplotFEdata(remeshedCurves, remeshedCurveConn, curvename, curvename) + tecplot_interface.writeTecplotFEdata( + remeshedCurves, remeshedCurveConn, curvename, curvename + ) # now we are done going over curves, # so we can append all the new curves to the "seam", @@ -3131,7 +3351,9 @@ def _getIntersectionSeam_b(self, seamBar, comm): # save the original coordinate of the first point ptBegSave = self.compB.nodes[curveConn[elemBeg, 0]].copy() # and replace this with the starting point we want - self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[curveName].copy() + self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[ + curveName + ].copy() # get the coordinates of points coor = curveComp.nodes @@ -3279,7 +3501,14 @@ def _getIntersectionSeam_b(self, seamBar, comm): # re-sample the curve (try linear for now), to get N number of nodes on it spaced linearly # Call Fortran code. 
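The feature-curve trimming above walks along the curve from elemBeg and stops at the element whose accumulated length is closest to the requested marching distance. A standalone sketch, assuming cumDist is a plain cumulative sum of element lengths (the exact construction is outside this hunk):

import numpy as np

elemLengths = np.array([0.2, 0.2, 0.3, 0.1, 0.4])   # element lengths from elemBeg onward
cumDist = np.cumsum(elemLengths)                    # -> [0.2, 0.4, 0.7, 0.8, 1.2]
distFeature = 0.65                                  # requested marching distance (placeholder)
elemBeg = 2

elemEnd = np.abs(cumDist - distFeature).argmin() + elemBeg   # -> 4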
Remember to adjust transposes and indices newCoor, newBarsConn, cb = self.utilitiesAPI.remesh_b( - nNewElems, coor.T, newCoorb.T, barsConn.T + 1, method, spacing, initialSpacing, finalSpacing + nNewElems, + coor.T, + newCoorb.T, + barsConn.T + 1, + method, + spacing, + initialSpacing, + finalSpacing, ) intNodesb[ii] += cb.T @@ -3350,18 +3579,25 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): self._projectToComponent(points, self.compB, projDict, surface=surface) comp = "compB" else: - raise Error(f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}.") + raise Error( + f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}." + ) # Identify the points that are within the given tolerance from this surface # surfaceInd contains indices of the provided points not the entire point set surfaceDist = np.sqrt(np.array(projDict["dist2"])) - surfaceInd = [ind for ind, value in enumerate(surfaceDist) if (value < surfaceEps)] + surfaceInd = [ + ind for ind, value in enumerate(surfaceDist) if (value < surfaceEps) + ] # Output the points associated with this surface if self.debug: data = [np.append(points[i], surfaceDist[i]) for i in surfaceInd] tecplot_interface.write_tecplot_scatter( - f"{surface}_points_{self.comm.rank}.plt", f"{surface}", ["X", "Y", "Z", "dist"], data + f"{surface}_points_{self.comm.rank}.plt", + f"{surface}", + ["X", "Y", "Z", "dist"], + data, ) # Save the indices only if there is at least one point @@ -3376,7 +3612,9 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True super().__init__(compA, compB, distTol, DVGeo, dtype, project) self.filletComp = DVGeo.fillets[filletComp] - self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self + self.compA.intersection = ( + self.compB.intersection + ) = self.filletComp.intersection = self def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] @@ -3412,7 +3650,9 @@ def update(self, ptSetName, delta): n = self.filletComp.surfPtsOrig.shape[0] indices = np.linspace(0, n - 1, n, dtype=int) indices = np.delete( - indices, self.filletComp.intersectInd[self.compA.name] + self.filletComp.intersectInd[self.compB.name] + indices, + self.filletComp.intersectInd[self.compA.name] + + self.filletComp.intersectInd[self.compB.name], ) self.indices = indices @@ -3431,17 +3671,27 @@ def project(self, ptSetName, newPts): # redo the delta because this is how the fillet was initially set up # TODO maybe stop doing this newCurveCoords = np.vstack( - (self.compA.intersectPts[self.filletComp.name], self.compB.intersectPts[self.filletComp.name]) + ( + self.compA.intersectPts[self.filletComp.name], + self.compB.intersectPts[self.filletComp.name], + ) ) curvePtCoords = np.vstack( - (self.compA.intersectPtsOrig[self.filletComp.name], self.compB.intersectPtsOrig[self.filletComp.name]) + ( + self.compA.intersectPtsOrig[self.filletComp.name], + self.compB.intersectPtsOrig[self.filletComp.name], + ) ) delta = newCurveCoords - curvePtCoords # modify the intersection curves of the fillet ptsNew = deepcopy(self.filletComp.surfPtsOrig) - ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[self.filletComp.name] - ptsNew[self.filletComp.intersectInd[self.compB.name]] = self.compB.intersectPts[self.filletComp.name] + ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[ + self.filletComp.name + ] + ptsNew[self.filletComp.intersectInd[self.compB.name]] = 
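Two small index operations drive the fillet bookkeeping above: associatePointsToSurface keeps only the points whose projection distance is under surfaceEps, and FilletIntersection.update strips the intersection-curve indices so that only interior fillet points get warped. A standalone sketch of both with made-up data:

import numpy as np

surfaceDist = np.sqrt(np.array([1e-8, 0.04, 1e-10, 0.25]))
surfaceEps = 1e-3
surfaceInd = [ind for ind, value in enumerate(surfaceDist) if value < surfaceEps]   # [0, 2]

n = 6
indices = np.linspace(0, n - 1, n, dtype=int)
intersectIndA, intersectIndB = [0, 1], [5]                    # placeholder intersection indices
indices = np.delete(indices, intersectIndA + intersectIndB)   # interior points: [2, 3, 4]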
self.compB.intersectPts[ + self.filletComp.name + ] pts0 = self.filletComp.surfPtsOrig indices = self.indices From 1732402ed0ed2c427e485a7989b89c42923c687f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 25 Sep 2023 17:34:59 -0400 Subject: [PATCH 029/110] debugging fillet warp --- pygeo/parameterization/DVGeoMulti.py | 319 +++++++++++---------------- 1 file changed, 131 insertions(+), 188 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 61b7665e..c32ff8a2 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -62,9 +62,7 @@ def __init__( ): # Check to make sure pySurf is installed before initializing if not pysurfInstalled and not filletIntersection: - raise ImportError( - "pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode." - ) + raise ImportError("pySurf is not installed and is required to use DVGeometryMulti outside of fillet mode.") self.compNames = [] self.comps = OrderedDict() @@ -216,9 +214,7 @@ def addComponent( barsConn = None # initialize the component object - component = component( - comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax - ) + component = component(comp, DVGeo, nodes, triConn, triConnStack, barsConn, xMin, xMax) # add component object to the dictionary and list keeping track of components # if this component is a fillet (no DVGeo) put in a separate list to avoid unnecessary checks for a DVGeo later @@ -332,9 +328,7 @@ def addIntersection( if filletComp is None: print("no") - inter = FilletIntersection( - compA, compB, filletComp, distTol, self, self.dtype - ) + inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype) # initialize a standard intersection object else: @@ -381,11 +375,20 @@ def addCurve(self, compName, filletName, curveFiles): fillet = self.fillets[filletName] intersection = fillet.intersection - filletIntCurve, filletIntInd = intersection.findIntersection( - fillet.surfPts, curvePts - ) + filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) + lenF = len(filletIntInd) + lenC = len(compIntInd) + + # TODO this is very hacky. 
stop + if lenF < lenC: + lenC = lenF + filletIntInd = filletIntInd[: lenC or None] + compIntInd = compIntInd[: lenC or None] + filletIntCurve = filletIntCurve[:lenC, :] + compIntCurve = filletIntCurve[:lenC, :] + fillet.intersectInd.update({compName: filletIntInd}) comp.intersectInd.update({filletName: compIntInd}) @@ -406,7 +409,7 @@ def addPointSet( familyName=None, compNames=None, comm=None, - applyIC=False, + applyIC=True, **kwargs, ): """ @@ -537,9 +540,7 @@ def addPointSet( numPts = 1 dist2 = np.ones(numPts, dtype=self.dtype) * 1e10 xyzProj = np.zeros((numPts, 3), dtype=self.dtype) - normProjNotNorm = np.zeros( - (numPts, 3), dtype=self.dtype - ) + normProjNotNorm = np.zeros((numPts, 3), dtype=self.dtype) # Call projection function _, _, _, _ = self.adtAPI.adtmindistancesearch( @@ -581,15 +582,24 @@ def addPointSet( ) # using the mapping array, add the pointsets to respective DVGeo objects - for comp in compNames: - if comp != "fillet": + if not self.filletIntersection: + for comp in compNames: compMap = self.points[ptName].compMap[comp] self.comps[comp].DVGeo.addPointSet(points[compMap], ptName, **kwargs) self.comps[comp].surfPtsName = ptName self.comps[comp].surfPts = points - else: - self.fillets[comp].surfPtsName = ptName - self.fillets[comp].surfPts = points + + elif self.filletIntersection: + for comp in compNames: + if comp != "fillet": + self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) + self.comps[comp].surfPtsName = ptName + self.comps[comp].surfPts = points + self.comps[comp].surfPtsOrig = points + else: + self.fillets[comp].surfPtsName = ptName + self.fillets[comp].surfPts = points + self.fillets[comp].surfPtsOrig = deepcopy(self.fillets[comp].surfPts) # check if this pointset will get the IC treatment if applyIC: @@ -686,8 +696,11 @@ def update(self, ptSetName, config=None): ptsComp = comp.DVGeo.update(ptSetName) # now save this info with the pointset mapping - ptMap = self.points[ptSetName].compMap[compName] - newPts[ptMap] = ptsComp + if not self.filletIntersection: + ptMap = self.points[ptSetName].compMap[compName] + newPts[ptMap] = ptsComp + else: + newPts = self.points[ptSetName].points # get the delta delta = newPts - self.points[ptSetName].points @@ -819,9 +832,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): for IC in self.intersectComps: if IC.projectFlag and ptSetName in IC.points: # initialize the seed contribution to the intersection seam and feature curves from project_b - IC.seamBarProj[ptSetName] = np.zeros( - (N, IC.seam0.shape[0], IC.seam0.shape[1]) - ) + IC.seamBarProj[ptSetName] = np.zeros((N, IC.seam0.shape[0], IC.seam0.shape[1])) # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points @@ -875,9 +886,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we only do this if this component has at least one DV if nDVComp > 0: # this part of the sensitivity matrix is owned by this dvgeo - dIdxComp = DVGeo.convertSensitivityToDict( - dIdx[:, dvOffset : dvOffset + nDVComp] - ) + dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) for k, v in dIdxComp.items(): dIdxDict[k] = v @@ -977,6 +986,13 @@ def writeCompSurf(self, compName, fileName): comp = self.fillets[compName] comp.writeSurf(fileName) + def writeCompCurve(self, compName, curveName, fileName): + if compName in self.compNames: + comp = self.comps[compName] + elif compName in self.filletNames: + comp = 
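The length-matching hack above relies on the slice idiom [: lenC or None]: when the count is zero, 0 or None evaluates to None and the slice keeps everything instead of producing an empty list. A two-line illustration:

vals = [1, 2, 3, 4]
print(vals[:2 or None])   # [1, 2]
print(vals[:0 or None])   # [1, 2, 3, 4]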
self.fillets[compName] + comp.writeCurve(curveName, fileName) + def writePointSet(self, name, fileName, solutionTime=None): """ Write a given point set to a tecplot file @@ -1013,9 +1029,7 @@ def _readCGNSFile(self, filename): if self.comm.rank == 0: print(f"Reading file {filename}") # use the default routine in tsurftools - nodes, sectionDict = tsurf_tools.getCGNSsections( - filename, comm=MPI.COMM_SELF - ) + nodes, sectionDict = tsurf_tools.getCGNSsections(filename, comm=MPI.COMM_SELF) print("Finished reading the cgns file") # Convert the nodes to complex if necessary @@ -1029,17 +1043,13 @@ def _readCGNSFile(self, filename): if "triaConnF" in sectionDict[part].keys(): # this is a surface, read the tri connectivities triConn[part.lower()] = sectionDict[part]["triaConnF"] - triConnStack = np.vstack( - (triConnStack, sectionDict[part]["triaConnF"]) - ) + triConnStack = np.vstack((triConnStack, sectionDict[part]["triaConnF"])) if "barsConn" in sectionDict[part].keys(): # this is a curve, save the curve connectivity barsConn[part.lower()] = sectionDict[part]["barsConn"] - print( - f"The {filename} mesh has {len(nodes)} nodes and {len(triConnStack)} elements." - ) + print(f"The {filename} mesh has {len(nodes)} nodes and {len(triConnStack)} elements.") else: # create these to recieve the data nodes = None @@ -1146,7 +1156,7 @@ def updateTriMesh(self): class Comp: - def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): + def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, surfPtsName=None, tol=1e-3): self.name = name self.fillet = fillet self.DVGeo = DVGeo @@ -1154,6 +1164,7 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, tol=1e-3): self.surfPtsOrig = deepcopy(surfPts) self.xMin = xMin self.xMax = xMax + self.surfPtsName = surfPtsName self.intersection = None self.intersectPtsOrig = {} @@ -1164,7 +1175,7 @@ def updateSurfPts(self): if self.fillet: print("no") else: - self.surfPts = self.DVGeo.update("datPts") + self.surfPts = self.DVGeo.update(self.surfPtsName) def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" @@ -1172,6 +1183,12 @@ def writeSurf(self, fileName): writeTecplot1D(f, self.name, self.surfPts) closeTecplot(f) + def writeCurve(self, inter, fileName): + fileName = f"{fileName}_{self.name}_curve.dat" + f = openTecplot(fileName, 3) + writeTecplot1D(f, self.name, self.intersectPts[inter]) + closeTecplot(f) + class PointSet: def __init__(self, points, comm): @@ -1232,9 +1249,7 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): # Vectorized point-based warping rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt( - rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16 - ) + LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) LdefoDist3 = LdefoDist**3 Wi = LdefoDist3 den = np.sum(Wi) @@ -1264,9 +1279,7 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # Vectorized point-based warping rr = ptCoords - curvePtCoords - LdefoDist = 1.0 / np.sqrt( - rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16 - ) + LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) LdefoDist3 = LdefoDist**3 Wi = LdefoDist3 den = np.sum(Wi) @@ -1384,9 +1397,7 @@ def __init__( self.excludeSurfaces = {} for k, v in excludeSurfaces.items(): if k.lower() in self.trackSurfaces: - raise Error( - f"Surface {k} cannot be in both trackSurfaces and excludeSurfaces." 
- ) + raise Error(f"Surface {k} cannot be in both trackSurfaces and excludeSurfaces.") self.excludeSurfaces[k.lower()] = v # Save anisotropy list @@ -1427,18 +1438,14 @@ def __init__( curveComp = self.compA self.curvesOnA.append(curveName) else: - raise Error( - f"Curve {curveName} does not belong in {self.compA.name} or {self.compB.name}." - ) + raise Error(f"Curve {curveName} does not belong in {self.compA.name} or {self.compB.name}.") # sort the feature curve newConn, newMap = tsurf_tools.FEsort(curveComp.barsConn[curveName].tolist()) # we only want to have a single curve if len(newConn) > 1: - raise Error( - f"The curve {curveName} generated more than one curve with FESort." - ) + raise Error(f"The curve {curveName} generated more than one curve with FESort.") # get the connectivity newConn = newConn[0] @@ -1456,10 +1463,7 @@ def __init__( msign = np.sign(marchDirs[ii]) # check if we need to flip - if ( - msign * curveNodes[newConn[0][0]][mdir] - > msign * curveNodes[newConn[0][1]][mdir] - ): + if msign * curveNodes[newConn[0][0]][mdir] > msign * curveNodes[newConn[0][1]][mdir]: # flip on both axes newConn = np.flip(newConn, axis=0) newConn = np.flip(newConn, axis=1) @@ -1566,22 +1570,16 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # Associate points with the excluded surfaces for surface in self.excludeSurfaces: surfaceEps = self.excludeSurfaces[surface] - self.associatePointsToSurface( - intersectPts, ptSetName, surface, surfaceEps - ) + self.associatePointsToSurface(intersectPts, ptSetName, surface, surfaceEps) # Combine the excluded indices using a set to avoid duplicates excludeSet = set() for surface in self.excludeSurfaces: if surface in self.compA.triConn: # Pop this surface from the saved data - surfaceInd = self.projData[ptSetName]["compA"][ - "surfaceInd" - ].pop(surface) + surfaceInd = self.projData[ptSetName]["compA"]["surfaceInd"].pop(surface) elif surface in self.compB.triConn: - surfaceInd = self.projData[ptSetName]["compB"][ - "surfaceInd" - ].pop(surface) + surfaceInd = self.projData[ptSetName]["compB"]["surfaceInd"].pop(surface) excludeSet.update(surfaceInd) @@ -1634,15 +1632,11 @@ def addPointSet(self, pts, ptSetName, compMap, comm): elif surface in self.compB.triConn: compPoints = pts[indB] else: - raise Error( - f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}." 
- ) + raise Error(f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}.") # This proc has some points to project if len(compPoints) > 0: - self.associatePointsToSurface( - compPoints, ptSetName, surface, surfaceEps - ) + self.associatePointsToSurface(compPoints, ptSetName, surface, surfaceEps) # if we include the feature curves in the warping, we also need to project the added points to the intersection and feature curves and determine how the points map to the curves if self.incCurves: @@ -1714,9 +1708,7 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # this returns a bool array of indices that satisfy the conditions # we check for elemIDs because we projected to all curves at once - curveBool = np.all( - [d < eps, elemIDs >= seamBeg, elemIDs < seamEnd], axis=0 - ) + curveBool = np.all([d < eps, elemIDs >= seamBeg, elemIDs < seamEnd], axis=0) # get the indices of the points mapped to this element idxs = np.nonzero(curveBool) @@ -1765,9 +1757,7 @@ def addPointSet(self, pts, ptSetName, compMap, comm): idxs = self.curveProjIdx[ptSetName][curveName] # call the utility function - nPtsTotal, nPtsProcs, curvePtCoords = self._commCurveProj( - pts, idxs, comm - ) + nPtsTotal, nPtsProcs, curvePtCoords = self._commCurveProj(pts, idxs, comm) # save the displacements and points self.curvePtCounts[ptSetName][curveName] = nPtsProcs @@ -1802,9 +1792,7 @@ def update(self, ptSetName, delta): # 1) the mesh to be output for visualization # 2) the optimization to continue after raising a fail flag if self.comm.rank == 0: - print( - "The intersection topology has changed. The intersection will not be updated." - ) + print("The intersection topology has changed. The intersection will not be updated.") return delta # Get the two end points for the line elements @@ -1876,9 +1864,7 @@ def update(self, ptSetName, delta): interp = np.zeros(3, dtype=self.dtype) for iDim in range(3): # numerator gets two integrals with the delta components - num = np.sum( - (dr1[:, iDim] - dr0[:, iDim]) * eval2 + dr0[:, iDim] * eval1 - ) + num = np.sum((dr1[:, iDim] - dr0[:, iDim]) * eval2 + dr0[:, iDim] * eval1) # final result interp[iDim] = num / den @@ -2050,9 +2036,7 @@ def project(self, ptSetName, newPts): nPoints = ptsOnCurve.shape[0] if self.debug: - print( - f"[{self.comm.rank}] curveName: {curveName}, nPoints on the fwd pass: {nPoints}" - ) + print(f"[{self.comm.rank}] curveName: {curveName}, nPoints on the fwd pass: {nPoints}") # Initialize references if user provided none dist2 = np.ones(nPoints, dtype=self.dtype) * 1e10 @@ -2111,9 +2095,7 @@ def project(self, ptSetName, newPts): # communicate the deltas if comm: sizes = self.curvePtCounts[ptSetName][curveName] - disp = np.array( - [np.sum(sizes[:i]) for i in range(comm.size)], dtype="intc" - ) + disp = np.array([np.sum(sizes[:i]) for i in range(comm.size)], dtype="intc") # save these for grad comp self.curveProjData[ptSetName][curveName]["sizes"] = sizes @@ -2140,9 +2122,7 @@ def project(self, ptSetName, newPts): deltaGlobal = deltaLocal # also save the sizes and disp stuff as if we have one proc - self.curveProjData[ptSetName][curveName]["sizes"] = self.curvePtCounts[ - ptSetName - ][curveName] + self.curveProjData[ptSetName][curveName]["sizes"] = self.curvePtCounts[ptSetName][curveName] self.curveProjData[ptSetName][curveName]["disp"] = [0] # we only add the deltaLocal to deltaA if this curve is on compA, @@ -2225,9 +2205,7 @@ def project(self, ptSetName, newPts): # Project remaining points to the component as a whole if indAComp: ptsA = 
newPts[indAComp] - newPts[indAComp] = self._projectToComponent( - ptsA, self.compA, self.projData[ptSetName]["compA"] - ) + newPts[indAComp] = self._projectToComponent(ptsA, self.compA, self.projData[ptSetName]["compA"]) # do the same for B if flagB: @@ -2244,9 +2222,7 @@ def project(self, ptSetName, newPts): if indBComp: ptsB = newPts[indBComp] - newPts[indBComp] = self._projectToComponent( - ptsB, self.compB, self.projData[ptSetName]["compB"] - ) + newPts[indBComp] = self._projectToComponent(ptsB, self.compB, self.projData[ptSetName]["compB"]) # Store component-wide indices for derivative computation self.projData[ptSetName]["compA"]["indAComp"] = indAComp @@ -2601,9 +2577,7 @@ def _projectToComponent(self, pts, comp, projDict, surface=None): # Compute set of nodal normals by taking the average normal of all # elements surrounding the node. This allows the meshing algorithms, # for instance, to march in an average direction near kinks. - nodal_normals = self.adtAPI.adtcomputenodalnormals( - comp.nodes.T, triConn.T, quadConn.T - ) + nodal_normals = self.adtAPI.adtcomputenodalnormals(comp.nodes.T, triConn.T, quadConn.T) comp.nodal_normals = nodal_normals.T # Create new tree (the tree itself is stored in Fortran level) @@ -2624,9 +2598,7 @@ def _projectToComponent(self, pts, comp, projDict, surface=None): normProjNotNorm = np.zeros((numPts, 3), dtype=self.dtype) if self.debug: - print( - f"[{self.comm.rank}] Projecting to component {comp.name}, pts.shape = {pts.shape}" - ) + print(f"[{self.comm.rank}] Projecting to component {comp.name}, pts.shape = {pts.shape}") # Call projection function procID, elementType, elementID, uvw = self.adtAPI.adtmindistancesearch( @@ -2682,9 +2654,7 @@ def _projectToComponent_b(self, dIdpt, comp, projDict, surface=None): # Compute set of nodal normals by taking the average normal of all # elements surrounding the node. This allows the meshing algorithms, # for instance, to march in an average direction near kinks. - nodal_normals = self.adtAPI.adtcomputenodalnormals( - comp.nodes.T, triConn.T, quadConn.T - ) + nodal_normals = self.adtAPI.adtcomputenodalnormals(comp.nodes.T, triConn.T, quadConn.T) comp.nodal_normals = nodal_normals.T # Create new tree (the tree itself is stored in Fortran level) @@ -2819,9 +2789,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): self.seamDict["parentTria"] = parentTria else: - raise Error( - f"The components {self.compA.name} and {self.compB.name} do not intersect." - ) + raise Error(f"The components {self.compA.name} and {self.compB.name} do not intersect.") # Release memory used by Fortran self.intersectionAPI.releasememory() @@ -2839,9 +2807,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): # we have multiple intersection curves but the user did not specify which direction to pick for i in range(len(newConn)): curvename = f"{self.compA.name}_{self.compB.name}_{i}" - tecplot_interface.writeTecplotFEdata( - intNodes, newConn[i], curvename, curvename - ) + tecplot_interface.writeTecplotFEdata(intNodes, newConn[i], curvename, curvename) raise Error( f"More than one intersection curve between comps {self.compA.name} and {self.compB.name}. " + "The curves are written as Tecplot files in the current directory. 
" @@ -2933,9 +2899,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): self.seamDict[curveName]["xyzProj"] = xyzProj.copy() self.seamDict[curveName]["tanProj"] = tanProj.copy() self.seamDict[curveName]["dist2"] = dist2.copy() - self.seamDict[curveName]["projPtIndx"] = seamConn[:, 0][ - np.argmin(dist2) - ].copy() + self.seamDict[curveName]["projPtIndx"] = seamConn[:, 0][np.argmin(dist2)].copy() # now, find the index of the smallest distance breakList.append(np.argmin(dist2)) @@ -2976,9 +2940,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): if ii == 1: # we need at least 2 features where the element number increases... # we need to reverse the order of our feature curves # and we will flip the elements too so keep track of this change - breakList = np.mod( - seamConn.shape[0] - np.array(breakList), seamConn.shape[0] - ) + breakList = np.mod(seamConn.shape[0] - np.array(breakList), seamConn.shape[0]) # and we need to invert the curves themselves seamConn = np.flip(seamConn, axis=0) @@ -3057,9 +3019,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): # Output the intersection curve if self.comm.rank == 0 and self.debug: curvename = f"{self.compA.name}_{self.compB.name}_{self.counter}" - tecplot_interface.writeTecplotFEdata( - intNodes, seamConn, curvename, curvename - ) + tecplot_interface.writeTecplotFEdata(intNodes, seamConn, curvename, curvename) # we need to re-mesh feature curves if the user wants... if self.incCurves: @@ -3095,9 +3055,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): # save the original coordinate of the first point ptBegSave = self.compB.nodes[curveConn[elemBeg, 0]].copy() # and replace this with the starting point we want - self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[ - curveName - ].copy() + self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[curveName].copy() # compute the element lengths starting from elemBeg firstNodes = curveComp.nodes[curveConn[elemBeg:, 0]] @@ -3173,9 +3131,7 @@ def _getIntersectionSeam(self, comm, firstCall=False): else: # figure out how many elements we need to go in this direction - elemEnd = ( - np.abs(cumDist - self.distFeature[curveName]) - ).argmin() + elemBeg + elemEnd = (np.abs(cumDist - self.distFeature[curveName])).argmin() + elemBeg # get the new connectivity data between the initial and final elements curveConnTrim = curveConn[elemBeg : elemEnd + 1] @@ -3259,19 +3215,14 @@ def _getIntersectionSeam(self, comm, firstCall=False): if firstCall: # save the beginning and end indices of these elements self.seamBeg[curveName] = ( - len(finalConn) - + len(remeshedCurveConn) - - (nNewNodes + nNewNodesReverse) - + 2 + len(finalConn) + len(remeshedCurveConn) - (nNewNodes + nNewNodesReverse) + 2 ) self.seamEnd[curveName] = len(finalConn) + len(remeshedCurveConn) # Output the feature curves if self.comm.rank == 0 and self.debug: curvename = f"featureCurves_{self.counter}" - tecplot_interface.writeTecplotFEdata( - remeshedCurves, remeshedCurveConn, curvename, curvename - ) + tecplot_interface.writeTecplotFEdata(remeshedCurves, remeshedCurveConn, curvename, curvename) # now we are done going over curves, # so we can append all the new curves to the "seam", @@ -3351,9 +3302,7 @@ def _getIntersectionSeam_b(self, seamBar, comm): # save the original coordinate of the first point ptBegSave = self.compB.nodes[curveConn[elemBeg, 0]].copy() # and replace this with the starting point we want - self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[ - curveName - ].copy() + 
self.compB.nodes[curveConn[elemBeg, 0]] = curveBegCoor[curveName].copy() # get the coordinates of points coor = curveComp.nodes @@ -3579,16 +3528,12 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): self._projectToComponent(points, self.compB, projDict, surface=surface) comp = "compB" else: - raise Error( - f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}." - ) + raise Error(f"Surface {surface} was not found in {self.compA.name} or {self.compB.name}.") # Identify the points that are within the given tolerance from this surface # surfaceInd contains indices of the provided points not the entire point set surfaceDist = np.sqrt(np.array(projDict["dist2"])) - surfaceInd = [ - ind for ind, value in enumerate(surfaceDist) if (value < surfaceEps) - ] + surfaceInd = [ind for ind, value in enumerate(surfaceDist) if (value < surfaceEps)] # Output the points associated with this surface if self.debug: @@ -3612,9 +3557,7 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True super().__init__(compA, compB, distTol, DVGeo, dtype, project) self.filletComp = DVGeo.fillets[filletComp] - self.compA.intersection = ( - self.compB.intersection - ) = self.filletComp.intersection = self + self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] @@ -3647,22 +3590,23 @@ def addPointSet(self, pts, ptSetName, compMap, comm): self.points[ptSetName] = [pts.copy(), [], [], comm] def update(self, ptSetName, delta): - n = self.filletComp.surfPtsOrig.shape[0] - indices = np.linspace(0, n - 1, n, dtype=int) - indices = np.delete( - indices, - self.filletComp.intersectInd[self.compA.name] - + self.filletComp.intersectInd[self.compB.name], - ) - self.indices = indices + # update the pointset unless we haven't figured out the intersections yet + if len(self.filletComp.intersectInd) > 0: + n = self.filletComp.surfPtsOrig.shape[0] + indices = np.linspace(0, n - 1, n, dtype=int) + indices = np.delete( + indices, + self.filletComp.intersectInd[self.compA.name] + self.filletComp.intersectInd[self.compB.name], + ) + self.indices = indices - # make sure each component's subset of points on the intersection curve is up to date - self.compA.intersectPts[self.filletComp.name] = self.compA.surfPts[ - self.compA.intersectInd[self.filletComp.name] - ] - self.compB.intersectPts[self.filletComp.name] = self.compB.surfPts[ - self.compB.intersectInd[self.filletComp.name] - ] + # make sure each component's subset of points on the intersection curve is up to date + self.compA.intersectPts[self.filletComp.name] = self.compA.surfPts[ + self.compA.intersectInd[self.filletComp.name] + ] + self.compB.intersectPts[self.filletComp.name] = self.compB.surfPts[ + self.compB.intersectInd[self.filletComp.name] + ] # don't update the delta because we aren't remeshing return delta @@ -3670,35 +3614,34 @@ def update(self, ptSetName, delta): def project(self, ptSetName, newPts): # redo the delta because this is how the fillet was initially set up # TODO maybe stop doing this - newCurveCoords = np.vstack( - ( - self.compA.intersectPts[self.filletComp.name], - self.compB.intersectPts[self.filletComp.name], + + # update the pointset unless we haven't figured out the intersections yet + if len(self.filletComp.intersectInd) > 0: + newCurveCoords = np.vstack( + ( + self.compA.intersectPts[self.filletComp.name], + self.compB.intersectPts[self.filletComp.name], + ) 
) - ) - curvePtCoords = np.vstack( - ( - self.compA.intersectPtsOrig[self.filletComp.name], - self.compB.intersectPtsOrig[self.filletComp.name], + curvePtCoords = np.vstack( + ( + self.compA.intersectPtsOrig[self.filletComp.name], + self.compB.intersectPtsOrig[self.filletComp.name], + ) ) - ) - delta = newCurveCoords - curvePtCoords + delta = newCurveCoords - curvePtCoords - # modify the intersection curves of the fillet - ptsNew = deepcopy(self.filletComp.surfPtsOrig) - ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[ - self.filletComp.name - ] - ptsNew[self.filletComp.intersectInd[self.compB.name]] = self.compB.intersectPts[ - self.filletComp.name - ] + # modify the intersection curves of the fillet + ptsNew = deepcopy(self.filletComp.surfPtsOrig) + ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[self.filletComp.name] + ptsNew[self.filletComp.intersectInd[self.compB.name]] = self.compB.intersectPts[self.filletComp.name] - pts0 = self.filletComp.surfPtsOrig - indices = self.indices + pts0 = self.filletComp.surfPtsOrig + indices = self.indices - self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) + self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) - self.filletComp.surfPts = ptsNew + self.filletComp.surfPts = ptsNew def _getIntersectionSeam(self, comm): pass From db87800fdd959e94a7bd935819488eabc94834cb Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 26 Sep 2023 18:41:05 -0400 Subject: [PATCH 030/110] warping works through adflow --- pygeo/parameterization/DVGeoMulti.py | 45 +++++++++++++++++----------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index c32ff8a2..c6da0859 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -193,7 +193,7 @@ def addComponent( # initialize the component object # a different class is used for fillets & their adjacent components - component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax) + component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax, self.comm) # we have a standard intersection group which has structured surfaces else: @@ -378,16 +378,18 @@ def addCurve(self, compName, filletName, curveFiles): filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) - lenF = len(filletIntInd) - lenC = len(compIntInd) + # lenF = len(filletIntInd) + # lenC = len(compIntInd) - # TODO this is very hacky. stop - if lenF < lenC: - lenC = lenF - filletIntInd = filletIntInd[: lenC or None] - compIntInd = compIntInd[: lenC or None] - filletIntCurve = filletIntCurve[:lenC, :] - compIntCurve = filletIntCurve[:lenC, :] + # # TODO this is very hacky. 
stop + # if lenF < lenC: + # lenC = lenF + + # filletIntInd = filletIntInd[: lenC or None] + # compIntInd = compIntInd[: lenC or None] + + # filletIntCurve = filletIntCurve[:lenC, :] + # compIntCurve = filletIntCurve[:lenC, :] fillet.intersectInd.update({compName: filletIntInd}) comp.intersectInd.update({filletName: compIntInd}) @@ -478,7 +480,8 @@ def addPointSet( comp, ) else: - compNames = [familyName] + if familyName is not None: + compNames = [familyName] # create the pointset class self.points[ptName] = PointSet(points, comm=comm) @@ -1156,7 +1159,7 @@ def updateTriMesh(self): class Comp: - def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, surfPtsName=None, tol=1e-3): + def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=None, tol=1e-3): self.name = name self.fillet = fillet self.DVGeo = DVGeo @@ -1164,6 +1167,7 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, surfPtsName=None, t self.surfPtsOrig = deepcopy(surfPts) self.xMin = xMin self.xMax = xMax + self.comm = comm self.surfPtsName = surfPtsName self.intersection = None @@ -1184,10 +1188,14 @@ def writeSurf(self, fileName): closeTecplot(f) def writeCurve(self, inter, fileName): - fileName = f"{fileName}_{self.name}_curve.dat" + fileName = f"{fileName}_{self.name}_curve_a.dat" f = openTecplot(fileName, 3) writeTecplot1D(f, self.name, self.intersectPts[inter]) closeTecplot(f) + fileName = f"{fileName}_{self.name}_curve_b.dat" + f = openTecplot(fileName, 3) + writeTecplot1D(f, self.name, self.surfPts[self.intersectInd[inter]]) + closeTecplot(f) class PointSet: @@ -1219,7 +1227,7 @@ def __init__(self, compA, compB, distTol, DVGeo, dtype, project): self.projectFlag = project def setSurface(self, comm): - """This set the new udpated surface on which we need to compute the new intersection curve""" + """This set the new updated surface on which we need to compute the new intersection curve""" # get the updated surface coordinates self._getUpdatedCoords() @@ -3621,23 +3629,26 @@ def project(self, ptSetName, newPts): ( self.compA.intersectPts[self.filletComp.name], self.compB.intersectPts[self.filletComp.name], + # self.compA.intersectCurvePts, + # self.compB.intersectCurvePts, ) ) curvePtCoords = np.vstack( ( self.compA.intersectPtsOrig[self.filletComp.name], self.compB.intersectPtsOrig[self.filletComp.name], + # self.compA.intersectCurvePtsOrig, + # self.compB.intersectCurvePtsOrig, ) ) delta = newCurveCoords - curvePtCoords # modify the intersection curves of the fillet ptsNew = deepcopy(self.filletComp.surfPtsOrig) - ptsNew[self.filletComp.intersectInd[self.compA.name]] = self.compA.intersectPts[self.filletComp.name] - ptsNew[self.filletComp.intersectInd[self.compB.name]] = self.compB.intersectPts[self.filletComp.name] pts0 = self.filletComp.surfPtsOrig - indices = self.indices + # indices = self.indices + indices = np.linspace(0, ptsNew.shape[0] - 1, ptsNew.shape[0], dtype=int) self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) From fd05dea8f6e2fca63c94817f793e98843baed2be Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 2 Oct 2023 10:54:01 -0400 Subject: [PATCH 031/110] works with full curve for intersection definition --- pygeo/parameterization/DVGeoMulti.py | 93 ++++++++++------------------ 1 file changed, 34 insertions(+), 59 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index c6da0859..76b8a166 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -367,38 +367,27 
@@ def addIntersection( def addCurve(self, compName, filletName, curveFiles): if not self.filletIntersection: - print("no") + print("no") # TODO real error curvePts = self._readDATFile(curveFiles, surf=False) + # figure out which component and fillet we're dealing with + # and which intersection object they belong to comp = self.comps[compName] fillet = self.fillets[filletName] intersection = fillet.intersection - filletIntCurve, filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) - compIntCurve, compIntInd = intersection.findIntersection(comp.surfPts, curvePts) - - # lenF = len(filletIntInd) - # lenC = len(compIntInd) - - # # TODO this is very hacky. stop - # if lenF < lenC: - # lenC = lenF - - # filletIntInd = filletIntInd[: lenC or None] - # compIntInd = compIntInd[: lenC or None] - - # filletIntCurve = filletIntCurve[:lenC, :] - # compIntCurve = filletIntCurve[:lenC, :] - + # find the indices of fillet points that lie on this intersection curve + # we need to know this to handle duplicates in derivative calcs later + filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) fillet.intersectInd.update({compName: filletIntInd}) - comp.intersectInd.update({filletName: compIntInd}) - - fillet.intersectPtsOrig.update({compName: filletIntCurve}) - comp.intersectPtsOrig.update({filletName: compIntCurve}) - fillet.intersectPts.update({compName: filletIntCurve}) - comp.intersectPts.update({filletName: compIntCurve}) + # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD + ptSetName = f"{compName}_curve" + comp.curvePtsName = ptSetName + comp.curvePts = curvePts + comp.curvePtsOrig = deepcopy(curvePts) + comp.DVGeo.addPointSet(curvePts, ptSetName) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -603,6 +592,7 @@ def addPointSet( self.fillets[comp].surfPtsName = ptName self.fillets[comp].surfPts = points self.fillets[comp].surfPtsOrig = deepcopy(self.fillets[comp].surfPts) + self.fillets[comp].nPts = len(points) # check if this pointset will get the IC treatment if applyIC: @@ -1169,10 +1159,9 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=N self.xMax = xMax self.comm = comm self.surfPtsName = surfPtsName - + self.curvePts = [] + self.curvePtsName = None self.intersection = None - self.intersectPtsOrig = {} - self.intersectPts = {} self.intersectInd = {} def updateSurfPts(self): @@ -1180,6 +1169,7 @@ def updateSurfPts(self): print("no") else: self.surfPts = self.DVGeo.update(self.surfPtsName) + self.curvePts = self.DVGeo.update(self.curvePtsName) def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" @@ -1188,13 +1178,14 @@ def writeSurf(self, fileName): closeTecplot(f) def writeCurve(self, inter, fileName): - fileName = f"{fileName}_{self.name}_curve_a.dat" - f = openTecplot(fileName, 3) - writeTecplot1D(f, self.name, self.intersectPts[inter]) - closeTecplot(f) - fileName = f"{fileName}_{self.name}_curve_b.dat" + fileName = f"{fileName}_{self.name}_curve.dat" f = openTecplot(fileName, 3) - writeTecplot1D(f, self.name, self.surfPts[self.intersectInd[inter]]) + + if self.fillet: + writeTecplot1D(f, self.name, self.surfPts[self.intersectInd[inter]]) + else: + writeTecplot1D(f, self.name, self.curvePts) + closeTecplot(f) @@ -3599,23 +3590,15 @@ def addPointSet(self, pts, ptSetName, compMap, comm): def update(self, ptSetName, delta): # update the pointset unless we haven't figured out the intersections yet - if len(self.filletComp.intersectInd) > 
0: + # TODO change to a firstUpdate flag or something + if len(self.compA.curvePts) > 0: + pass + + else: n = self.filletComp.surfPtsOrig.shape[0] indices = np.linspace(0, n - 1, n, dtype=int) - indices = np.delete( - indices, - self.filletComp.intersectInd[self.compA.name] + self.filletComp.intersectInd[self.compB.name], - ) self.indices = indices - # make sure each component's subset of points on the intersection curve is up to date - self.compA.intersectPts[self.filletComp.name] = self.compA.surfPts[ - self.compA.intersectInd[self.filletComp.name] - ] - self.compB.intersectPts[self.filletComp.name] = self.compB.surfPts[ - self.compB.intersectInd[self.filletComp.name] - ] - # don't update the delta because we aren't remeshing return delta @@ -3624,33 +3607,25 @@ def project(self, ptSetName, newPts): # TODO maybe stop doing this # update the pointset unless we haven't figured out the intersections yet - if len(self.filletComp.intersectInd) > 0: + if len(self.compA.curvePts) > 0: # TODO change to a first project flag or something newCurveCoords = np.vstack( ( - self.compA.intersectPts[self.filletComp.name], - self.compB.intersectPts[self.filletComp.name], - # self.compA.intersectCurvePts, - # self.compB.intersectCurvePts, + self.compA.curvePts, + self.compB.curvePts, ) ) curvePtCoords = np.vstack( ( - self.compA.intersectPtsOrig[self.filletComp.name], - self.compB.intersectPtsOrig[self.filletComp.name], - # self.compA.intersectCurvePtsOrig, - # self.compB.intersectCurvePtsOrig, + self.compA.curvePtsOrig, + self.compB.curvePtsOrig, ) ) delta = newCurveCoords - curvePtCoords - # modify the intersection curves of the fillet ptsNew = deepcopy(self.filletComp.surfPtsOrig) - pts0 = self.filletComp.surfPtsOrig - # indices = self.indices - indices = np.linspace(0, ptsNew.shape[0] - 1, ptsNew.shape[0], dtype=int) - self._warpSurfPts(pts0, ptsNew, indices, curvePtCoords, delta) + self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta) self.filletComp.surfPts = ptsNew From 54c97c8b9d21990dbb5e52aa37194cea7f52b659 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 2 Oct 2023 11:37:27 -0400 Subject: [PATCH 032/110] start project_b --- pygeo/parameterization/DVGeoMulti.py | 41 ++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 76b8a166..9c3380d5 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3629,6 +3629,47 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew + def project_b(self, ptSetName, dIdpt, comm): + # number of functions we have + N = dIdpt.shape[0] + + # Initialize dictionaries to accumulate triangulated mesh sensitivities + compSens_local = {} + compSensA = {} + compSensB = {} + + curvePtCoordsA = self.compA.curvePts + curvePtCoordsB = self.compB.curvePts + + # call the bwd warping routine + # deltaA_b is the seed for the points projected to curves + deltaA_b_local = self._warpSurfPts_b( + dIdpt, + self.points[ptSetName][0], + self.surfIdxA[ptSetName], + curvePtCoordsA, + ) + + # do the same for comp B + deltaB_b_local = self._warpSurfPts_b( + dIdpt, + self.points[ptSetName][0], + self.surfIdxB[ptSetName], + curvePtCoordsB, + ) + + # reduce seeds for both + if ptSetComm: + deltaA_b = ptSetComm.allreduce(deltaA_b_local, op=MPI.SUM) + deltaB_b = ptSetComm.allreduce(deltaB_b_local, op=MPI.SUM) + # no comm, local is global + else: + deltaA_b = deltaA_b_local + deltaB_b = deltaB_b_local + + return compSens + 
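Note on the `project_b` sketch above: as committed in this first pass, `ptSetComm` and `compSens` are referenced before they are defined; a later commit in this series retrieves the point-set communicator with `ptSetComm = self.points[ptSetName][3]` and returns a (for now empty) `compSens` dictionary. The snippet below is an illustrative, standalone sketch of the reverse-mode seed accumulation that `_warpSurfPts_b` is expected to perform, assuming the forward warp displaces each surface point by an inverse-distance weighted sum of the curve-point deltas. The function name, the weighting exponent, and the small regularization constant are assumptions for illustration, not the actual DVGeoMulti implementation.

import numpy as np

def warpSurfPts_b_sketch(dIdpt, pts0, indices, curvePtCoords):
    # dIdpt:         (N, nSurfPts, 3) reverse seeds on the warped surface points
    # pts0:          (nSurfPts, 3) undeformed surface points
    # indices:       indices of the surface points that were warped
    # curvePtCoords: (nCurvePts, 3) curve points that drive the warping
    N = dIdpt.shape[0]
    nCurve = curvePtCoords.shape[0]
    delta_b = np.zeros((N, nCurve, 3))

    for j in indices:
        # inverse-distance weights of the assumed forward warp
        rr = pts0[j] - curvePtCoords
        LdefoDist3 = 1.0 / (np.sum(rr * rr, axis=1) ** 1.5 + 1e-16)
        Wi = LdefoDist3 / np.sum(LdefoDist3)

        # forward map: newPt[j] = pts0[j] + sum_i Wi[i] * delta[i],
        # so each curve-point seed picks up Wi[i] times the surface-point seed
        for k in range(N):
            delta_b[k] += np.outer(Wi, dIdpt[k, j])

    return delta_b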
+ def _getIntersectionSeam(self, comm): pass From 74159d85abfda2e5c1818f9350a716b18e182e64 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 3 Oct 2023 15:36:06 -0400 Subject: [PATCH 033/110] comment --- pygeo/parameterization/DVGeoMulti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index c6da0859..368b995f 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -897,7 +897,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # also increment the offset dvOffset += nDVComp - # finally, we can add the contributions from triangulated component meshes + # finally, we can add the contributions from intersections for compSens in compSensList: # loop over the items of compSens, which are guaranteed to be in dIdxDict for k, v in compSens.items(): From c369a497cb0fa16b7eeb644bc7fe778deb4f127c Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 3 Oct 2023 15:36:33 -0400 Subject: [PATCH 034/110] dummy sens --- pygeo/parameterization/DVGeoMulti.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 9c3380d5..d6e2f079 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3602,6 +3602,10 @@ def update(self, ptSetName, delta): # don't update the delta because we aren't remeshing return delta + def sens(self, dIdPt, ptSetName, comm): + compSens = {} + return compSens + def project(self, ptSetName, newPts): # redo the delta because this is how the fillet was initially set up # TODO maybe stop doing this @@ -3632,12 +3636,12 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm): # number of functions we have N = dIdpt.shape[0] - + # Initialize dictionaries to accumulate triangulated mesh sensitivities compSens_local = {} compSensA = {} compSensB = {} - + curvePtCoordsA = self.compA.curvePts curvePtCoordsB = self.compB.curvePts @@ -3669,7 +3673,6 @@ def project_b(self, ptSetName, dIdpt, comm): return compSens - def _getIntersectionSeam(self, comm): pass From 0b4cd95138e4ff5fbd12cb9f115bc5a3d5643d7d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 3 Oct 2023 15:53:00 -0400 Subject: [PATCH 035/110] sketching derivatives --- pygeo/parameterization/DVGeoMulti.py | 41 +++++++++++++++++----------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 331a7c5c..d6ea92a0 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -825,7 +825,9 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): for IC in self.intersectComps: if IC.projectFlag and ptSetName in IC.points: # initialize the seed contribution to the intersection seam and feature curves from project_b - IC.seamBarProj[ptSetName] = np.zeros((N, IC.seam0.shape[0], IC.seam0.shape[1])) + # fillet intersections don't track seams + if not self.filletIntersection: + IC.seamBarProj[ptSetName] = np.zeros((N, IC.seam0.shape[0], IC.seam0.shape[1])) # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points @@ -842,7 +844,8 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we need to go through all ICs bec even though some procs might not have 
points on the intersection, # communication is easier and we can reduce compSens as we compute them - for IC in self.intersectComps: + # fillet intersections do not do curve-based warping + for IC in self.intersectComps and not self.filletIntersection: if ptSetName in IC.points: compSens = IC.sens(dIdpt, ptSetName, comm) # save the sensitivities from the intersection stuff @@ -945,14 +948,15 @@ def addVariablesPyOpt( # We can simply loop over all DV objects and call their respective addVariablesPyOpt function for comp in comps: - self.comps[comp].DVGeo.addVariablesPyOpt( - optProb, - globalVars=globalVars, - localVars=localVars, - sectionlocalVars=sectionlocalVars, - ignoreVars=ignoreVars, - freezeVars=freezeVars, - ) + if not comp.isFillet: + self.comps[comp].DVGeo.addVariablesPyOpt( + optProb, + globalVars=globalVars, + localVars=localVars, + sectionlocalVars=sectionlocalVars, + ignoreVars=ignoreVars, + freezeVars=freezeVars, + ) def getLocalIndex(self, iVol, comp): """Return the local index mapping that points to the global coefficient list for a given volume. @@ -1098,19 +1102,24 @@ def _computeTotalJacobian(self, ptSetName): dvOffset = 0 # we need to call computeTotalJacobian from all comps and get the jacobians for this pointset - for comp in self.compNames: + for name in self.compNames: + comp = self.comps[name] + # fillet pointset needs points on boundary removed + if comp.fillet: + removeIntersectionPts() + # number of design variables - nDVComp = self.comps[comp].DVGeo.getNDV() + nDVComp = comp.DVGeo.getNDV() # call the function to compute the total jacobian - self.comps[comp].DVGeo.computeTotalJacobian(ptSetName) + comp.DVGeo.computeTotalJacobian(ptSetName) - if self.comps[comp].DVGeo.JT[ptSetName] is not None: + if comp.DVGeo.JT[ptSetName] is not None: # Get the component Jacobian - compJ = self.comps[comp].DVGeo.JT[ptSetName].T + compJ = comp.DVGeo.JT[ptSetName].T # Set the block of the full Jacobian associated with this component - jac[ptSet.compMapFlat[comp], dvOffset : dvOffset + nDVComp] = compJ + jac[ptSet.compMapFlat[name], dvOffset : dvOffset + nDVComp] = compJ # increment the offset dvOffset += nDVComp From 7bc137b3297124f74562b508b0d122b87539798f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 3 Oct 2023 16:09:54 -0400 Subject: [PATCH 036/110] stop separate comp&fillet dicts --- pygeo/parameterization/DVGeoMulti.py | 89 ++++++++++++---------------- 1 file changed, 38 insertions(+), 51 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index d6ea92a0..83daa30d 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -66,9 +66,6 @@ def __init__( self.compNames = [] self.comps = OrderedDict() - # separate out fillets to avoid checking whether components in comps have a DVGeo everywhere - self.filletNames = [] - self.fillets = OrderedDict() self.DVGeoDict = OrderedDict() self.points = OrderedDict() self.updated = {} @@ -138,9 +135,9 @@ def addComponent( # determine whether this component is a fillet or a normal surface if DVGeo is None: - filletComp = True + isFillet = True else: - filletComp = False + isFillet = False # if self.filletIntersection is True and triMesh is not None: # this should work with a triangulated surface it just isn't necessary @@ -152,7 +149,7 @@ def addComponent( pointSetKwargs = {} # fillets don't have a DVGeo to get a bounding box from and don't need it - if filletComp: + if isFillet: xMin = xMax = 3 * [0] # standard components need a bounding box to 
associate points with each FFD @@ -185,7 +182,7 @@ def addComponent( nodes = surfPts # add these points to the corresponding dvgeo unless this component is a fillet - if not filletComp: + if not isFillet: DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) else: @@ -193,7 +190,7 @@ def addComponent( # initialize the component object # a different class is used for fillets & their adjacent components - component = Comp(comp, filletComp, nodes, DVGeo, xMin, xMax, self.comm) + component = Comp(comp, isFillet, nodes, DVGeo, xMin, xMax, self.comm) # we have a standard intersection group which has structured surfaces else: @@ -218,12 +215,8 @@ def addComponent( # add component object to the dictionary and list keeping track of components # if this component is a fillet (no DVGeo) put in a separate list to avoid unnecessary checks for a DVGeo later - if filletComp: - self.fillets[comp] = component - self.filletNames.append(comp) - else: - self.comps[comp] = component - self.compNames.append(comp) + self.comps[comp] = component + self.compNames.append(comp) # also save the DVGeometry pointer in the dictionary we pass back (fillet entry will be None) self.DVGeoDict[comp] = DVGeo @@ -374,7 +367,7 @@ def addCurve(self, compName, filletName, curveFiles): # figure out which component and fillet we're dealing with # and which intersection object they belong to comp = self.comps[compName] - fillet = self.fillets[filletName] + fillet = self.comps[filletName] intersection = fillet.intersection # find the indices of fillet points that lie on this intersection curve @@ -583,16 +576,12 @@ def addPointSet( elif self.filletIntersection: for comp in compNames: + self.comps[comp].surfPtsName = ptName + self.comps[comp].surfPts = points + self.comps[comp].nPts = len(points) + self.comps[comp].surfPtsOrig = deepcopy(points) if comp != "fillet": self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) - self.comps[comp].surfPtsName = ptName - self.comps[comp].surfPts = points - self.comps[comp].surfPtsOrig = points - else: - self.fillets[comp].surfPtsName = ptName - self.fillets[comp].surfPts = points - self.fillets[comp].surfPtsOrig = deepcopy(self.fillets[comp].surfPts) - self.fillets[comp].nPts = len(points) # check if this pointset will get the IC treatment if applyIC: @@ -634,7 +623,8 @@ def setDesignVars(self, dvDict): # loop over the components and set the values for comp in self.compNames: - self.comps[comp].DVGeo.setDesignVars(dvDict) + if self.comps[comp].DVGeo is not None: + self.comps[comp].DVGeo.setDesignVars(dvDict) # We need to give the updated coordinates to each of the # intersectComps (if we have any) so they can update the new intersection curve @@ -660,10 +650,11 @@ def getValues(self): dvDict = {} # we need to loop over each DVGeo object and get the DVs for comp in self.compNames: - dvDictComp = self.comps[comp].DVGeo.getValues() - # we need to loop over these DVs - for k, v in dvDictComp.items(): - dvDict[k] = v + if self.comps[comp].DVGeo is not None: + dvDictComp = self.comps[comp].DVGeo.getValues() + # we need to loop over these DVs + for k, v in dvDictComp.items(): + dvDict[k] = v return dvDict @@ -685,15 +676,16 @@ def update(self, ptSetName, config=None): # we first need to update all points with their respective DVGeo objects for compName, comp in self.comps.items(): - if ptSetName in comp.DVGeo.ptSetNames: # TODO make this work with old Multi - ptsComp = comp.DVGeo.update(ptSetName) - - # now save this info with the pointset mapping - if not self.filletIntersection: - ptMap = 
self.points[ptSetName].compMap[compName] - newPts[ptMap] = ptsComp - else: - newPts = self.points[ptSetName].points + if comp.DVGeo is not None: + if ptSetName in comp.DVGeo.ptSetNames: # TODO make this work with old Multi + ptsComp = comp.DVGeo.update(ptSetName) + + # now save this info with the pointset mapping + if not self.filletIntersection: + ptMap = self.points[ptSetName].compMap[compName] + newPts[ptMap] = ptsComp + else: + newPts = self.points[ptSetName].points # get the delta delta = newPts - self.points[ptSetName].points @@ -757,11 +749,12 @@ def getVarNames(self, pyOptSparse=False): dvNames = [] # create a list of DVs from each comp for comp in self.compNames: - # first get the list of DVs from this component - varNames = self.comps[comp].DVGeo.getVarNames() + if self.comps[comp].DVGeo is not None: + # first get the list of DVs from this component + varNames = self.comps[comp].DVGeo.getVarNames() - # add the component DVs to the full list - dvNames.extend(varNames) + # add the component DVs to the full list + dvNames.extend(varNames) return dvNames @@ -948,7 +941,7 @@ def addVariablesPyOpt( # We can simply loop over all DV objects and call their respective addVariablesPyOpt function for comp in comps: - if not comp.isFillet: + if comp.DVGeo is not None: self.comps[comp].DVGeo.addVariablesPyOpt( optProb, globalVars=globalVars, @@ -977,17 +970,11 @@ def getLocalIndex(self, iVol, comp): return DVGeo.FFD.topo.lIndex[iVol].copy() def writeCompSurf(self, compName, fileName): - if compName in self.compNames: - comp = self.comps[compName] - elif compName in self.filletNames: - comp = self.fillets[compName] + comp = self.comps[compName] comp.writeSurf(fileName) def writeCompCurve(self, compName, curveName, fileName): - if compName in self.compNames: - comp = self.comps[compName] - elif compName in self.filletNames: - comp = self.fillets[compName] + comp = self.comps[compName] comp.writeCurve(curveName, fileName) def writePointSet(self, name, fileName, solutionTime=None): @@ -3564,7 +3551,7 @@ class FilletIntersection(Intersection): def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True): super().__init__(compA, compB, distTol, DVGeo, dtype, project) - self.filletComp = DVGeo.fillets[filletComp] + self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self def findIntersection(self, surf, curve): # TODO fix this function From fe13ee759568ed18316c92dc426a08ba631ac1af Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 3 Oct 2023 16:56:07 -0400 Subject: [PATCH 037/110] project_b for fillet --- pygeo/parameterization/DVGeoMulti.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 83daa30d..8d17158d 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3629,18 +3629,15 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew - def project_b(self, ptSetName, dIdpt, comm): - # number of functions we have - N = dIdpt.shape[0] - - # Initialize dictionaries to accumulate triangulated mesh sensitivities - compSens_local = {} - compSensA = {} - compSensB = {} + def project_b(self, ptSetName, dIdpt, comm=None): + compSens = {} curvePtCoordsA = self.compA.curvePts curvePtCoordsB = self.compB.curvePts + # get the comm for this point set + ptSetComm = self.points[ptSetName][3] + # call the bwd warping routine # deltaA_b is the 
seed for the points projected to curves deltaA_b_local = self._warpSurfPts_b( @@ -3667,6 +3664,11 @@ def project_b(self, ptSetName, dIdpt, comm): deltaA_b = deltaA_b_local deltaB_b = deltaB_b_local + # zero out the seeds for the intersection on the fillet + # these points will be present in the fillet pointset and the components + deltaA_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 + deltaB_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 + return compSens def _getIntersectionSeam(self, comm): From aaf157d6523ff7d92a26268a8c58e5d80bddb606 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 4 Oct 2023 15:21:09 -0400 Subject: [PATCH 038/110] split fillet points into 3 --- pygeo/parameterization/DVGeoMulti.py | 33 ++++++++++++++++++---------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 8d17158d..89fd2bb7 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -372,8 +372,8 @@ def addCurve(self, compName, filletName, curveFiles): # find the indices of fillet points that lie on this intersection curve # we need to know this to handle duplicates in derivative calcs later - filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) - fillet.intersectInd.update({compName: filletIntInd}) + # filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) + # fillet.intersectInd.update({compName: filletIntInd}) # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD ptSetName = f"{compName}_curve" @@ -473,7 +473,21 @@ def addPointSet( self.points[ptName].compMap[comp] = [] self.points[ptName].compMapFlat[comp] = [] - if not self.filletIntersection: + # is this intersection group a fillet or normal + if self.filletIntersection: + # is this pointset being added to a fillet component or a controlled component + if familyName == "fillet": + for IC in self.intersectComps: + # find the points on the fillet that match each intersection + compAInterPts, IC.filletComp.compAInterInd = IC.findIntersection(points, IC.compA.curvePts) + compBInterPts, IC.filletComp.compBInterInd = IC.findIntersection(points, IC.compB.curvePts) + + # add those intersection points to each DVGeo so they get deformed with the FFD + IC.compA.DVGeo.addPointSet(compAInterPts, f"{IC.compA.name}_fillet_intersection") + IC.compB.DVGeo.addPointSet(compBInterPts, f"{IC.compB.name}_fillet_intersection") + + # non-fillet intersections require more checking + else: # we now need to create the component mapping information for i in range(self.points[ptName].nPts): # initial flags @@ -580,6 +594,7 @@ def addPointSet( self.comps[comp].surfPts = points self.comps[comp].nPts = len(points) self.comps[comp].surfPtsOrig = deepcopy(points) + if comp != "fillet": self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) @@ -884,6 +899,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dvOffset += nDVComp # finally, we can add the contributions from intersections + # TODO is this how the fillet contributions will get in? 
they aren't included in the DVGeo dIdxComp for compSens in compSensList: # loop over the items of compSens, which are guaranteed to be in dIdxDict for k, v in compSens.items(): @@ -1092,8 +1108,6 @@ def _computeTotalJacobian(self, ptSetName): for name in self.compNames: comp = self.comps[name] # fillet pointset needs points on boundary removed - if comp.fillet: - removeIntersectionPts() # number of design variables nDVComp = comp.DVGeo.getNDV() @@ -3587,13 +3601,10 @@ def addPointSet(self, pts, ptSetName, compMap, comm): def update(self, ptSetName, delta): # update the pointset unless we haven't figured out the intersections yet # TODO change to a firstUpdate flag or something - if len(self.compA.curvePts) > 0: - pass - else: - n = self.filletComp.surfPtsOrig.shape[0] - indices = np.linspace(0, n - 1, n, dtype=int) - self.indices = indices + n = self.filletComp.surfPtsOrig.shape[0] + indices = np.linspace(0, n - 1, n, dtype=int) + self.indices = indices # don't update the delta because we aren't remeshing return delta From 2b27126a466798f22847852f34bd89baabbfbdc1 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 5 Oct 2023 15:03:06 -0400 Subject: [PATCH 039/110] adding all the pointsets ever --- pygeo/parameterization/DVGeo.py | 2 +- pygeo/parameterization/DVGeoMulti.py | 131 ++++++++++++++++++++------- 2 files changed, 97 insertions(+), 36 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 71c10c1e..2bf1cf51 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2898,7 +2898,7 @@ def addVariablesPyOpt( ) def writeTecplot(self, fileName, solutionTime=None): - """Write the (deformed) current state of the FFD's to a tecplot file, + """Write the (deformed) current state of the FFDs to a tecplot file, including the children Parameters diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 89fd2bb7..932a81c9 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -365,22 +365,23 @@ def addCurve(self, compName, filletName, curveFiles): curvePts = self._readDATFile(curveFiles, surf=False) # figure out which component and fillet we're dealing with - # and which intersection object they belong to comp = self.comps[compName] - fillet = self.comps[filletName] - intersection = fillet.intersection - - # find the indices of fillet points that lie on this intersection curve - # we need to know this to handle duplicates in derivative calcs later - # filletIntInd = intersection.findIntersection(fillet.surfPts, curvePts) - # fillet.intersectInd.update({compName: filletIntInd}) # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD ptSetName = f"{compName}_curve" comp.curvePtsName = ptSetName comp.curvePts = curvePts comp.curvePtsOrig = deepcopy(curvePts) - comp.DVGeo.addPointSet(curvePts, ptSetName) + + # add the curve pointset to the component's DVGeo + comp.DVGeo.addPointSet(curvePts, ptSetName, self.comm) # TODO is comm right here + + # add the curve pointset to DVGeoMulti + self.points[ptSetName] = PointSet(curvePts, comm=self.comm, comp=compName) + + # add the curve pointset to the intersection + for IC in self.intersectComps: + IC.addPointSet(curvePts, ptSetName, [], self.comm) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -466,7 +467,11 @@ def addPointSet( compNames = [familyName] # create the pointset class - self.points[ptName] = PointSet(points, comm=comm) + if 
self.filletIntersection: + comp = compNames[0] + else: + comp = None + self.points[ptName] = PointSet(points, comm=comm, comp=comp) for comp in self.compNames: # initialize the list for this component @@ -479,12 +484,31 @@ def addPointSet( if familyName == "fillet": for IC in self.intersectComps: # find the points on the fillet that match each intersection - compAInterPts, IC.filletComp.compAInterInd = IC.findIntersection(points, IC.compA.curvePts) - compBInterPts, IC.filletComp.compBInterInd = IC.findIntersection(points, IC.compB.curvePts) + compAInterPts, compAInterInd = IC.findIntersection(points, IC.compA.curvePts) + compBInterPts, compBInterInd = IC.findIntersection(points, IC.compB.curvePts) # add those intersection points to each DVGeo so they get deformed with the FFD - IC.compA.DVGeo.addPointSet(compAInterPts, f"{IC.compA.name}_fillet_intersection") - IC.compB.DVGeo.addPointSet(compBInterPts, f"{IC.compB.name}_fillet_intersection") + compAPtsName = f"{IC.compA.name}_fillet_intersection" + compBPtsName = f"{IC.compB.name}_fillet_intersection" + IC.compA.DVGeo.addPointSet(compAInterPts, compAPtsName) + IC.compB.DVGeo.addPointSet(compBInterPts, compBPtsName) + + # add intersection points to DVGeoMulti too + self.points[compAPtsName] = PointSet(compAInterPts, comm=comm, comp=IC.compA.name) + self.points[compBPtsName] = PointSet(compBInterPts, comm=comm, comp=IC.compB.name) + + # save the indices in the fillet component + IC.filletComp.compAInterInd = compAInterInd + IC.filletComp.compBInterInd = compBInterInd + + # save the names of the fillet intersection pointsets to find them later + IC.filletComp.compAPtsName = compAPtsName + IC.filletComp.compBPtsName = compBPtsName + + # add the points to the intersection object + for IC in self.intersectComps: + IC.addPointSet(compAInterPts, compAPtsName, [], comm) + IC.addPointSet(compBInterPts, compBPtsName, [], comm) # non-fillet intersections require more checking else: @@ -709,7 +733,7 @@ def update(self, ptSetName, config=None): for IC in self.intersectComps: # check if this IC is active for this ptSet if ptSetName in IC.points: - delta = IC.update(ptSetName, delta) + delta = IC.update(ptSetName, delta, self.comps[self.points[ptSetName].comp]) # now we are ready to take the delta which may be modified by the intersections newPts = self.points[ptSetName].points + delta @@ -816,8 +840,10 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ - # Compute the total Jacobian for this point set - self._computeTotalJacobian(ptSetName) + # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) + comp = self.points[ptSetName].comp + if comp is None or not comp.isFillet: + self._computeTotalJacobian(ptSetName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -840,7 +866,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points # and passes back dIdpt in place. - compSens = IC.project_b(ptSetName, dIdpt, comm) + compSens = IC.project_b(ptSetName, dIdpt, comm, comp) # append this to the dictionary list... 
compSensList.append(compSens) @@ -989,9 +1015,9 @@ def writeCompSurf(self, compName, fileName): comp = self.comps[compName] comp.writeSurf(fileName) - def writeCompCurve(self, compName, curveName, fileName): + def writeCompCurve(self, compName, fileName): comp = self.comps[compName] - comp.writeCurve(curveName, fileName) + comp.writeCurve(fileName) def writePointSet(self, name, fileName, solutionTime=None): """ @@ -1159,9 +1185,9 @@ def updateTriMesh(self): class Comp: - def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=None, tol=1e-3): + def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=None, tol=1e-3): self.name = name - self.fillet = fillet + self.isFillet = isFillet self.DVGeo = DVGeo self.surfPts = surfPts self.surfPtsOrig = deepcopy(surfPts) @@ -1175,7 +1201,7 @@ def __init__(self, name, fillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=N self.intersectInd = {} def updateSurfPts(self): - if self.fillet: + if self.isFillet: print("no") else: self.surfPts = self.DVGeo.update(self.surfPtsName) @@ -1187,24 +1213,42 @@ def writeSurf(self, fileName): writeTecplot1D(f, self.name, self.surfPts) closeTecplot(f) - def writeCurve(self, inter, fileName): + def writeCurve(self, fileName): fileName = f"{fileName}_{self.name}_curve.dat" f = openTecplot(fileName, 3) - if self.fillet: - writeTecplot1D(f, self.name, self.surfPts[self.intersectInd[inter]]) + if self.isFillet: + writeTecplot1D(f, self.name, self.surfPts[self.compAInterInd]) + writeTecplot1D(f, self.name, self.surfPts[self.compBInterInd]) else: writeTecplot1D(f, self.name, self.curvePts) closeTecplot(f) + def updateFilletPts(self, newInterPts, ptSetName): + newPts = deepcopy(self.surfPts) + if ptSetName == self.compAPtsName: + newPts[self.compAInterInd] = newInterPts # TODO update compB here too + elif ptSetName == self.compBPtsName: + newPts[self.compAInterInd] = newInterPts + else: + print("no") + + # update points stored in fillet object + self.surfPts = newPts + # update points stored in intersection object + self.intersection.points[self.surfPtsName] = newPts + # update DVGeoMulti pointset + self.intersection.DVGeo.points[ptSetName].points = newPts + class PointSet: - def __init__(self, points, comm): + def __init__(self, points, comm, comp=None): self.points = points self.nPts = len(self.points) self.compMap = OrderedDict() self.compMapFlat = OrderedDict() + self.comp = comp self.comm = comm @@ -1775,7 +1819,7 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # also save the total number for convenience self.nCurvePts[ptSetName][curveName] = nPtsTotal - def update(self, ptSetName, delta): + def update(self, ptSetName, delta, comp=None): """Update the delta in ptSetName with our correction. 
The delta need to be supplied as we will be changing it and returning them """ @@ -3567,6 +3611,7 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self + self.firstUpdate = True def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] @@ -3598,13 +3643,23 @@ def addPointSet(self, pts, ptSetName, compMap, comm): # Save the affected indices and the factor in the little dictionary self.points[ptSetName] = [pts.copy(), [], [], comm] - def update(self, ptSetName, delta): + def update(self, ptSetName, delta, comp=None): # update the pointset unless we haven't figured out the intersections yet # TODO change to a firstUpdate flag or something - n = self.filletComp.surfPtsOrig.shape[0] - indices = np.linspace(0, n - 1, n, dtype=int) - self.indices = indices + if self.firstUpdate: + n = self.filletComp.surfPtsOrig.shape[0] + indices = np.linspace(0, n - 1, n, dtype=int) + self.indices = indices + self.firstUpdate = False + else: + # fillet points on boundaries need updated based on the points embedded in the neighbor FFDs + if comp is not None: + if not comp.isFillet: + fillet = self.filletComp + if ptSetName is fillet.compAPtsName or ptSetName is fillet.compBPtsName: + points = self.points[ptSetName].points + fillet.updateFilletPts(points, ptSetName) # don't update the delta because we aren't remeshing return delta @@ -3640,7 +3695,13 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew - def project_b(self, ptSetName, dIdpt, comm=None): + def project_b(self, ptSetName, dIdpt, comm=None, comp=None): + points = self.points[ptSetName][0] + + # don't accumulate derivatives for fillet points on intersections + if comp.isFillet: + dIdpt = np.delete(dIdpt, [comp.compAInterInd, comp.compBInterInd]) + compSens = {} curvePtCoordsA = self.compA.curvePts @@ -3653,7 +3714,7 @@ def project_b(self, ptSetName, dIdpt, comm=None): # deltaA_b is the seed for the points projected to curves deltaA_b_local = self._warpSurfPts_b( dIdpt, - self.points[ptSetName][0], + points, self.surfIdxA[ptSetName], curvePtCoordsA, ) @@ -3661,7 +3722,7 @@ def project_b(self, ptSetName, dIdpt, comm=None): # do the same for comp B deltaB_b_local = self._warpSurfPts_b( dIdpt, - self.points[ptSetName][0], + points, self.surfIdxB[ptSetName], curvePtCoordsB, ) From e8b7a7238aa648bfd1066c906e976cd8b27e845f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 5 Oct 2023 15:58:13 -0400 Subject: [PATCH 040/110] bug --- pygeo/parameterization/DVGeoMulti.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 932a81c9..54142162 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1228,9 +1228,9 @@ def writeCurve(self, fileName): def updateFilletPts(self, newInterPts, ptSetName): newPts = deepcopy(self.surfPts) if ptSetName == self.compAPtsName: - newPts[self.compAInterInd] = newInterPts # TODO update compB here too - elif ptSetName == self.compBPtsName: newPts[self.compAInterInd] = newInterPts + elif ptSetName == self.compBPtsName: + newPts[self.compBInterInd] = newInterPts else: print("no") From 670f5a6db42e81e660972832a7f07646f25d6ef9 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 5 Oct 2023 19:01:18 -0400 Subject: [PATCH 041/110] getndv bug --- pygeo/parameterization/DVGeoMulti.py | 
18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 54142162..a8dae254 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -726,6 +726,10 @@ def update(self, ptSetName, config=None): else: newPts = self.points[ptSetName].points + comp = self.comps[self.points[ptSetName].comp] + if comp.isFillet: + newPts = comp.surfPts + # get the delta delta = newPts - self.points[ptSetName].points @@ -733,7 +737,7 @@ def update(self, ptSetName, config=None): for IC in self.intersectComps: # check if this IC is active for this ptSet if ptSetName in IC.points: - delta = IC.update(ptSetName, delta, self.comps[self.points[ptSetName].comp]) + delta = IC.update(ptSetName, delta, comp) # now we are ready to take the delta which may be modified by the intersections newPts = self.points[ptSetName].points + delta @@ -771,8 +775,10 @@ def getNDV(self): """Return the number of DVs.""" # Loop over components and sum the number of DVs nDV = 0 - for comp in self.compNames: - nDV += self.comps[comp].DVGeo.getNDV() + for name in self.compNames: + comp = self.comps[name] + if comp.DVGeo is not None: + nDV += comp.DVGeo.getNDV() return nDV def getVarNames(self, pyOptSparse=False): @@ -983,7 +989,7 @@ def addVariablesPyOpt( # We can simply loop over all DV objects and call their respective addVariablesPyOpt function for comp in comps: - if comp.DVGeo is not None: + if self.comps[comp].DVGeo is not None: self.comps[comp].DVGeo.addVariablesPyOpt( optProb, globalVars=globalVars, @@ -3695,6 +3701,10 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew + # write curve coords from file to see which proc has which (all should have complete set) + np.savetxt(f"compACurve{self.DVGeo.comm.rank}.txt", self.compA.curvePts) + np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) + def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] From 65d5d43ee5f31bf65729bff00056792d85db3f3e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 6 Oct 2023 14:28:37 -0400 Subject: [PATCH 042/110] ncurvepts --- pygeo/parameterization/DVGeoMulti.py | 75 +++++++++++++++++++--------- 1 file changed, 51 insertions(+), 24 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index a8dae254..65f39d7f 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -381,7 +381,12 @@ def addCurve(self, compName, filletName, curveFiles): # add the curve pointset to the intersection for IC in self.intersectComps: + # IC.nCurvePts[ptSetName] = {} IC.addPointSet(curvePts, ptSetName, [], self.comm) + # IC.nCurvePts[ptSetName][curveName] = curvePts.shape[0] + + print(f"awrite {compName} curve from proc {self.comm.rank}") + np.savetxt(f"comp{compName}_{self.comm.rank}.txt", curvePts) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -484,8 +489,16 @@ def addPointSet( if familyName == "fillet": for IC in self.intersectComps: # find the points on the fillet that match each intersection - compAInterPts, compAInterInd = IC.findIntersection(points, IC.compA.curvePts) - compBInterPts, compBInterInd = IC.findIntersection(points, IC.compB.curvePts) + compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) + compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) + + 
compAInterPts = self.comm.allreduce(compAInterPtsLocal, op=MPI.SUM) + compBInterPts = self.comm.allreduce(compBInterPtsLocal, op=MPI.SUM) + compAInterInd = self.comm.allreduce(compAInterIndLocal, op=MPI.SUM) + compBInterInd = self.comm.allreduce(compBInterIndLocal, op=MPI.SUM) + + print(f"rank {self.comm.rank} compAInterInd {compAInterInd}") + print(f"rank {self.comm.rank} compBInterInd {compBInterInd}") # add those intersection points to each DVGeo so they get deformed with the FFD compAPtsName = f"{IC.compA.name}_fillet_intersection" @@ -847,7 +860,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) - comp = self.points[ptSetName].comp + comp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if comp is None or not comp.isFillet: self._computeTotalJacobian(ptSetName) @@ -1102,9 +1115,7 @@ def _readDATFile(self, filename, surf=True): surfFile = open(filename, "r") nElem = int(surfFile.readline()) surfPts = np.loadtxt(filename, skiprows=1, max_rows=nElem) - points = surfPts[surfPts[:, 0].argsort()] - else: curves = [] for f in filename: @@ -1137,25 +1148,31 @@ def _computeTotalJacobian(self, ptSetName): dvOffset = 0 # we need to call computeTotalJacobian from all comps and get the jacobians for this pointset - for name in self.compNames: - comp = self.comps[name] - # fillet pointset needs points on boundary removed - - # number of design variables - nDVComp = comp.DVGeo.getNDV() - # call the function to compute the total jacobian + if self.filletIntersection: + comp = self.comps[self.points[ptSetName].comp] comp.DVGeo.computeTotalJacobian(ptSetName) - if comp.DVGeo.JT[ptSetName] is not None: - # Get the component Jacobian - compJ = comp.DVGeo.JT[ptSetName].T + else: + for name in self.compNames: + comp = self.comps[name] + # fillet pointset needs points on boundary removed + + # number of design variables + nDVComp = comp.DVGeo.getNDV() + + # call the function to compute the total jacobian + comp.DVGeo.computeTotalJacobian(ptSetName) - # Set the block of the full Jacobian associated with this component - jac[ptSet.compMapFlat[name], dvOffset : dvOffset + nDVComp] = compJ + if comp.DVGeo.JT[ptSetName] is not None: + # Get the component Jacobian + compJ = comp.DVGeo.JT[ptSetName].T - # increment the offset - dvOffset += nDVComp + # Set the block of the full Jacobian associated with this component + jac[ptSet.compMapFlat[name], dvOffset : dvOffset + nDVComp] = compJ + + # increment the offset + dvOffset += nDVComp # Convert to CSR format because this is better for arithmetic jac = sparse.csr_matrix(jac) @@ -3618,6 +3635,8 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self self.firstUpdate = True + # dict to keep track of the total number of points on each curve + # self.nCurvePts = {} def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] @@ -3702,6 +3721,7 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew # write curve coords from file to see which proc has which (all should have complete set) + print(f"write curves from proc {self.DVGeo.comm.rank}") np.savetxt(f"compACurve{self.DVGeo.comm.rank}.txt", self.compA.curvePts) np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) @@ -3710,22 +3730,29 @@ def project_b(self, 
ptSetName, dIdpt, comm=None, comp=None): # don't accumulate derivatives for fillet points on intersections if comp.isFillet: - dIdpt = np.delete(dIdpt, [comp.compAInterInd, comp.compBInterInd]) + intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) + dIdpt[intInd] = 0 compSens = {} + # skip calculating warping derivatives for curve points + if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: + return compSens + curvePtCoordsA = self.compA.curvePts curvePtCoordsB = self.compB.curvePts # get the comm for this point set ptSetComm = self.points[ptSetName][3] + n = points.shape[0] + indices = np.linspace(0, n - 1, n, dtype=int) # call the bwd warping routine # deltaA_b is the seed for the points projected to curves deltaA_b_local = self._warpSurfPts_b( dIdpt, points, - self.surfIdxA[ptSetName], + indices, # TODO could maybe just feed in all indices except boundaries in fillet case curvePtCoordsA, ) @@ -3733,7 +3760,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): deltaB_b_local = self._warpSurfPts_b( dIdpt, points, - self.surfIdxB[ptSetName], + indices, curvePtCoordsB, ) @@ -3748,8 +3775,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # zero out the seeds for the intersection on the fillet # these points will be present in the fillet pointset and the components - deltaA_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 - deltaB_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 + # deltaA_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 + # deltaB_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 return compSens From 709a3a9d4d09ea667460cafc2f1582a1ec680a76 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 6 Oct 2023 16:21:39 -0400 Subject: [PATCH 043/110] fixing small bugs with larger ones --- pygeo/parameterization/DVGeoMulti.py | 35 +++++++++++++++++++--------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 65f39d7f..f791f031 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -898,11 +898,12 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we need to go through all ICs bec even though some procs might not have points on the intersection, # communication is easier and we can reduce compSens as we compute them # fillet intersections do not do curve-based warping - for IC in self.intersectComps and not self.filletIntersection: - if ptSetName in IC.points: - compSens = IC.sens(dIdpt, ptSetName, comm) - # save the sensitivities from the intersection stuff - compSensList.append(compSens) + for IC in self.intersectComps: + if not self.filletIntersection: + if ptSetName in IC.points: + compSens = IC.sens(dIdpt, ptSetName, comm) + # save the sensitivities from the intersection stuff + compSensList.append(compSens) if self.debug: print(f"[{self.comm.rank}] finished IC.sens") @@ -911,7 +912,11 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdpt = dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3)) # jacobian for the pointset - jac = self.points[ptSetName].jac + if comp.isFillet: + n = self.points[ptSetName].points.shape[0] + jac = np.zeros((n * 3, n * 3)) + else: + jac = self.points[ptSetName].jac # this is the mat-vec product for the remaining seeds. 
# this only contains the effects of the FFD motion, @@ -928,9 +933,16 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # use respective DVGeo's convert to dict functionality dIdxDict = OrderedDict() dvOffset = 0 - for comp in self.compNames: - DVGeo = self.comps[comp].DVGeo - nDVComp = DVGeo.getNDV() + for comp in self.comps.values(): + if comp.isFillet: + nDVComp = 0 + for compDVGeo in self.DVGeoDict.values(): + # fillet is still stored in dict with None DVGeo + if compDVGeo is not None: + nDVComp += compDVGeo.getNDV() + else: + DVGeo = comp.DVGeo + nDVComp = DVGeo.getNDV() # we only do this if this component has at least one DV if nDVComp > 0: @@ -3730,8 +3742,9 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # don't accumulate derivatives for fillet points on intersections if comp.isFillet: - intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) - dIdpt[intInd] = 0 + # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) + comp.compAInterInd.extend(comp.compBInterInd) + dIdpt[comp.compAInterInd, :, :] = 0 # TODO indices compSens = {} From a2613958ee88ca12d05db28e1e86a32673f07ffd Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 9 Oct 2023 11:38:21 -0400 Subject: [PATCH 044/110] wing surf ptset passes test --- pygeo/parameterization/DVGeoMulti.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f791f031..f3c65499 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -936,10 +936,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): for comp in self.comps.values(): if comp.isFillet: nDVComp = 0 - for compDVGeo in self.DVGeoDict.values(): - # fillet is still stored in dict with None DVGeo - if compDVGeo is not None: - nDVComp += compDVGeo.getNDV() else: DVGeo = comp.DVGeo nDVComp = DVGeo.getNDV() From c29fa99e484e9e8966bfe1ec86fa68e7a442ff0a Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 10 Oct 2023 08:26:22 -0400 Subject: [PATCH 045/110] hey maybe actually get updated points into multi --- pygeo/parameterization/DVGeoMulti.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f3c65499..53e916a3 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -737,7 +737,7 @@ def update(self, ptSetName, config=None): ptMap = self.points[ptSetName].compMap[compName] newPts[ptMap] = ptsComp else: - newPts = self.points[ptSetName].points + newPts = ptsComp comp = self.comps[self.points[ptSetName].comp] if comp.isFillet: @@ -764,6 +764,8 @@ def update(self, ptSetName, config=None): # set the pointset up to date self.updated[ptSetName] = True + self.points[ptSetName].points = newPts + return newPts def pointSetUpToDate(self, ptSetName): @@ -3729,9 +3731,9 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts = ptsNew # write curve coords from file to see which proc has which (all should have complete set) - print(f"write curves from proc {self.DVGeo.comm.rank}") - np.savetxt(f"compACurve{self.DVGeo.comm.rank}.txt", self.compA.curvePts) - np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) + # print(f"write curves from proc {self.DVGeo.comm.rank}") + # np.savetxt(f"compACurve{self.DVGeo.comm.rank}.txt", self.compA.curvePts) + # np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) def 
project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] From e92a6fabdef1b82f1144f681daa3af3ba6736f2c Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 10 Oct 2023 08:31:48 -0400 Subject: [PATCH 046/110] derivative debug still --- pygeo/parameterization/DVGeoMulti.py | 50 ++++++++++++++++------------ 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f3c65499..9e5137b7 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -885,7 +885,11 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points # and passes back dIdpt in place. + # print(f"before sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") + # np.savetxt("didpt1.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) compSens = IC.project_b(ptSetName, dIdpt, comm, comp) + # print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") + # np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) # append this to the dictionary list... compSensList.append(compSens) @@ -914,7 +918,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # jacobian for the pointset if comp.isFillet: n = self.points[ptSetName].points.shape[0] - jac = np.zeros((n * 3, n * 3)) + jac = np.ones((n * 3, self.getNDV())) # TODO else: jac = self.points[ptSetName].jac @@ -3735,6 +3739,7 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] + N = dIdpt.shape[0] # don't accumulate derivatives for fillet points on intersections if comp.isFillet: @@ -3758,34 +3763,35 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): indices = np.linspace(0, n - 1, n, dtype=int) # call the bwd warping routine # deltaA_b is the seed for the points projected to curves - deltaA_b_local = self._warpSurfPts_b( + + curvePtCoords = np.vstack( + ( + curvePtCoordsA, + curvePtCoordsB, + ) + ) + print(f"before sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") + np.savetxt("didpt1.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) + dIdpt = self._warpSurfPts_b( dIdpt, points, indices, # TODO could maybe just feed in all indices except boundaries in fillet case - curvePtCoordsA, + curvePtCoords, ) + print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") + np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) - # do the same for comp B - deltaB_b_local = self._warpSurfPts_b( - dIdpt, - points, - indices, - curvePtCoordsB, - ) + # # reduce seeds for both + # if ptSetComm: + # delta_b = ptSetComm.allreduce(delta_b_local, op=MPI.SUM) + # # no comm, local is global + # else: + # delta_b = delta_b_local - # reduce seeds for both - if ptSetComm: - deltaA_b = ptSetComm.allreduce(deltaA_b_local, op=MPI.SUM) - deltaB_b = ptSetComm.allreduce(deltaB_b_local, op=MPI.SUM) - # no comm, local is global - else: - deltaA_b = deltaA_b_local - deltaB_b = deltaB_b_local + # deltaBar = delta_b[:, :n, :] - # zero out the seeds for the intersection on the fillet - # these points will be present in the fillet pointset and the components - # deltaA_b[:, self.nCurvePts[ptSetName]["intersection"] :] = 0 - # deltaB_b[:, 
self.nCurvePts[ptSetName]["intersection"] :] = 0 + # for k in range(N): + # dIdpt[k, :, :] = deltaBar[k] return compSens From a24bc1662fe4a614ebb47330dd0958516a1c3930 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 10 Oct 2023 16:57:34 -0400 Subject: [PATCH 047/110] parallelism for intersection indices, might be necessary? --- pygeo/parameterization/DVGeoMulti.py | 136 +++++++++++++++------------ 1 file changed, 76 insertions(+), 60 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 0bd69ae3..f721f4ef 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -489,20 +489,31 @@ def addPointSet( if familyName == "fillet": for IC in self.intersectComps: # find the points on the fillet that match each intersection - compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) - compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) - - compAInterPts = self.comm.allreduce(compAInterPtsLocal, op=MPI.SUM) - compBInterPts = self.comm.allreduce(compBInterPtsLocal, op=MPI.SUM) - compAInterInd = self.comm.allreduce(compAInterIndLocal, op=MPI.SUM) - compBInterInd = self.comm.allreduce(compBInterIndLocal, op=MPI.SUM) - - print(f"rank {self.comm.rank} compAInterInd {compAInterInd}") - print(f"rank {self.comm.rank} compBInterInd {compBInterInd}") + compAInterPts, compAInterInd = IC.findIntersection(points, IC.compA.curvePts) + compBInterPts, compBInterInd = IC.findIntersection(points, IC.compB.curvePts) + # compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) + # compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) + + # print(f"\nrank {self.comm.rank} local compAInterInd {compAInterIndLocal}") + # compAInterNPts, compAInterSizes, compAInterPts, compAInterInd = IC._commCurveProj( + # compAInterPtsLocal, compAInterIndLocal, self.comm + # ) + # compBInterNPts, compBInterSizes, compBInterPts, compBInterInd = IC._commCurveProj( + # compBInterPtsLocal, compBInterIndLocal, self.comm + # ) + + # print(f"\nrank {self.comm.rank} local compBInterInd {compBInterIndLocal}") + # print(f"\nrank {self.comm.rank} total compBInterInd {compAInterInd}") + # print(f"\nrank {self.comm.rank} total compBInterInd {compBInterPts}") + # exit() # add those intersection points to each DVGeo so they get deformed with the FFD compAPtsName = f"{IC.compA.name}_fillet_intersection" compBPtsName = f"{IC.compB.name}_fillet_intersection" + + # print(f"\nrank {self.comm.rank} compAInterPts {compAInterPts}") + # print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") + IC.compA.DVGeo.addPointSet(compAInterPts, compAPtsName) IC.compB.DVGeo.addPointSet(compBInterPts, compBPtsName) @@ -1290,7 +1301,7 @@ def __init__(self, points, comm, comp=None): class Intersection: - def __init__(self, compA, compB, distTol, DVGeo, dtype, project): + def __init__(self, compA, compB, distTol, DVGeo, project, dtype=float): componentA = DVGeo.comps[compA] componentB = DVGeo.comps[compB] @@ -1308,6 +1319,11 @@ def __init__(self, compA, compB, distTol, DVGeo, dtype, project): # flag to determine if we want to project nodes after intersection treatment self.projectFlag = project + if dtype == float: + self.mpi_type = MPI.C_DOUBLE_COMPLEX + elif dtype == complex: + self.mpi_type = MPI.DOUBLE + def setSurface(self, comm): """This set the new updated surface on which we need to compute the new intersection curve""" @@ -1380,6 
+1396,54 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # return the seeds for the delta vector return deltaBar + def _commCurveProj(self, pts, indices, comm): + """ + This function will get the points, indices, and comm. + This function is called once for each feature curve. + The indices are the indices of points that was mapped to this curve. + We compute how many points we have mapped to this curve globally. + Furthermore, we compute the displacements. + Finally, we communicate the initial coordinates of these points. + These will later be used in the point-based warping. + + """ + + # only do this fancy stuff if this is a "parallel" pointset + if comm: + nproc = comm.size + + # communicate the counts + sizes = np.array(comm.allgather(len(indices)), dtype="intc") + + # total number of points + nptsg = np.sum(sizes) + + # get the displacements + disp = np.array([np.sum(sizes[:i]) for i in range(nproc)], dtype="intc") + + # sendbuf + ptsLocal = pts.flatten() + sendbuf = [ptsLocal, len(indices) * 3] + + # recvbuf + ptsGlobal = np.zeros(3 * nptsg, dtype=self.dtype) + + recvbuf = [ptsGlobal, sizes * 3, disp * 3, self.mpi_type] + + # do an allgatherv + comm.Allgatherv(sendbuf, recvbuf) + + # reshape into a nptsg,3 array + curvePtCoords = ptsGlobal.reshape((nptsg, 3)) + + # this is a "serial" pointset, so the results are just local + else: + nptsg = len(indices) + sizes = [nptsg] + curvePtCoords = pts[indices] + + return nptsg, sizes, curvePtCoords, indices + class CompIntersection(Intersection): def __init__( @@ -2596,54 +2660,6 @@ def project_b(self, ptSetName, dIdpt, comm): return compSens - def _commCurveProj(self, pts, indices, comm): - """ - This function will get the points, indices, and comm. - This function is called once for each feature curve. - The indices are the indices of points that was mapped to this curve. - We compute how many points we have mapped to this curve globally. - Furthermore, we compute the displacements. - Finally, we communicate the initial coordinates of these points. - These will later be used in the point-based warping. 
- - """ - - # only do this fancy stuff if this is a "parallel" pointset - if comm: - nproc = comm.size - - # communicate the counts - sizes = np.array(comm.allgather(len(indices)), dtype="intc") - - # total number of points - nptsg = np.sum(sizes) - - # get the displacements - disp = np.array([np.sum(sizes[:i]) for i in range(nproc)], dtype="intc") - - # sendbuf - ptsLocal = pts[indices].flatten() - sendbuf = [ptsLocal, len(indices) * 3] - - # recvbuf - ptsGlobal = np.zeros(3 * nptsg, dtype=self.dtype) - - recvbuf = [ptsGlobal, sizes * 3, disp * 3, self.mpi_type] - - # do an allgatherv - comm.Allgatherv(sendbuf, recvbuf) - - # reshape into a nptsg,3 array - curvePtCoords = ptsGlobal.reshape((nptsg, 3)) - - # this is a "serial" pointset, so the results are just local - else: - nptsg = len(indices) - sizes = [nptsg] - curvePtCoords = pts[indices] - - return nptsg, sizes, curvePtCoords - def _projectToComponent(self, pts, comp, projDict, surface=None): # We build an ADT for this component using pySurf # Set bounding box for new tree @@ -3644,7 +3660,7 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True): - super().__init__(compA, compB, distTol, DVGeo, dtype, project) + super().__init__(compA, compB, distTol, DVGeo, project, dtype) self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self From d98fe2e131af519b270a01632eee4331acbca16d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 11 Oct 2023 10:00:48 -0400 Subject: [PATCH 048/110] todos --- pygeo/parameterization/DVGeoMulti.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f721f4ef..4b2bb1e1 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -875,7 +875,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) comp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! 
if comp is None or not comp.isFillet: - self._computeTotalJacobian(ptSetName) + self._computeTotalJacobian(ptSetName) # TODO in fillet case get curve ptsets jacobians # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -3762,7 +3762,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # don't accumulate derivatives for fillet points on intersections if comp.isFillet: # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) - comp.compAInterInd.extend(comp.compBInterInd) + comp.compAInterInd.extend(comp.compBInterInd) # TODO make new list instead dIdpt[comp.compAInterInd, :, :] = 0 # TODO indices compSens = {} @@ -3771,7 +3771,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: return compSens - curvePtCoordsA = self.compA.curvePts + curvePtCoordsA = self.compA.curvePts # TODO should be original curvePtCoordsB = self.compB.curvePts # get the comm for this point set From b58583f843df18514d2d000dfcda6467f0e7599b Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 11 Oct 2023 21:22:36 -0400 Subject: [PATCH 049/110] deriv todos --- pygeo/parameterization/DVGeoMulti.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 4b2bb1e1..9ad68bb4 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -876,6 +876,9 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): comp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if comp is None or not comp.isFillet: self._computeTotalJacobian(ptSetName) # TODO in fillet case get curve ptsets jacobians + elif comp.isFillet: + self._computeTotalJacobian(comp.compACurvePtName) + self._computeTotalJacobian(comp.compBCurvePtName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -952,7 +955,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dvOffset = 0 for comp in self.comps.values(): if comp.isFillet: - nDVComp = 0 + nDVComp = IC.compA.DVGeo.getNDV() + IC.compB.DVGeo.getNDV() else: DVGeo = comp.DVGeo nDVComp = DVGeo.getNDV() @@ -3762,8 +3765,11 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # don't accumulate derivatives for fillet points on intersections if comp.isFillet: # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) - comp.compAInterInd.extend(comp.compBInterInd) # TODO make new list instead + allInd = deepcopy(comp.compAInterInd) + allInd.extend(comp.compBInterInd) dIdpt[comp.compAInterInd, :, :] = 0 # TODO indices + else: + print("no?") compSens = {} @@ -3771,8 +3777,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: return compSens - curvePtCoordsA = self.compA.curvePts # TODO should be original - curvePtCoordsB = self.compB.curvePts + curvePtCoordsA = self.compA.curvePtsOrig + curvePtCoordsB = self.compB.curvePtsOrig # get the comm for this point set ptSetComm = self.points[ptSetName][3] @@ -3790,7 +3796,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): ) print(f"before sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") np.savetxt("didpt1.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) - dIdpt = self._warpSurfPts_b( + deltaBar = self._warpSurfPts_b( dIdpt, points, indices, # TODO could maybe just feed in all indices except boundaries in 
fillet case @@ -3799,6 +3805,13 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) + curveInd = len(comp.compAInterInd) + deltaBarCompA = deltaBar[:, :curveInd, :] + deltaBarCompB = deltaBar[:, curveInd:, :] + + dIdxCompA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.curvePtNameA) + dIdxCompB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.curvePtNameB) + # # reduce seeds for both # if ptSetComm: # delta_b = ptSetComm.allreduce(delta_b_local, op=MPI.SUM) From 50a7fd108ab47576767fdee8a6b678aedee34429 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 24 Oct 2023 11:43:06 -0400 Subject: [PATCH 050/110] figuring out compSens --- pygeo/parameterization/DVGeo.py | 11 +++++++++ pygeo/parameterization/DVGeoMulti.py | 34 +++++++++++++++++++++++----- 2 files changed, 39 insertions(+), 6 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 2bf1cf51..70d50d58 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2359,8 +2359,19 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdx_local = np.zeros((N, nDV), "d") for i in range(N): if self.JT[ptSetName] is not None: + y = dIdpt[i, :, :] + x = dIdpt[i, :, :].flatten() dIdx_local[i, :] = self.JT[ptSetName].dot(dIdpt[i, :, :].flatten()) + # (first point) + # dIdpt is 609x3x3 + # JT for fuse_fillet_intersection is 2x9 (pointset is 3x3) + # our portion of dIdpt is 3x3 + # flattened it's 9 + # dIdx local is supposed to be 609x2 + # we do this N (609) times so it should be fine + # but I can't tell which i is stopping it + if comm: # If we have a comm, globaly reduce with sum dIdx = comm.allreduce(dIdx_local, op=MPI.SUM) else: diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 9ad68bb4..e65a86d4 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -83,7 +83,7 @@ def __init__( self.adtAPI = adtAPI_cs.adtapi else: self.dtype = float - self.adtAPI = adtAPI.adtapi + # self.adtAPI = adtAPI.adtapi def addComponent( self, @@ -877,8 +877,8 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): if comp is None or not comp.isFillet: self._computeTotalJacobian(ptSetName) # TODO in fillet case get curve ptsets jacobians elif comp.isFillet: - self._computeTotalJacobian(comp.compACurvePtName) - self._computeTotalJacobian(comp.compBCurvePtName) + self._computeTotalJacobian(comp.compAPtsName) + self._computeTotalJacobian(comp.compBPtsName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -3762,6 +3762,10 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] N = dIdpt.shape[0] + compSens_local = {} + compSensA = {} + compSensB = {} + # don't accumulate derivatives for fillet points on intersections if comp.isFillet: # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) @@ -3805,13 +3809,12 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) + # TODO reduce warping sensitivities + curveInd = len(comp.compAInterInd) deltaBarCompA = deltaBar[:, :curveInd, :] deltaBarCompB = deltaBar[:, curveInd:, :] - dIdxCompA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, 
self.curvePtNameA) - dIdxCompB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.curvePtNameB) - # # reduce seeds for both # if ptSetComm: # delta_b = ptSetComm.allreduce(delta_b_local, op=MPI.SUM) @@ -3824,6 +3827,25 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # for k in range(N): # dIdpt[k, :, :] = deltaBar[k] + compSensA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.filletComp.compAPtsName) + compSensB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.filletComp.compBPtsName) + + for k, v in compSensA.items(): + compSens_local[k] = v + + for k, v in compSensB.items(): + compSens_local[k] = v + + # finally sum the results across procs if we are provided with a comm + if comm: + compSens = {} + # because the results are in a dictionary, we need to loop over the items and sum + for k in compSens_local: + compSens[k] = comm.allreduce(compSens_local[k], op=MPI.SUM) + else: + # we can just pass the dictionary + compSens = compSens_local + return compSens def _getIntersectionSeam(self, comm): From 2a7f5b2ad163dcddaeb787d7885637863cdbeff0 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 24 Oct 2023 12:12:32 -0400 Subject: [PATCH 051/110] cleanup --- pygeo/parameterization/DVGeo.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 70d50d58..bdbdd433 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2359,19 +2359,11 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdx_local = np.zeros((N, nDV), "d") for i in range(N): if self.JT[ptSetName] is not None: - y = dIdpt[i, :, :] - x = dIdpt[i, :, :].flatten() + x = dIdpt[i, :, :] + y = self.JT[ptSetName] + z = self.JT[ptSetName].dot(dIdpt[i, :, :].flatten()) dIdx_local[i, :] = self.JT[ptSetName].dot(dIdpt[i, :, :].flatten()) - # (first point) - # dIdpt is 609x3x3 - # JT for fuse_fillet_intersection is 2x9 (pointset is 3x3) - # our portion of dIdpt is 3x3 - # flattened it's 9 - # dIdx local is supposed to be 609x2 - # we do this N (609) times so it should be fine - # but I can't tell which i is stopping it - if comm: # If we have a comm, globaly reduce with sum dIdx = comm.allreduce(dIdx_local, op=MPI.SUM) else: From 9d573b7cf43f54dd0fcd0bbfe342c9104f21417f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 26 Oct 2023 16:38:03 -0400 Subject: [PATCH 052/110] cleanup --- pygeo/parameterization/DVGeo.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index bdbdd433..2bf1cf51 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2359,9 +2359,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdx_local = np.zeros((N, nDV), "d") for i in range(N): if self.JT[ptSetName] is not None: - x = dIdpt[i, :, :] - y = self.JT[ptSetName] - z = self.JT[ptSetName].dot(dIdpt[i, :, :].flatten()) dIdx_local[i, :] = self.JT[ptSetName].dot(dIdpt[i, :, :].flatten()) if comm: # If we have a comm, globaly reduce with sum From e11cda8c8dd4d1a5199ff8f69e14df3460bc7f70 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 26 Oct 2023 16:38:12 -0400 Subject: [PATCH 053/110] closer to working --- pygeo/parameterization/DVGeoMulti.py | 128 +++++++++++++-------------- 1 file changed, 63 insertions(+), 65 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index e65a86d4..5890fa76 100644 --- 
a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -873,12 +873,12 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) - comp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! - if comp is None or not comp.isFillet: + ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! + if ptSetComp is None or not ptSetComp.isFillet: self._computeTotalJacobian(ptSetName) # TODO in fillet case get curve ptsets jacobians - elif comp.isFillet: - self._computeTotalJacobian(comp.compAPtsName) - self._computeTotalJacobian(comp.compBPtsName) + # elif ptSetComp.isFillet: + # self._computeTotalJacobian(self.intersectComps[0].compA.curvePtsName) + # self._computeTotalJacobian(self.intersectComps[0].compB.curvePtsName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -901,14 +901,11 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we pass in dIdpt and the intersection object, along with pointset information # the intersection object adjusts the entries corresponding to projected points # and passes back dIdpt in place. - # print(f"before sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") - # np.savetxt("didpt1.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) - compSens = IC.project_b(ptSetName, dIdpt, comm, comp) - # print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") - # np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) + if ptSetComp.isFillet: # TODO fix for old + compSens = IC.project_b(ptSetName, dIdpt, comm, ptSetComp) - # append this to the dictionary list... - compSensList.append(compSens) + # append this to the dictionary list... + compSensList.append(compSens) # do the transpose multiplication @@ -932,7 +929,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdpt = dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3)) # jacobian for the pointset - if comp.isFillet: + if ptSetComp.isFillet: n = self.points[ptSetName].points.shape[0] jac = np.ones((n * 3, self.getNDV())) # TODO else: @@ -953,31 +950,36 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # use respective DVGeo's convert to dict functionality dIdxDict = OrderedDict() dvOffset = 0 - for comp in self.comps.values(): - if comp.isFillet: - nDVComp = IC.compA.DVGeo.getNDV() + IC.compB.DVGeo.getNDV() - else: - DVGeo = comp.DVGeo - nDVComp = DVGeo.getNDV() - # we only do this if this component has at least one DV - if nDVComp > 0: - # this part of the sensitivity matrix is owned by this dvgeo - dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) + if not ptSetComp.isFillet: + for comp in self.comps.values(): + if not comp.isFillet: + DVGeo = comp.DVGeo + nDVComp = DVGeo.getNDV() - for k, v in dIdxComp.items(): - dIdxDict[k] = v + # we only do this if this component has at least one DV + if nDVComp > 0: + # this part of the sensitivity matrix is owned by this dvgeo + dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) - # also increment the offset - dvOffset += nDVComp + for k, v in dIdxComp.items(): + dIdxDict[k] = v + + # also increment the offset + dvOffset += nDVComp - # finally, we can add the contributions from intersections - # TODO is this how the fillet contributions will get in? 
they aren't included in the DVGeo dIdxComp - for compSens in compSensList: - # loop over the items of compSens, which are guaranteed to be in dIdxDict - for k, v in compSens.items(): - # these will bring in effects from projections and intersection computations - dIdxDict[k] += v + # finally, we can add the contributions from intersections + # TODO is this how the fillet contributions will get in? they aren't included in the DVGeo dIdxComp + for compSens in compSensList: + # loop over the items of compSens, which are guaranteed to be in dIdxDict + for k, v in compSens.items(): + # these will bring in effects from projections and intersection computations + dIdxDict[k] += v + + else: + compSens = compSensList[0] + for key, val in compSens.items(): + dIdxDict[key] = val if self.debug: print(f"[{self.comm.rank}] finished DVGeo.totalSensitivity") @@ -1180,6 +1182,7 @@ def _computeTotalJacobian(self, ptSetName): if self.filletIntersection: comp = self.comps[self.points[ptSetName].comp] comp.DVGeo.computeTotalJacobian(ptSetName) + jac = comp.DVGeo.JT[ptSetName].T else: for name in self.compNames: @@ -3760,7 +3763,6 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] - N = dIdpt.shape[0] compSens_local = {} compSensA = {} @@ -3771,7 +3773,11 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) allInd = deepcopy(comp.compAInterInd) allInd.extend(comp.compBInterInd) - dIdpt[comp.compAInterInd, :, :] = 0 # TODO indices + + for i in range(len(points)): + if i in (allInd): + for j in range(3): + dIdpt[i * 3 + j, i, j] = 0 else: print("no?") @@ -3781,8 +3787,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: return compSens - curvePtCoordsA = self.compA.curvePtsOrig - curvePtCoordsB = self.compB.curvePtsOrig + curvePtCoordsA = self.compA.curvePts + curvePtCoordsB = self.compB.curvePts # get the comm for this point set ptSetComm = self.points[ptSetName][3] @@ -3792,43 +3798,35 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # call the bwd warping routine # deltaA_b is the seed for the points projected to curves - curvePtCoords = np.vstack( - ( - curvePtCoordsA, - curvePtCoordsB, - ) - ) - print(f"before sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") - np.savetxt("didpt1.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) + curvePtCoords = np.vstack((curvePtCoordsA, curvePtCoordsB)) + + pts0 = points + # pts0 = deepcopy(self.filletComp.surfPtsOrig) + deltaBar = self._warpSurfPts_b( dIdpt, - points, + pts0, # TODO original points here? 
indices, # TODO could maybe just feed in all indices except boundaries in fillet case curvePtCoords, ) - print(f"after sum {np.sum(dIdpt)} min {np.min(dIdpt)} max {np.max(dIdpt)}") - np.savetxt("didpt2.txt", dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3))) # TODO reduce warping sensitivities - curveInd = len(comp.compAInterInd) - deltaBarCompA = deltaBar[:, :curveInd, :] - deltaBarCompB = deltaBar[:, curveInd:, :] - - # # reduce seeds for both - # if ptSetComm: - # delta_b = ptSetComm.allreduce(delta_b_local, op=MPI.SUM) - # # no comm, local is global - # else: - # delta_b = delta_b_local + curveInd = len(curvePtCoordsA) + deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) + deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) - # deltaBar = delta_b[:, :n, :] - - # for k in range(N): - # dIdpt[k, :, :] = deltaBar[k] + # reduce seeds for both + if ptSetComm: + deltaBarCompA = ptSetComm.allreduce(deltaBarCompA_local, op=MPI.SUM) + deltaBarCompB = ptSetComm.allreduce(deltaBarCompB_local, op=MPI.SUM) + # no comm, local is global + else: + deltaBarCompA = deltaBarCompA_local + deltaBarCompB = deltaBarCompB_local - compSensA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.filletComp.compAPtsName) - compSensB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.filletComp.compBPtsName) + compSensA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.compA.curvePtsName) + compSensB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.compB.curvePtsName) for k, v in compSensA.items(): compSens_local[k] = v From ceb4733c4533ea220ad93952b9e54e67e4db6d6e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 27 Oct 2023 11:25:24 -0400 Subject: [PATCH 054/110] yikees --- pygeo/parameterization/DVGeoMulti.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 5890fa76..7c2e5412 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -80,10 +80,12 @@ def __init__( # Set real or complex Fortran API if isComplex: self.dtype = complex - self.adtAPI = adtAPI_cs.adtapi + if not filletIntersection: + self.adtAPI = adtAPI_cs.adtapi else: self.dtype = float - # self.adtAPI = adtAPI.adtapi + if not filletIntersection: + self.adtAPI = adtAPI.adtapi def addComponent( self, @@ -1326,9 +1328,9 @@ def __init__(self, compA, compB, distTol, DVGeo, project, dtype=float): self.projectFlag = project if dtype == float: - self.mpi_type = MPI.C_DOUBLE_COMPLEX - elif dtype == complex: self.mpi_type = MPI.DOUBLE + elif dtype == complex: + self.mpi_type = MPI.C_DOUBLE_COMPLEX def setSurface(self, comm): """This set the new updated surface on which we need to compute the new intersection curve""" From 336c19da430d4dc09ecafd7b83facfc1248098e1 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 27 Oct 2023 12:02:22 -0400 Subject: [PATCH 055/110] complex --- pygeo/parameterization/DVGeoMulti.py | 31 ++++++++++++++++------------ 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 7c2e5412..fefc0f79 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -491,8 +491,14 @@ def addPointSet( if familyName == "fillet": for IC in self.intersectComps: # find the points on the fillet that match each intersection - compAInterPts, compAInterInd = IC.findIntersection(points, IC.compA.curvePts) - compBInterPts, compBInterInd = 
IC.findIntersection(points, IC.compB.curvePts) + compAInterPts, compAInterInd = IC.findIntersection( + points.astype(float), IC.compA.curvePts.astype(float) + ) + compBInterPts, compBInterInd = IC.findIntersection( + points.astype(float), IC.compB.curvePts.astype(float) + ) + compAInterPts.dtype = self.dtype + compBInterPts.dtype = self.dtype # compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) # compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) @@ -877,10 +883,10 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if ptSetComp is None or not ptSetComp.isFillet: - self._computeTotalJacobian(ptSetName) # TODO in fillet case get curve ptsets jacobians - # elif ptSetComp.isFillet: - # self._computeTotalJacobian(self.intersectComps[0].compA.curvePtsName) - # self._computeTotalJacobian(self.intersectComps[0].compB.curvePtsName) + self._computeTotalJacobian(ptSetName) + elif ptSetComp.isFillet: + self._computeTotalJacobian(self.intersectComps[0].compA.curvePtsName) + self._computeTotalJacobian(self.intersectComps[0].compB.curvePtsName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -1146,12 +1152,12 @@ def _readDATFile(self, filename, surf=True): if surf: surfFile = open(filename, "r") nElem = int(surfFile.readline()) - surfPts = np.loadtxt(filename, skiprows=1, max_rows=nElem) + surfPts = np.loadtxt(filename, skiprows=1, max_rows=nElem, dtype=self.dtype) points = surfPts[surfPts[:, 0].argsort()] else: curves = [] for f in filename: - curvePts = np.loadtxt(f, skiprows=1) + curvePts = np.loadtxt(f, skiprows=1, dtype=self.dtype) curves.append(curvePts) points = curves[0] @@ -1189,7 +1195,6 @@ def _computeTotalJacobian(self, ptSetName): else: for name in self.compNames: comp = self.comps[name] - # fillet pointset needs points on boundary removed # number of design variables nDVComp = comp.DVGeo.getNDV() @@ -1208,7 +1213,7 @@ def _computeTotalJacobian(self, ptSetName): dvOffset += nDVComp # Convert to CSR format because this is better for arithmetic - jac = sparse.csr_matrix(jac) + # jac = sparse.csr_matrix(jac) # now we can save this jacobian in the pointset ptSet.jac = jac @@ -1376,7 +1381,7 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # seeds for delta - deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3)) + deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3), dtype=self.dtype) # Return zeros if curvePtCoords is empty if not np.any(curvePtCoords): @@ -3678,7 +3683,7 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] - minSurfCurveDist = -np.ones(nPtSurf) + minSurfCurveDist = -np.ones(nPtSurf, dtype=self.dtype) intersectPts = [] intersectInd = [] @@ -3698,7 +3703,7 @@ def findIntersection(self, surf, curve): # TODO fix this function intersectPts.append(surfPt) intersectInd.append(i) - intersectPts = np.asarray(intersectPts) + intersectPts = np.asarray(intersectPts, dtype=self.dtype) return intersectPts, intersectInd From f838f3fd408b6e8205b5006fd46ff2bf733f05dd Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 27 Oct 2023 14:00:56 -0400 Subject: [PATCH 056/110] fix jac addition --- 
pygeo/parameterization/DVGeoMulti.py | 35 +++++++++++----------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index fefc0f79..1fe27193 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -938,22 +938,21 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # jacobian for the pointset if ptSetComp.isFillet: - n = self.points[ptSetName].points.shape[0] - jac = np.ones((n * 3, self.getNDV())) # TODO + pass else: jac = self.points[ptSetName].jac - # this is the mat-vec product for the remaining seeds. - # this only contains the effects of the FFD motion, - # projections and intersections are handled separately in compSens - dIdxT_local = jac.T.dot(dIdpt.T) - dIdx_local = dIdxT_local.T + # this is the mat-vec product for the remaining seeds. + # this only contains the effects of the FFD motion, + # projections and intersections are handled separately in compSens + dIdxT_local = jac.T.dot(dIdpt.T) + dIdx_local = dIdxT_local.T - # If we have a comm, globaly reduce with sum - if comm: - dIdx = comm.allreduce(dIdx_local, op=MPI.SUM) - else: - dIdx = dIdx_local + # If we have a comm, globaly reduce with sum + if comm: + dIdx = comm.allreduce(dIdx_local, op=MPI.SUM) + else: + dIdx = dIdx_local # use respective DVGeo's convert to dict functionality dIdxDict = OrderedDict() @@ -3777,7 +3776,6 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # don't accumulate derivatives for fillet points on intersections if comp.isFillet: - # intInd = np.vstack((comp.compAInterInd, comp.compBInterInd)) allInd = deepcopy(comp.compAInterInd) allInd.extend(comp.compBInterInd) @@ -3802,23 +3800,16 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): n = points.shape[0] indices = np.linspace(0, n - 1, n, dtype=int) - # call the bwd warping routine - # deltaA_b is the seed for the points projected to curves - curvePtCoords = np.vstack((curvePtCoordsA, curvePtCoordsB)) - pts0 = points - # pts0 = deepcopy(self.filletComp.surfPtsOrig) - + # call the bwd warping routine deltaBar = self._warpSurfPts_b( dIdpt, - pts0, # TODO original points here? 
+ points, indices, # TODO could maybe just feed in all indices except boundaries in fillet case curvePtCoords, ) - # TODO reduce warping sensitivities - curveInd = len(curvePtCoordsA) deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) From 6a8b2242085f28f2a8dce9783550ef39a3302441 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 27 Oct 2023 14:20:00 -0400 Subject: [PATCH 057/110] cleanup --- pygeo/parameterization/DVGeoMulti.py | 25 +++++++------------------ 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 1fe27193..3dfc0667 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -383,12 +383,10 @@ def addCurve(self, compName, filletName, curveFiles): # add the curve pointset to the intersection for IC in self.intersectComps: - # IC.nCurvePts[ptSetName] = {} IC.addPointSet(curvePts, ptSetName, [], self.comm) - # IC.nCurvePts[ptSetName][curveName] = curvePts.shape[0] - print(f"awrite {compName} curve from proc {self.comm.rank}") - np.savetxt(f"comp{compName}_{self.comm.rank}.txt", curvePts) + # print(f"awrite {compName} curve from proc {self.comm.rank}") + # np.savetxt(f"comp{compName}_{self.comm.rank}.txt", curvePts) def getDVGeoDict(self): """Return a dictionary of component DVGeo objects.""" @@ -976,7 +974,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dvOffset += nDVComp # finally, we can add the contributions from intersections - # TODO is this how the fillet contributions will get in? they aren't included in the DVGeo dIdxComp for compSens in compSensList: # loop over the items of compSens, which are guaranteed to be in dIdxDict for k, v in compSens.items(): @@ -3741,25 +3738,16 @@ def project(self, ptSetName, newPts): # update the pointset unless we haven't figured out the intersections yet if len(self.compA.curvePts) > 0: # TODO change to a first project flag or something - newCurveCoords = np.vstack( - ( - self.compA.curvePts, - self.compB.curvePts, - ) - ) - curvePtCoords = np.vstack( - ( - self.compA.curvePtsOrig, - self.compB.curvePtsOrig, - ) - ) + # get delta of curve points to drive warping + newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) + curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) delta = newCurveCoords - curvePtCoords ptsNew = deepcopy(self.filletComp.surfPtsOrig) pts0 = self.filletComp.surfPtsOrig + # warp interior fillet points self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta) - self.filletComp.surfPts = ptsNew # write curve coords from file to see which proc has which (all should have complete set) @@ -3810,6 +3798,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): curvePtCoords, ) + # split deltaBar into the contributions from each curve curveInd = len(curvePtCoordsA) deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) From a610d0a3e18daec68afcd3496ec079761fc23336 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 30 Oct 2023 10:18:54 -0400 Subject: [PATCH 058/110] comments --- pygeo/parameterization/DVGeoMulti.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 3dfc0667..9f5be9fb 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ 
b/pygeo/parameterization/DVGeoMulti.py @@ -882,6 +882,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if ptSetComp is None or not ptSetComp.isFillet: self._computeTotalJacobian(ptSetName) + # if this is a fillet, get the jacobian of the border curves elif ptSetComp.isFillet: self._computeTotalJacobian(self.intersectComps[0].compA.curvePtsName) self._computeTotalJacobian(self.intersectComps[0].compB.curvePtsName) @@ -904,10 +905,10 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): if not self.filletIntersection: IC.seamBarProj[ptSetName] = np.zeros((N, IC.seam0.shape[0], IC.seam0.shape[1])) - # we pass in dIdpt and the intersection object, along with pointset information - # the intersection object adjusts the entries corresponding to projected points - # and passes back dIdpt in place. - if ptSetComp.isFillet: # TODO fix for old + # we pass in dIdpt and the intersection object, along with pointset information the intersection + # object adjusts the entries corresponding to projected points and passes back dIdpt in place. + # if this is a component that surrounds a fillet, we don't get warping derivatives + if ptSetComp.isFillet or self.filletIntersection: compSens = IC.project_b(ptSetName, dIdpt, comm, ptSetComp) # append this to the dictionary list... @@ -934,9 +935,10 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # reshape the dIdpt array from [N] * [nPt] * [3] to [N] * [nPt*3] dIdpt = dIdpt.reshape((dIdpt.shape[0], dIdpt.shape[1] * 3)) - # jacobian for the pointset + # fillet pointset has no jacobian from FFD motion if ptSetComp.isFillet: pass + # jacobian for the pointset else: jac = self.points[ptSetName].jac @@ -956,6 +958,8 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdxDict = OrderedDict() dvOffset = 0 + # convert dIdx from FFD motion into dIdxDict for pyOptSparse + # fillet has no dIdx from FFD motion if not ptSetComp.isFillet: for comp in self.comps.values(): if not comp.isFillet: @@ -980,6 +984,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # these will bring in effects from projections and intersection computations dIdxDict[k] += v + # add warping derivatives to dIdxDict for fillet else: compSens = compSensList[0] for key, val in compSens.items(): @@ -3757,12 +3762,13 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = self.points[ptSetName][0] + n = points.shape[0] compSens_local = {} compSensA = {} compSensB = {} - # don't accumulate derivatives for fillet points on intersections + # don't accumulate derivatives for fillet points on intersections, set seeds to 0 if comp.isFillet: allInd = deepcopy(comp.compAInterInd) allInd.extend(comp.compBInterInd) @@ -3777,18 +3783,19 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): compSens = {} # skip calculating warping derivatives for curve points + # TODO these pointsets should never some here from totalSensitivity if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: return compSens + # get current curve points (full definition from pointwise) owned by each component curvePtCoordsA = self.compA.curvePts curvePtCoordsB = self.compB.curvePts + curvePtCoords = np.vstack((curvePtCoordsA, curvePtCoordsB)) # get the comm for this point set ptSetComm = self.points[ptSetName][3] - n = points.shape[0] indices = np.linspace(0, n 
- 1, n, dtype=int) - curvePtCoords = np.vstack((curvePtCoordsA, curvePtCoordsB)) # call the bwd warping routine deltaBar = self._warpSurfPts_b( @@ -3812,9 +3819,11 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): deltaBarCompA = deltaBarCompA_local deltaBarCompB = deltaBarCompB_local + # run each curve through totalSensitivity on their respective DVGeo compSensA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.compA.curvePtsName) compSensB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.compB.curvePtsName) + # add up the compSens from each curve for k, v in compSensA.items(): compSens_local[k] = v From b90acdd528f1b45f60e4d2759d6cc3bf05b43041 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 1 Nov 2023 16:01:16 -0400 Subject: [PATCH 059/110] detail in warning --- pygeo/pyBlock.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pygeo/pyBlock.py b/pygeo/pyBlock.py index 19482ac0..83f3f647 100644 --- a/pygeo/pyBlock.py +++ b/pygeo/pyBlock.py @@ -59,6 +59,7 @@ def __init__(self, initType, fileName=None, FFD=False, symmPlane=None, kmax=4, * self.coef = None # The global (reduced) set of control pts self.embeddedVolumes = {} self.symmPlane = symmPlane + self.filename = fileName if initType == "plot3d": self._readPlot3D(fileName, FFD=FFD, kmax=kmax, **kwargs) @@ -931,10 +932,9 @@ def projectPoints(self, x0, interiorOnly, embTol, eps, nIter): # Check to see if we have bad projections and print a warning: if counter > 0: - print( - " -> Warning: %d point(s) not projected to tolerance: %g. " % (counter, eps) - + "Max Error: %12.6g ; RMS Error: %12.6g" % (DMax, DRms) - ) + print(self.filename) + print(f" -> Warning: {counter} point(s) from {self.filename} not projected to tolerance {eps}.") + print(f"Max Error: {DMax:.6g} ; RMS Error: {DRms:.6g}") print("List of Points is: (pt, delta):") for i in range(len(badPts)): print( From b6ade728d0601a8d2747c9342a33468a6ad60d7a Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 1 Nov 2023 16:01:36 -0400 Subject: [PATCH 060/110] another copy --- pygeo/parameterization/DVGeoMulti.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 9f5be9fb..050f0df1 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -839,7 +839,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ This function computes sensitivity information. - Specificly, it computes the following: + Specifically, it computes the following: :math:`\\frac{dX_{pt}}{dX_{DV}}^T \\frac{dI}{d_{pt}}` Parameters @@ -882,10 +882,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if ptSetComp is None or not ptSetComp.isFillet: self._computeTotalJacobian(ptSetName) - # if this is a fillet, get the jacobian of the border curves - elif ptSetComp.isFillet: - self._computeTotalJacobian(self.intersectComps[0].compA.curvePtsName) - self._computeTotalJacobian(self.intersectComps[0].compB.curvePtsName) # Make dIdpt at least 3D if len(dIdpt.shape) == 2: @@ -908,10 +904,10 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # we pass in dIdpt and the intersection object, along with pointset information the intersection # object adjusts the entries corresponding to projected points and passes back dIdpt in place. 
# if this is a component that surrounds a fillet, we don't get warping derivatives - if ptSetComp.isFillet or self.filletIntersection: + if ptSetComp.isFillet or not self.filletIntersection: compSens = IC.project_b(ptSetName, dIdpt, comm, ptSetComp) - # append this to the dictionary list... + # append this to the dictionary list compSensList.append(compSens) # do the transpose multiplication @@ -938,6 +934,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # fillet pointset has no jacobian from FFD motion if ptSetComp.isFillet: pass + # jacobian for the pointset else: jac = self.points[ptSetName].jac @@ -948,7 +945,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdxT_local = jac.T.dot(dIdpt.T) dIdx_local = dIdxT_local.T - # If we have a comm, globaly reduce with sum + # If we have a comm, globally reduce with sum if comm: dIdx = comm.allreduce(dIdx_local, op=MPI.SUM) else: @@ -1135,7 +1132,7 @@ def _readCGNSFile(self, filename): print(f"The {filename} mesh has {len(nodes)} nodes and {len(triConnStack)} elements.") else: - # create these to recieve the data + # create these to receive the data nodes = None triConn = None triConnStack = None @@ -1266,8 +1263,8 @@ def updateSurfPts(self): if self.isFillet: print("no") else: - self.surfPts = self.DVGeo.update(self.surfPtsName) - self.curvePts = self.DVGeo.update(self.curvePtsName) + self.surfPts = self.DVGeo.update(self.surfPtsName).copy() + self.curvePts = self.DVGeo.update(self.curvePtsName).copy() def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" @@ -3761,9 +3758,10 @@ def project(self, ptSetName, newPts): # np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) def project_b(self, ptSetName, dIdpt, comm=None, comp=None): - points = self.points[ptSetName][0] + points = deepcopy(self.filletComp.surfPtsOrig) n = points.shape[0] + # Initialize dictionaries to accumulate warping sensitivities compSens_local = {} compSensA = {} compSensB = {} @@ -3788,8 +3786,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): return compSens # get current curve points (full definition from pointwise) owned by each component - curvePtCoordsA = self.compA.curvePts - curvePtCoordsB = self.compB.curvePts + curvePtCoordsA = self.compA.curvePtsOrig + curvePtCoordsB = self.compB.curvePtsOrig curvePtCoords = np.vstack((curvePtCoordsA, curvePtCoordsB)) # get the comm for this point set From 0e9273554b571f0ac4ba319f851204169d4e52e4 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 3 Nov 2023 14:50:23 -0400 Subject: [PATCH 061/110] make addCurve flexible --- pygeo/parameterization/DVGeoMulti.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 050f0df1..4b164bf9 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -360,11 +360,16 @@ def addIntersection( self.intersectComps.append(inter) - def addCurve(self, compName, filletName, curveFiles): + def addCurve(self, compName, filletName, curveFiles=None, curvePtsArray=None): if not self.filletIntersection: print("no") # TODO real error - curvePts = self._readDATFile(curveFiles, surf=False) + if curveFiles is not None: + curvePts = self._readDATFile(curveFiles, surf=False) + elif curvePtsArray is not None: + curvePts = curvePtsArray + else: + print("no") # figure out which component and fillet we're dealing with comp = self.comps[compName] From 
46e1320de30c02c551af950947b44db391ee6ef7 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 3 Nov 2023 16:29:11 -0400 Subject: [PATCH 062/110] allow curve to be 1 pt --- pygeo/parameterization/DVGeoMulti.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 4b164bf9..f1989f9d 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -367,7 +367,7 @@ def addCurve(self, compName, filletName, curveFiles=None, curvePtsArray=None): if curveFiles is not None: curvePts = self._readDATFile(curveFiles, surf=False) elif curvePtsArray is not None: - curvePts = curvePtsArray + curvePts = np.atleast_2d(curvePtsArray) else: print("no") @@ -3695,7 +3695,7 @@ def findIntersection(self, surf, curve): # TODO fix this function surfPt = surf[i] # calculate distances between this surface point and the whole curve - ptSurfCurveDist = cdist(surfPt.reshape(1, 3), curve) + ptSurfCurveDist = cdist(surfPt.reshape(1, 3), np.atleast_2d(curve)) # find minimum of these distances and save it dist2ClosestPt = min(ptSurfCurveDist[0]) @@ -3809,7 +3809,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): ) # split deltaBar into the contributions from each curve - curveInd = len(curvePtCoordsA) + curveInd = curvePtCoordsA.shape[0] deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) @@ -3851,3 +3851,4 @@ def _getIntersectionSeam(self, comm): def _getUpdatedCoords(self): self.compA.updateSurfPts() self.compB.updateSurfPts() + self.DVGeo.update("fillet_surf_points") # TODO might not be necessary From f7ca3570ffae76a62f1d922b51890ece477e2767 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 6 Nov 2023 14:28:26 -0500 Subject: [PATCH 063/110] works with tiny test --- pygeo/parameterization/DVGeoMulti.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f1989f9d..980a1e26 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -868,7 +868,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): Define what configurations this design variable will be applied to Use a string for a single configuration or a list for multiple configurations. The default value of None implies that the design - variable appies to *ALL* configurations. + variable applies to *ALL* configurations. Returns @@ -1096,6 +1096,7 @@ def writePointSet(self, name, fileName, solutionTime=None): make visualization easier in tecplot. 
""" + print(f"write {name}") coords = self.update(name) fileName = fileName + "_%s.dat" % name f = openTecplot(fileName, 3) @@ -1254,7 +1255,6 @@ def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName self.isFillet = isFillet self.DVGeo = DVGeo self.surfPts = surfPts - self.surfPtsOrig = deepcopy(surfPts) self.xMin = xMin self.xMax = xMax self.comm = comm @@ -3750,7 +3750,7 @@ def project(self, ptSetName, newPts): curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) delta = newCurveCoords - curvePtCoords - ptsNew = deepcopy(self.filletComp.surfPtsOrig) + ptsNew = self.filletComp.surfPtsOrig.copy() pts0 = self.filletComp.surfPtsOrig # warp interior fillet points From 018d63e29c801369c23b36b4ffb7973e19c5172b Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 6 Nov 2023 14:30:13 -0500 Subject: [PATCH 064/110] removing for debug --- pygeo/parameterization/DVGeo.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 2bf1cf51..8091909a 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -1959,6 +1959,7 @@ def update(self, ptSetName, childDelta=True, config=None): variable applies to *ALL* configurations. """ + self.curPtSet = ptSetName # We've postponed things as long as we can...do the finalization. self._finalize() @@ -2897,7 +2898,7 @@ def addVariablesPyOpt( optProb, globalVars, localVars, sectionlocalVars, spanwiselocalVars, ignoreVars, freezeVars ) - def writeTecplot(self, fileName, solutionTime=None): + def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True): """Write the (deformed) current state of the FFDs to a tecplot file, including the children @@ -2908,6 +2909,9 @@ def writeTecplot(self, fileName, solutionTime=None): SolutionTime : float Solution time to write to the file. This could be a fictitious time to make visualization easier in tecplot. + writeEmbeding : bool + Whether to write the embedding volume in the file. + True by default for visualization but can be turned off for a leaner file. 
""" # Name here doesn't matter, just take the first one @@ -2919,7 +2923,7 @@ def writeTecplot(self, fileName, solutionTime=None): vol_counter = 0 # Write master volumes: - vol_counter += self._writeVols(f, vol_counter, solutionTime) + vol_counter += self._writeVols(f, vol_counter, solutionTime, writeEmbedding) closeTecplot(f) if len(self.points) > 0: @@ -4426,11 +4430,14 @@ def _cascadedDVJacobian(self, config=None): return Jacobian - def _writeVols(self, handle, vol_counter, solutionTime): + def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding): for i in range(len(self.FFD.vols)): writeTecplot3D(handle, "FFD_vol%d" % i, self.FFD.vols[i].coef, solutionTime) self.FFD.vols[i].computeData(recompute=True) - writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) + + if writeEmbedding: + writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) + vol_counter += 1 # Write children volumes: From bd55fa8e3c04b1d1042abecc22529eb0c96e204d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 6 Nov 2023 14:59:04 -0500 Subject: [PATCH 065/110] narrator voice: it was very, very necessary --- pygeo/parameterization/DVGeoMulti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 980a1e26..6e127fe7 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3851,4 +3851,4 @@ def _getIntersectionSeam(self, comm): def _getUpdatedCoords(self): self.compA.updateSurfPts() self.compB.updateSurfPts() - self.DVGeo.update("fillet_surf_points") # TODO might not be necessary + self.DVGeo.update("fillet_surf_points") From 71321e06ed557c7d8850f7046c22c8205afa39c5 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 6 Nov 2023 16:01:09 -0500 Subject: [PATCH 066/110] handle pointsets unaffected by a pointset --- pygeo/parameterization/DVGeoMulti.py | 47 +++++++++++++++++++++------- 1 file changed, 35 insertions(+), 12 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 6e127fe7..81359b8b 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -961,24 +961,38 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dvOffset = 0 # convert dIdx from FFD motion into dIdxDict for pyOptSparse + # non-fillet intersections can have pointsets that span multiple DVGeos # fillet has no dIdx from FFD motion - if not ptSetComp.isFillet: + if not self.filletIntersection: for comp in self.comps.values(): - if not comp.isFillet: - DVGeo = comp.DVGeo - nDVComp = DVGeo.getNDV() + DVGeo = comp.DVGeo + nDVComp = DVGeo.getNDV() + + # we only do this if this component has at least one DV + if nDVComp > 0: + # this part of the sensitivity matrix is owned by this dvgeo + dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) + + for k, v in dIdxComp.items(): + dIdxDict[k] = v - # we only do this if this component has at least one DV - if nDVComp > 0: - # this part of the sensitivity matrix is owned by this dvgeo - dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) + # also increment the offset + dvOffset += nDVComp - for k, v in dIdxComp.items(): - dIdxDict[k] = v + # pointsets in fillet intersection are only tied to one component + elif not ptSetComp.isFillet: + DVGeo = ptSetComp.DVGeo + nDVComp = DVGeo.getNDV() - # also increment the offset - dvOffset += nDVComp + # we only do 
this if this component has at least one DV + if nDVComp > 0: + dIdxComp = DVGeo.convertSensitivityToDict(dIdx[:, dvOffset : dvOffset + nDVComp]) + for k, v in dIdxComp.items(): + dIdxDict[k] = v + + # accumulate dIdxDict if we have derivatives from FFD + if not ptSetComp.isFillet: # finally, we can add the contributions from intersections for compSens in compSensList: # loop over the items of compSens, which are guaranteed to be in dIdxDict @@ -992,6 +1006,15 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): for key, val in compSens.items(): dIdxDict[key] = val + # fillet intersections don't have multiple DVGeos contributing to one pointset + # manually add zeros to that entry + if len(dIdxDict) < self.getNDV(): + dvNames = self.getVarNames() + + for dv in dvNames: + if dv not in dIdxDict.keys(): + dIdxDict[dv] = np.zeros((N, 1)) + if self.debug: print(f"[{self.comm.rank}] finished DVGeo.totalSensitivity") From 0008e06eefe5f959669e3c698e26433f7bb32395 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 7 Nov 2023 11:20:23 -0500 Subject: [PATCH 067/110] stop hard coding pointset name --- pygeo/parameterization/DVGeoMulti.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 81359b8b..2cbc1ce6 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1007,7 +1007,7 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdxDict[key] = val # fillet intersections don't have multiple DVGeos contributing to one pointset - # manually add zeros to that entry + # manually add zeros to that entry if len(dIdxDict) < self.getNDV(): dvNames = self.getVarNames() @@ -3874,4 +3874,4 @@ def _getIntersectionSeam(self, comm): def _getUpdatedCoords(self): self.compA.updateSurfPts() self.compB.updateSurfPts() - self.DVGeo.update("fillet_surf_points") + self.DVGeo.update(self.filletComp.surfPtsName) From 4526057f01c5a41b94ea5b3dcd1c650923bfeb22 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 10 Nov 2023 12:10:07 -0500 Subject: [PATCH 068/110] fix handling of derivatives for DVs on other DVGeos --- pygeo/parameterization/DVGeoMulti.py | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 2cbc1ce6..2c182c36 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -840,6 +840,26 @@ def getVarNames(self, pyOptSparse=False): return dvNames + def getVarVals(self): + dvVals = OrderedDict() + + for comp in self.compNames: + DVGeo = self.comps[comp].DVGeo + if DVGeo is not None: + names = DVGeo.getVarNames() + for dv in names: + if dv in DVGeo.DV_listGlobal: + val = DVGeo.DV_listGlobal[dv].nVal + elif dv in DVGeo.DV_listSpanwiseLocal: + val = DVGeo.DV_listSpanwiseLocal[dv].nVal + elif dv in DVGeo.DV_listSectionLocal: + val = DVGeo.DV_listSectionLocal[dv].nVal + else: + val = DVGeo.DV_listLocal[dv].nVal + dvVals[dv] = val + + return dvVals + def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ This function computes sensitivity information. 
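A minimal sketch (with made-up variable names and sizes) of how a name-to-size mapping like the one returned by getVarVals above is meant to be used: any design variable missing from a sensitivity dictionary gets a zero block of the matching shape.

import numpy as np
from collections import OrderedDict

# assumed example output of getVarVals(): DV name -> number of values
dvVals = OrderedDict([("wing_twist", 5), ("fillet_shape", 24)])

N = 3  # number of functions of interest
dIdxDict = {"wing_twist": np.ones((N, 5))}  # sensitivities computed so far

# pad the dictionary with zeros for DVs that do not affect this point set
for dv, nVal in dvVals.items():
    if dv not in dIdxDict:
        dIdxDict[dv] = np.zeros((N, nVal))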
@@ -1009,11 +1029,11 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # fillet intersections don't have multiple DVGeos contributing to one pointset # manually add zeros to that entry if len(dIdxDict) < self.getNDV(): - dvNames = self.getVarNames() + dvVals = self.getVarVals() - for dv in dvNames: + for dv, nVals in dvVals.items(): if dv not in dIdxDict.keys(): - dIdxDict[dv] = np.zeros((N, 1)) + dIdxDict[dv] = np.zeros((N, nVals)) if self.debug: print(f"[{self.comm.rank}] finished DVGeo.totalSensitivity") @@ -1119,7 +1139,6 @@ def writePointSet(self, name, fileName, solutionTime=None): make visualization easier in tecplot. """ - print(f"write {name}") coords = self.update(name) fileName = fileName + "_%s.dat" % name f = openTecplot(fileName, 3) From d27b90fab9ade853f65bab3f7bf7cc265aad217e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 14 Nov 2023 14:19:03 -0500 Subject: [PATCH 069/110] parallelism or something --- pygeo/parameterization/DVGeoMulti.py | 34 ++++++++++++++-------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 2c182c36..91967df7 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -494,36 +494,36 @@ def addPointSet( if familyName == "fillet": for IC in self.intersectComps: # find the points on the fillet that match each intersection - compAInterPts, compAInterInd = IC.findIntersection( - points.astype(float), IC.compA.curvePts.astype(float) + # compAInterPts, compAInterInd = IC.findIntersection( + # points.astype(float), IC.compA.curvePts.astype(float) + # ) + # compBInterPts, compBInterInd = IC.findIntersection( + # points.astype(float), IC.compB.curvePts.astype(float) + # ) + compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) + compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) + + # print(f"\nrank {self.comm.rank} local compAInterInd {compAInterIndLocal}") + compAInterNPts, compAInterSizes, compAInterPts, compAInterInd = IC._commCurveProj( + compAInterPtsLocal, compAInterIndLocal, self.comm ) - compBInterPts, compBInterInd = IC.findIntersection( - points.astype(float), IC.compB.curvePts.astype(float) + compBInterNPts, compBInterSizes, compBInterPts, compBInterInd = IC._commCurveProj( + compBInterPtsLocal, compBInterIndLocal, self.comm ) + compAInterPts.dtype = self.dtype compBInterPts.dtype = self.dtype - # compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) - # compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) - - # print(f"\nrank {self.comm.rank} local compAInterInd {compAInterIndLocal}") - # compAInterNPts, compAInterSizes, compAInterPts, compAInterInd = IC._commCurveProj( - # compAInterPtsLocal, compAInterIndLocal, self.comm - # ) - # compBInterNPts, compBInterSizes, compBInterPts, compBInterInd = IC._commCurveProj( - # compBInterPtsLocal, compBInterIndLocal, self.comm - # ) # print(f"\nrank {self.comm.rank} local compBInterInd {compBInterIndLocal}") # print(f"\nrank {self.comm.rank} total compBInterInd {compAInterInd}") # print(f"\nrank {self.comm.rank} total compBInterInd {compBInterPts}") - # exit() # add those intersection points to each DVGeo so they get deformed with the FFD compAPtsName = f"{IC.compA.name}_fillet_intersection" compBPtsName = f"{IC.compB.name}_fillet_intersection" - # print(f"\nrank {self.comm.rank} compAInterPts 
{compAInterPts}") - # print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") + print(f"\nrank {self.comm.rank} compAInterPts {compAInterPts}") + print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") IC.compA.DVGeo.addPointSet(compAInterPts, compAPtsName) IC.compB.DVGeo.addPointSet(compBInterPts, compBPtsName) From 708c4d7c7a6359c410c949eb00a9ce87ef3fb1fa Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 15 Nov 2023 09:44:28 -0500 Subject: [PATCH 070/110] fix zeroing of seeds on intersection --- pygeo/parameterization/DVGeoMulti.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 91967df7..8a25169c 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -522,8 +522,8 @@ def addPointSet( compAPtsName = f"{IC.compA.name}_fillet_intersection" compBPtsName = f"{IC.compB.name}_fillet_intersection" - print(f"\nrank {self.comm.rank} compAInterPts {compAInterPts}") - print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") + # print(f"\nrank {self.comm.rank} compAInterPts {compAInterPts}") + # print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") IC.compA.DVGeo.addPointSet(compAInterPts, compAPtsName) IC.compB.DVGeo.addPointSet(compBInterPts, compBPtsName) @@ -3806,7 +3806,7 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = deepcopy(self.filletComp.surfPtsOrig) - n = points.shape[0] + n = points.shape[1] # Initialize dictionaries to accumulate warping sensitivities compSens_local = {} @@ -3818,10 +3818,10 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): allInd = deepcopy(comp.compAInterInd) allInd.extend(comp.compBInterInd) - for i in range(len(points)): - if i in (allInd): - for j in range(3): - dIdpt[i * 3 + j, i, j] = 0 + for i in range(dIdpt.shape[0]): + for j in range(dIdpt.shape[1]): + if j in (allInd): + dIdpt[i, j, :] = 0 else: print("no?") From b77e053c3d72a94e1361a0e7bfd257375609e54d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 16 Nov 2023 11:09:38 -0500 Subject: [PATCH 071/110] finish passing options --- pygeo/parameterization/DVGeo.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 8091909a..07f14575 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2607,6 +2607,9 @@ def computeTotalJacobian(self, ptSetName, config=None): """Return the total point jacobian in CSR format since we need this for TACS""" + if MPI.COMM_WORLD.rank == 0: + print(f"computeTotalJacobian for {ptSetName}") + # Finalize the object, if not done yet self._finalize() self.curPtSet = ptSetName @@ -4442,7 +4445,7 @@ def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding): # Write children volumes: for child in self.children.values(): - vol_counter += child._writeVols(handle, vol_counter, solutionTime) + vol_counter += child._writeVols(handle, vol_counter, solutionTime, writeEmbedding) return vol_counter From 025ccae80d1eeb0267843ec94a441ce8242bad68 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 16 Nov 2023 11:09:45 -0500 Subject: [PATCH 072/110] debug statements --- pygeo/parameterization/DVGeoMulti.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 8a25169c..294912ab 100644 --- 
a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1233,6 +1233,8 @@ def _computeTotalJacobian(self, ptSetName): dvOffset = 0 # we need to call computeTotalJacobian from all comps and get the jacobians for this pointset + if self.comm.rank == 0: + print(f"_computeTotalJacobian for {ptSetName}") if self.filletIntersection: comp = self.comps[self.points[ptSetName].comp] comp.DVGeo.computeTotalJacobian(ptSetName) From 4d951acd064cfbb569382908566985d49e98849e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 17 Nov 2023 11:08:36 -0500 Subject: [PATCH 073/110] fixes failure in parallel --- pygeo/parameterization/DVGeoMulti.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 294912ab..4886ae9a 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1233,12 +1233,16 @@ def _computeTotalJacobian(self, ptSetName): dvOffset = 0 # we need to call computeTotalJacobian from all comps and get the jacobians for this pointset - if self.comm.rank == 0: - print(f"_computeTotalJacobian for {ptSetName}") + # if self.comm.rank == 0: + # print(f"_computeTotalJacobian for {ptSetName} with comm {self.points[ptSetName].comm}") if self.filletIntersection: comp = self.comps[self.points[ptSetName].comp] comp.DVGeo.computeTotalJacobian(ptSetName) - jac = comp.DVGeo.JT[ptSetName].T + # print(f"\nrank {self.comm.rank} jac for pointset {ptSetName}: {comp.DVGeo.JT[ptSetName]}") + + # if the pointset isn't on this proc we won't have a jac + if comp.DVGeo.JT[ptSetName] is not None: + jac = comp.DVGeo.JT[ptSetName].T else: for name in self.compNames: From 17ac04fcf8a604494727596ba841a41f2b30c1bd Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 20 Nov 2023 14:52:38 -0500 Subject: [PATCH 074/110] parallel communication --- pygeo/parameterization/DVGeoMulti.py | 51 ++++++++++++++++++++-------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 4886ae9a..491fdeb9 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -479,8 +479,10 @@ def addPointSet( # create the pointset class if self.filletIntersection: comp = compNames[0] + comm = self.comm else: comp = None + self.points[ptName] = PointSet(points, comm=comm, comp=comp) for comp in self.compNames: @@ -1437,7 +1439,7 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # Return zeros if curvePtCoords is empty if not np.any(curvePtCoords): return deltaBar - + for k in range(dIdPt.shape[0]): for j in indices: # point coordinates with the baseline design @@ -3812,7 +3814,9 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = deepcopy(self.filletComp.surfPtsOrig) - n = points.shape[1] + + # number of functions we have + N = dIdpt.shape[0] # Initialize dictionaries to accumulate warping sensitivities compSens_local = {} @@ -3846,20 +3850,36 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # get the comm for this point set ptSetComm = self.points[ptSetName][3] - indices = np.linspace(0, n - 1, n, dtype=int) - + if ptSetComm: + nCurvePtCoordsA = ptSetComm.allreduce(len(curvePtCoordsA), op=MPI.MAX) + nCurvePtCoordsB = ptSetComm.allreduce(len(curvePtCoordsB), op=MPI.MAX) + else: + nCurvePtCoordsA = len(curvePtCoordsA) + nCurvePtCoordsB = len(curvePtCoordsB) + # call the 
bwd warping routine - deltaBar = self._warpSurfPts_b( - dIdpt, - points, - indices, # TODO could maybe just feed in all indices except boundaries in fillet case - curvePtCoords, - ) + deltaBar = [] + if len(points) > 0: + print(f"go to _warpSurfPts_b for {ptSetName} comm {self.DVGeo.comm.rank} with comm {comm}") + + deltaBar = self._warpSurfPts_b( + dIdpt, + points, + self.indices, # TODO could maybe just feed in all indices except boundaries in fillet case + curvePtCoords, + ) + + # split deltaBar into the contributions from each curve + curveInd = curvePtCoordsA.shape[0] + deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) + deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) - # split deltaBar into the contributions from each curve - curveInd = curvePtCoordsA.shape[0] - deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) - deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) + else: + deltaBarCompA_local = np.zeros((N, nCurvePtCoordsA, 3)) + deltaBarCompB_local = np.zeros((N, nCurvePtCoordsB, 3)) + # deltaBarCompA_local = np.zeros((0,0,0)) + # deltaBarCompB_local = np.zeros((0,0,0)) + print(f"skip warpsurf for {ptSetName} comm {self.DVGeo.comm.rank}") # reduce seeds for both if ptSetComm: @@ -3891,6 +3911,9 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # we can just pass the dictionary compSens = compSens_local + if comm.rank == 0: + print(f"after project_b compSens {compSens}") + return compSens def _getIntersectionSeam(self, comm): From c3710975075119c83c1650d450418d9880b53a64 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 20 Nov 2023 19:34:23 -0500 Subject: [PATCH 075/110] galen stop finding bugs --- pygeo/parameterization/DVGeoMulti.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 4886ae9a..3ad1093c 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -360,7 +360,7 @@ def addIntersection( self.intersectComps.append(inter) - def addCurve(self, compName, filletName, curveFiles=None, curvePtsArray=None): + def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None): if not self.filletIntersection: print("no") # TODO real error @@ -381,7 +381,7 @@ def addCurve(self, compName, filletName, curveFiles=None, curvePtsArray=None): comp.curvePtsOrig = deepcopy(curvePts) # add the curve pointset to the component's DVGeo - comp.DVGeo.addPointSet(curvePts, ptSetName, self.comm) # TODO is comm right here + comp.DVGeo.addPointSet(curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer) # TODO is comm right here # add the curve pointset to DVGeoMulti self.points[ptSetName] = PointSet(curvePts, comm=self.comm, comp=compName) From 811e2b82d591b147efa5ed02f8d8babc6b90027a Mon Sep 17 00:00:00 2001 From: galen ng Date: Tue, 21 Nov 2023 10:56:03 -0500 Subject: [PATCH 076/110] coordXfer changes to DVGeomulti that do not work --- pygeo/parameterization/DVGeoMulti.py | 29 +++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 491fdeb9..79b623f5 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -77,6 +77,9 @@ def __init__( self.debug = debug self.complex = isComplex + # dictionary to save any coordinate transformations we are given + self.coordXfer = {} + # Set real or complex Fortran API if isComplex: 
self.dtype = complex @@ -405,6 +408,7 @@ def addPointSet( compNames=None, comm=None, applyIC=True, + coordXfer=None, **kwargs, ): """ @@ -420,6 +424,8 @@ def addPointSet( A user supplied name to associate with the set of coordinates. This name will need to be provided when updating the coordinates or when getting the derivatives of the coordinates. + coordXfer : function + see DVGeo addPointSet() documentation compNames : list, optional A list of component names that this point set should be added to. To ease bookkeeping, an empty point set with ptName will be added to components not in this list. @@ -657,7 +663,12 @@ def addPointSet( self.comps[comp].surfPtsOrig = deepcopy(points) if comp != "fillet": - self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) + if self.comm.rank ==0: + print(f"adding {ptName} to {comp}") + # The addPointSet call should pass coordXfer all the way through + self.comps[comp].DVGeo.addPointSet(points, ptName, + coordXfer=coordXfer, + **kwargs) # check if this pointset will get the IC treatment if applyIC: @@ -788,6 +799,12 @@ def update(self, ptSetName, config=None): # set the pointset up to date self.updated[ptSetName] = True + # apply coord transformation on newPts + if ptSetName in self.coordXfer: + if self.comm.rank == 0: + print(f"running {ptSetName} through coordXfer") + newPts = self.coordXfer[ptSetName](newPts, mode="fwd", applyDisplacement=True) + self.points[ptSetName].points = newPts return newPts @@ -905,6 +922,16 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ + # apply coord transformation on dIdpt + if ptSetName in self.coordXfer: + if self.comm.rank == 0: + print("applying coordXfer to dIdpt") + # loop over functions + for ifunc in range(N): + # its important to remember that dIdpt are vector-like values, + # so we don't apply the transformations and only the rotations! + dIdpt[ifunc] = self.coordXfer[ptSetName](dIdpt[ifunc], mode="bwd", applyDisplacement=False) + # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if ptSetComp is None or not ptSetComp.isFillet: From 3a7ce00cbcfd8cad91b153c748849133b639d882 Mon Sep 17 00:00:00 2001 From: galen ng Date: Sun, 26 Nov 2023 13:31:28 -0500 Subject: [PATCH 077/110] coordxfer bug --- pygeo/parameterization/DVGeoMulti.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index b6dfa56f..ee738b19 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -364,6 +364,9 @@ def addIntersection( self.intersectComps.append(inter) def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None): + """ + Chances are you do not need coordXfer and origConfig passed through here. + """ if not self.filletIntersection: print("no") # TODO real error @@ -424,8 +427,6 @@ def addPointSet( A user supplied name to associate with the set of coordinates. This name will need to be provided when updating the coordinates or when getting the derivatives of the coordinates. - coordXfer : function - see DVGeo addPointSet() documentation compNames : list, optional A list of component names that this point set should be added to. To ease bookkeeping, an empty point set with ptName will be added to components not in this list. 
@@ -437,9 +438,24 @@ def addPointSet( applyIC : bool, optional Flag to specify whether this point set will follow the updated intersection curve(s). This is typically only needed for the CFD surface mesh. + coordXfer : function, optional + See DVGeo docs """ + # Do the very first coordXfer if it exists + # We do not need to pass a coordXfer callback all the way through + # because it already exists in the DVGeo level + if coordXfer is not None: + self.coordXfer[ptName] = coordXfer + # print(f"running {ptName} through coordXfer") + # points = self.coordXfer[ptName](points, mode="fwd", applyDisplacement=True) + print("running coordXfer and saving it") + points = coordXfer(points, mode="fwd", applyDisplacement=True) + else: + print(f"no coordXfer for {ptName}") + # Find out what the **kwargs are here + # if compList is not provided, we use all components if compNames is None: compNames = self.compNames @@ -663,12 +679,7 @@ def addPointSet( self.comps[comp].surfPtsOrig = deepcopy(points) if comp != "fillet": - if self.comm.rank ==0: - print(f"adding {ptName} to {comp}") - # The addPointSet call should pass coordXfer all the way through - self.comps[comp].DVGeo.addPointSet(points, ptName, - coordXfer=coordXfer, - **kwargs) + self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) # check if this pointset will get the IC treatment if applyIC: From fe0800131d4173bbfe366b26ae424e98bcbed150 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 27 Nov 2023 10:13:01 -0500 Subject: [PATCH 078/110] broke print statements --- pygeo/parameterization/DVGeoMulti.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 5b5e40e3..7e23df7a 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3911,7 +3911,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # we can just pass the dictionary compSens = compSens_local - if comm.rank == 0: + if comm is None or comm.rank == 0: print(f"after project_b compSens {compSens}") return compSens From 5ad8970b5cfd449e4216b34293add7e06f19de8a Mon Sep 17 00:00:00 2001 From: galen ng Date: Mon, 27 Nov 2023 19:02:13 -0500 Subject: [PATCH 079/110] these are the changes to get coordXfer working in DVGeoMulti fillet method --- pygeo/parameterization/DVGeoMulti.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index ee738b19..c10643d8 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -365,7 +365,8 @@ def addIntersection( def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None): """ - Chances are you do not need coordXfer and origConfig passed through here. 
+ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame + and the callback function needs to be passed in """ if not self.filletIntersection: print("no") # TODO real error @@ -445,16 +446,11 @@ def addPointSet( # Do the very first coordXfer if it exists # We do not need to pass a coordXfer callback all the way through - # because it already exists in the DVGeo level + # because it already exists in the DVGeoMulti level if coordXfer is not None: self.coordXfer[ptName] = coordXfer - # print(f"running {ptName} through coordXfer") - # points = self.coordXfer[ptName](points, mode="fwd", applyDisplacement=True) - print("running coordXfer and saving it") - points = coordXfer(points, mode="fwd", applyDisplacement=True) - else: - print(f"no coordXfer for {ptName}") - # Find out what the **kwargs are here + + points = self.coordXfer[ptName](points, mode="bwd", applyDisplacement=True) # if compList is not provided, we use all components if compNames is None: @@ -812,8 +808,6 @@ def update(self, ptSetName, config=None): # apply coord transformation on newPts if ptSetName in self.coordXfer: - if self.comm.rank == 0: - print(f"running {ptSetName} through coordXfer") newPts = self.coordXfer[ptSetName](newPts, mode="fwd", applyDisplacement=True) self.points[ptSetName].points = newPts From a49d14e1734da10ab9183b98eb9680bf5eaa0ac1 Mon Sep 17 00:00:00 2001 From: galen ng Date: Tue, 28 Nov 2023 10:06:29 -0500 Subject: [PATCH 080/110] remove print statement in sensitivity call --- pygeo/parameterization/DVGeoMulti.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 18312071..c779ba20 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -927,10 +927,8 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ - # apply coord transformation on dIdpt + # apply coord transformation on dIdpt if this pointset has it. if ptSetName in self.coordXfer: - if self.comm.rank == 0: - print("applying coordXfer to dIdpt") # loop over functions for ifunc in range(N): # its important to remember that dIdpt are vector-like values, From 032086d618a697b664b99b0a5f2d5217a35f245a Mon Sep 17 00:00:00 2001 From: galen ng Date: Tue, 28 Nov 2023 16:29:44 -0500 Subject: [PATCH 081/110] add coordxfer to writeout capability --- pygeo/parameterization/DVGeo.py | 39 ++++++++++++++++++++++++++++----- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 07f14575..7f4d08c9 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2901,7 +2901,7 @@ def addVariablesPyOpt( optProb, globalVars, localVars, sectionlocalVars, spanwiselocalVars, ignoreVars, freezeVars ) - def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True): + def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True, coordXfer=None): """Write the (deformed) current state of the FFDs to a tecplot file, including the children @@ -2915,6 +2915,10 @@ def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True): writeEmbeding : bool Whether to write the embedding volume in the file. True by default for visualization but can be turned off for a leaner file. + coordXfer : callback func + Apply the coordinate transfer to the points before writing. If not, the + output is the deformed points in the FFD coordinate system. + None by default. 
""" # Name here doesn't matter, just take the first one @@ -2926,7 +2930,7 @@ def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True): vol_counter = 0 # Write master volumes: - vol_counter += self._writeVols(f, vol_counter, solutionTime, writeEmbedding) + vol_counter += self._writeVols(f, vol_counter, solutionTime, writeEmbedding, coordXfer) closeTecplot(f) if len(self.points) > 0: @@ -4433,13 +4437,38 @@ def _cascadedDVJacobian(self, config=None): return Jacobian - def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding): + def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding, coordXfer): for i in range(len(self.FFD.vols)): - writeTecplot3D(handle, "FFD_vol%d" % i, self.FFD.vols[i].coef, solutionTime) + if coordXfer is not None: + data = self.FFD.vols[i].coef + ny = data.shape[1] + nz = data.shape[2] + FFDPts = np.zeros_like(data) + for k in range(nz): + for j in range(ny): + points = data[:, j, k, :] + FFDPt = coordXfer(points, mode="fwd", applyDisplacement=True) + FFDPts[:, j, k, :] = FFDPt + else: + FFDPts = self.FFD.vols[i].coef + + writeTecplot3D(handle, "FFD_vol%d" % i, FFDPts, solutionTime) self.FFD.vols[i].computeData(recompute=True) if writeEmbedding: - writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) + if coordXfer is not None: + data = self.FFD.vols[i].data + ny = data.shape[1] + nz = data.shape[2] + embeddingPts = np.zeros_like(data) + for k in range(nz): + for j in range(ny): + points = data[:, j, k, :] + embeddingPt = coordXfer(points, mode="fwd", applyDisplacement=True) + embeddingPts[:, j, k, :] = embeddingPt + else: + embeddingPts = self.FFD.vols[i].data + writeTecplot3D(handle, "embedding_vol", embeddingPts, solutionTime) vol_counter += 1 From 6995529a51935868a9eb6e179d5d572355287e56 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 28 Nov 2023 17:08:57 -0500 Subject: [PATCH 082/110] make the test pointset names unique --- pygeo/parameterization/DVGeoMulti.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 7e23df7a..d95495f2 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -185,7 +185,7 @@ def addComponent( # add these points to the corresponding dvgeo unless this component is a fillet if not isFillet: - DVGeo.addPointSet(nodes, "datPts", **pointSetKwargs) + DVGeo.addPointSet(nodes, f"{comp}DatPts", **pointSetKwargs) else: nodes = None @@ -381,7 +381,9 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru comp.curvePtsOrig = deepcopy(curvePts) # add the curve pointset to the component's DVGeo - comp.DVGeo.addPointSet(curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer) # TODO is comm right here + comp.DVGeo.addPointSet( + curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer + ) # TODO is comm right here # add the curve pointset to DVGeoMulti self.points[ptSetName] = PointSet(curvePts, comm=self.comm, comp=compName) @@ -482,7 +484,7 @@ def addPointSet( comm = self.comm else: comp = None - + self.points[ptName] = PointSet(points, comm=comm, comp=comp) for comp in self.compNames: @@ -1236,7 +1238,7 @@ def _computeTotalJacobian(self, ptSetName): # we need to call computeTotalJacobian from all comps and get the jacobians for this pointset # if self.comm.rank == 0: - # print(f"_computeTotalJacobian for {ptSetName} with comm {self.points[ptSetName].comm}") + # 
print(f"_computeTotalJacobian for {ptSetName} with comm {self.points[ptSetName].comm}") if self.filletIntersection: comp = self.comps[self.points[ptSetName].comp] comp.DVGeo.computeTotalJacobian(ptSetName) @@ -1439,7 +1441,7 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # Return zeros if curvePtCoords is empty if not np.any(curvePtCoords): return deltaBar - + for k in range(dIdPt.shape[0]): for j in indices: # point coordinates with the baseline design @@ -3856,12 +3858,12 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): else: nCurvePtCoordsA = len(curvePtCoordsA) nCurvePtCoordsB = len(curvePtCoordsB) - + # call the bwd warping routine deltaBar = [] if len(points) > 0: print(f"go to _warpSurfPts_b for {ptSetName} comm {self.DVGeo.comm.rank} with comm {comm}") - + deltaBar = self._warpSurfPts_b( dIdpt, points, @@ -3913,7 +3915,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): if comm is None or comm.rank == 0: print(f"after project_b compSens {compSens}") - + return compSens def _getIntersectionSeam(self, comm): From 8d3264b0fca376f50cc335b8817399f05b0ada17 Mon Sep 17 00:00:00 2001 From: galen ng Date: Wed, 29 Nov 2023 14:52:42 -0500 Subject: [PATCH 083/110] I made a bug in the dvcon sensitivities... fixed now --- pygeo/parameterization/DVGeoMulti.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index c779ba20..6f976fe9 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -927,14 +927,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): """ - # apply coord transformation on dIdpt if this pointset has it. - if ptSetName in self.coordXfer: - # loop over functions - for ifunc in range(N): - # its important to remember that dIdpt are vector-like values, - # so we don't apply the transformations and only the rotations! - dIdpt[ifunc] = self.coordXfer[ptSetName](dIdpt[ifunc], mode="bwd", applyDisplacement=False) - # Compute the total Jacobian for this point set as long as this isn't a fillet (no DVGeo control) ptSetComp = self.comps[self.points[ptSetName].comp] # todo this is dumb!! if ptSetComp is None or not ptSetComp.isFillet: @@ -945,6 +937,15 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): dIdpt = np.array([dIdpt]) N = dIdpt.shape[0] + # apply coord transformation on dIdpt if this pointset has it. + if ptSetName in self.coordXfer: + # loop over functions + for ifunc in range(N): + # its important to remember that dIdpt are vector-like values, + # so we don't apply the transformations and only the rotations! 
+ dIdpt[ifunc] = self.coordXfer[ptSetName](dIdpt[ifunc], mode="bwd", applyDisplacement=False) + + # create a dictionary to save total sensitivity info that might come out of the ICs compSensList = [] From 255c0c6eda8a8826fca2bec30dc65740c85041cb Mon Sep 17 00:00:00 2001 From: galen ng Date: Thu, 30 Nov 2023 17:38:53 -0500 Subject: [PATCH 084/110] hannah fixed the parallel dv bug in sensitivitity --- pygeo/parameterization/DVGeoMulti.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 6f976fe9..1a14b90b 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1255,7 +1255,11 @@ def _computeTotalJacobian(self, ptSetName): """ # number of design variables - nDV = self.getNDV() + if self.filletIntersection: + comp = self.comps[self.points[ptSetName].comp] + nDV = comp.DVGeo.getNDV() + else: + nDV = self.getNDV() # Initialize the Jacobian as a LIL matrix because this is convenient for indexing jac = sparse.lil_matrix((self.points[ptSetName].nPts * 3, nDV)) @@ -1269,7 +1273,7 @@ def _computeTotalJacobian(self, ptSetName): # if self.comm.rank == 0: # print(f"_computeTotalJacobian for {ptSetName} with comm {self.points[ptSetName].comm}") if self.filletIntersection: - comp = self.comps[self.points[ptSetName].comp] + comp.DVGeo.computeTotalJacobian(ptSetName) # print(f"\nrank {self.comm.rank} jac for pointset {ptSetName}: {comp.DVGeo.JT[ptSetName]}") From 55538d2b036a1773932d792ddd7dd2762a13ab17 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 12 Dec 2023 13:37:14 -0500 Subject: [PATCH 085/110] print statements --- pygeo/parameterization/DVGeoMulti.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index d95495f2..d07fedf8 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1709,7 +1709,7 @@ def __init__( self.intDir = intDir # only the node coordinates will be modified for the intersection calculations because we have calculated and saved all the connectivity information - if self.comm.rank == 0: + if self.comm.rank == 0 and self.debug: print(f"Computing initial intersection between {compA} and {compB}") self.seam0 = self._getIntersectionSeam(self.comm, firstCall=True) self.seam = self.seam0.copy() @@ -3862,7 +3862,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # call the bwd warping routine deltaBar = [] if len(points) > 0: - print(f"go to _warpSurfPts_b for {ptSetName} comm {self.DVGeo.comm.rank} with comm {comm}") + if self.DVGeo.debug: + print(f"go to _warpSurfPts_b for {ptSetName} comm {self.DVGeo.comm.rank} with comm {comm}") deltaBar = self._warpSurfPts_b( dIdpt, @@ -3881,7 +3882,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): deltaBarCompB_local = np.zeros((N, nCurvePtCoordsB, 3)) # deltaBarCompA_local = np.zeros((0,0,0)) # deltaBarCompB_local = np.zeros((0,0,0)) - print(f"skip warpsurf for {ptSetName} comm {self.DVGeo.comm.rank}") + if self.DVGeo.debug: + print(f"skip warpsurf for {ptSetName} comm {self.DVGeo.comm.rank}") # reduce seeds for both if ptSetComm: @@ -3913,8 +3915,8 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # we can just pass the dictionary compSens = compSens_local - if comm is None or comm.rank == 0: - print(f"after project_b compSens {compSens}") + # if comm is None or comm.rank == 0: + # print(f"after project_b 
compSens {compSens}") return compSens From ec95e57f29784e4ff73cf28d2ece5bad0c71e228 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Tue, 12 Dec 2023 17:43:06 -0500 Subject: [PATCH 086/110] take out unnecessary allreduce causing derivative errors in parallel! --- pygeo/parameterization/DVGeoMulti.py | 57 +++++++++++++++------------- 1 file changed, 30 insertions(+), 27 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index d0d93bb9..1e530fca 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -447,7 +447,7 @@ def addPointSet( """ # Do the very first coordXfer if it exists - # We do not need to pass a coordXfer callback all the way through + # We do not need to pass a coordXfer callback all the way through # because it already exists in the DVGeoMulti level if coordXfer is not None: self.coordXfer[ptName] = coordXfer @@ -678,6 +678,11 @@ def addPointSet( if comp != "fillet": self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) + # add a dummy array for indices + # only necessary to fix test cases, + else: + for IC in self.intersectComps: + IC.indices = np.linspace(0, len(points) - 1, len(points), dtype=int) # check if this pointset will get the IC treatment if applyIC: @@ -811,7 +816,7 @@ def update(self, ptSetName, config=None): # apply coord transformation on newPts if ptSetName in self.coordXfer: newPts = self.coordXfer[ptSetName](newPts, mode="fwd", applyDisplacement=True) - + self.points[ptSetName].points = newPts return newPts @@ -947,7 +952,6 @@ def totalSensitivity(self, dIdpt, ptSetName, comm=None, config=None): # so we don't apply the transformations and only the rotations! dIdpt[ifunc] = self.coordXfer[ptSetName](dIdpt[ifunc], mode="bwd", applyDisplacement=False) - # create a dictionary to save total sensitivity info that might come out of the ICs compSensList = [] @@ -1275,7 +1279,6 @@ def _computeTotalJacobian(self, ptSetName): # if self.comm.rank == 0: # print(f"_computeTotalJacobian for {ptSetName} with comm {self.points[ptSetName].comm}") if self.filletIntersection: - comp.DVGeo.computeTotalJacobian(ptSetName) # print(f"\nrank {self.comm.rank} jac for pointset {ptSetName}: {comp.DVGeo.JT[ptSetName]}") @@ -3852,6 +3855,12 @@ def project(self, ptSetName, newPts): def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = deepcopy(self.filletComp.surfPtsOrig) + # skip calculating warping derivatives for curve points + # TODO these pointsets should never some here from totalSensitivity + if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: + print("no") + return compSens + # number of functions we have N = dIdpt.shape[0] @@ -3872,12 +3881,17 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): else: print("no?") - compSens = {} - - # skip calculating warping derivatives for curve points - # TODO these pointsets should never some here from totalSensitivity - if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: - return compSens + # loop over each entry in xA and xB and create a dummy zero gradient array for all + xA = self.compA.DVGeo.getValues() + xB = self.compB.DVGeo.getValues() + for k, v in xA.items(): + # create the zero array: + zeroSens = np.zeros((N, v.shape[0])) + compSens_local[k] = zeroSens + for k, v in xB.items(): + # create the zero array: + zeroSens = np.zeros((N, v.shape[0])) + compSens_local[k] = zeroSens # get current curve points (full definition from 
pointwise) owned by each component curvePtCoordsA = self.compA.curvePtsOrig @@ -3895,12 +3909,11 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): nCurvePtCoordsB = len(curvePtCoordsB) # call the bwd warping routine - deltaBar = [] if len(points) > 0: if self.DVGeo.debug: print(f"go to _warpSurfPts_b for {ptSetName} comm {self.DVGeo.comm.rank} with comm {comm}") - deltaBar = self._warpSurfPts_b( + delta_b_local = self._warpSurfPts_b( dIdpt, points, self.indices, # TODO could maybe just feed in all indices except boundaries in fillet case @@ -3909,26 +3922,16 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # split deltaBar into the contributions from each curve curveInd = curvePtCoordsA.shape[0] - deltaBarCompA_local = deepcopy(deltaBar[:, :curveInd, :]) - deltaBarCompB_local = deepcopy(deltaBar[:, curveInd:, :]) + deltaBarCompA = deepcopy(delta_b_local[:, :curveInd, :]) + deltaBarCompB = deepcopy(delta_b_local[:, curveInd:, :]) else: - deltaBarCompA_local = np.zeros((N, nCurvePtCoordsA, 3)) - deltaBarCompB_local = np.zeros((N, nCurvePtCoordsB, 3)) - # deltaBarCompA_local = np.zeros((0,0,0)) - # deltaBarCompB_local = np.zeros((0,0,0)) + deltaBarCompA = np.zeros((N, nCurvePtCoordsA, 3)) + deltaBarCompB = np.zeros((N, nCurvePtCoordsB, 3)) + if self.DVGeo.debug: print(f"skip warpsurf for {ptSetName} comm {self.DVGeo.comm.rank}") - # reduce seeds for both - if ptSetComm: - deltaBarCompA = ptSetComm.allreduce(deltaBarCompA_local, op=MPI.SUM) - deltaBarCompB = ptSetComm.allreduce(deltaBarCompB_local, op=MPI.SUM) - # no comm, local is global - else: - deltaBarCompA = deltaBarCompA_local - deltaBarCompB = deltaBarCompB_local - # run each curve through totalSensitivity on their respective DVGeo compSensA = self.compA.DVGeo.totalSensitivity(deltaBarCompA, self.compA.curvePtsName) compSensB = self.compB.DVGeo.totalSensitivity(deltaBarCompB, self.compB.curvePtsName) From 69b61a3e5e17ece1043da03fd3098f703d93383c Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 14 Dec 2023 10:14:02 -0500 Subject: [PATCH 087/110] handle constraint & mesh pointsets differently --- pygeo/parameterization/DVGeoMulti.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 1e530fca..ad16269b 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -413,7 +413,7 @@ def addPointSet( familyName=None, compNames=None, comm=None, - applyIC=True, + applyIC=False, coordXfer=None, **kwargs, ): @@ -513,7 +513,9 @@ def addPointSet( # is this intersection group a fillet or normal if self.filletIntersection: # is this pointset being added to a fillet component or a controlled component - if familyName == "fillet": + # only find intersections if this is the mesh pointset (applyIC=True) + # if this is from a constraint (applyIC=False) skip this step + if familyName == "fillet" and applyIC: for IC in self.intersectComps: # find the points on the fillet that match each intersection # compAInterPts, compAInterInd = IC.findIntersection( @@ -671,16 +673,18 @@ def addPointSet( elif self.filletIntersection: for comp in compNames: - self.comps[comp].surfPtsName = ptName - self.comps[comp].surfPts = points - self.comps[comp].nPts = len(points) - self.comps[comp].surfPtsOrig = deepcopy(points) + # only save this as the surface points if it's the actual mesh pointset (applyIC=True) + if applyIC: + self.comps[comp].surfPtsName = ptName + self.comps[comp].surfPtsOrig 
= deepcopy(points) + self.comps[comp].surfPts = points + self.comps[comp].nPts = len(points) if comp != "fillet": self.comps[comp].DVGeo.addPointSet(points, ptName, **kwargs) # add a dummy array for indices # only necessary to fix test cases, - else: + elif applyIC: for IC in self.intersectComps: IC.indices = np.linspace(0, len(points) - 1, len(points), dtype=int) From 44b787db539969a42ce45541baf3219bc9d16c72 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Thu, 14 Dec 2023 10:27:55 -0500 Subject: [PATCH 088/110] clean up --- pygeo/parameterization/DVGeo.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 7f4d08c9..b4cf0701 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -2607,9 +2607,6 @@ def computeTotalJacobian(self, ptSetName, config=None): """Return the total point jacobian in CSR format since we need this for TACS""" - if MPI.COMM_WORLD.rank == 0: - print(f"computeTotalJacobian for {ptSetName}") - # Finalize the object, if not done yet self._finalize() self.curPtSet = ptSetName From 254b121b406dfc4ec4130d5ac8154fdf77cfce66 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 17 May 2024 13:49:55 -0400 Subject: [PATCH 089/110] tol --- pygeo/parameterization/DVGeoMulti.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index ad16269b..dc665382 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -234,7 +234,7 @@ def addIntersection( dStarA=0.2, dStarB=0.2, featureCurves=None, - distTol=1e-14, + distTol=1e-4, project=False, marchDir=1, includeCurves=False, @@ -496,6 +496,9 @@ def addPointSet( if familyName is not None: compNames = [familyName] + if self.debug: + print(f"addPointSet {ptName} to {compNames} applyIC {applyIC}") + # create the pointset class if self.filletIntersection: comp = compNames[0] @@ -715,7 +718,6 @@ def setDesignVars(self, dvDict): Any additional keys in the dictionary are simply ignored. """ - # Check if we have duplicate DV names if self.checkDVs: dvNames = self.getVarNames() @@ -775,7 +777,6 @@ def update(self, ptSetName, config=None): This must match one of those added in an :func:`addPointSet()` call. 
""" - # get the new points newPts = np.zeros((self.points[ptSetName].nPts, 3), dtype=self.dtype) From d49cb624d6ea366e6603b53fb0d70f348638f7e6 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 17 May 2024 13:55:09 -0400 Subject: [PATCH 090/110] partially merge main in --- pygeo/constraints/DVCon.py | 227 +++++++++++++- pygeo/mphys/mphys_dvgeo.py | 197 +++++++++---- pygeo/parameterization/DVGeo.py | 294 ++++++++++++------- pygeo/pyBlock.py | 26 +- tests/reg_tests/ref/test_active_children.ref | 8 +- tests/reg_tests/test_DVConstraints.py | 241 +++++++++++++++ tests/reg_tests/test_DVGeometry.py | 74 ++++- 7 files changed, 889 insertions(+), 178 deletions(-) diff --git a/pygeo/constraints/DVCon.py b/pygeo/constraints/DVCon.py index 2403b145..0b66d7f4 100644 --- a/pygeo/constraints/DVCon.py +++ b/pygeo/constraints/DVCon.py @@ -19,7 +19,12 @@ from .locationConstraint import LocationConstraint from .planarityConstraint import PlanarityConstraint from .radiusConstraint import RadiusConstraint -from .thicknessConstraint import ThicknessConstraint, ThicknessToChordConstraint +from .thicknessConstraint import ( + ProjectedThicknessConstraint, + ProximityConstraint, + ThicknessConstraint, + ThicknessToChordConstraint, +) from .volumeConstraint import CompositeVolumeConstraint, TriangulatedVolumeConstraint, VolumeConstraint @@ -437,6 +442,7 @@ def addThicknessConstraints2D( surfaceName="default", DVGeoName="default", compNames=None, + projected=False, ): r""" Add a set of thickness constraints that span a logically a @@ -578,6 +584,10 @@ def addThicknessConstraints2D( with this constraint should be added. If None, the point set is added to all components. + projected : bool + Use the component of the toothpick thickness aligned with + the original thickness direction. + Examples -------- >>> # Take unique square in x-z plane and and 10 along z-direction (spanWise) @@ -611,7 +621,13 @@ def addThicknessConstraints2D( conName = "%s_thickness_constraints_%d" % (self.name, len(self.constraints[typeName])) else: conName = name - self.constraints[typeName][conName] = ThicknessConstraint( + + if projected: + thickness_class = ProjectedThicknessConstraint + else: + thickness_class = ThicknessConstraint + + self.constraints[typeName][conName] = thickness_class( conName, coords, lower, upper, scaled, scale, self.DVGeometries[DVGeoName], addToPyOpt, compNames ) @@ -629,6 +645,7 @@ def addThicknessConstraints1D( surfaceName="default", DVGeoName="default", compNames=None, + projected=False, ): r""" Add a set of thickness constraints oriented along a poly-line. @@ -727,6 +744,10 @@ def addThicknessConstraints1D( with this constraint should be added. If None, the point set is added to all components. + projected : bool + Use the component of the toothpick thickness aligned with + the original thickness direction. 
+ """ self._checkDVGeo(DVGeoName) @@ -760,10 +781,205 @@ def addThicknessConstraints1D( conName = "%s_thickness_constraints_%d" % (self.name, len(self.constraints[typeName])) else: conName = name - self.constraints[typeName][conName] = ThicknessConstraint( + + if projected: + thickness_class = ProjectedThicknessConstraint + else: + thickness_class = ThicknessConstraint + + self.constraints[typeName][conName] = thickness_class( conName, coords, lower, upper, scaled, scale, self.DVGeometries[DVGeoName], addToPyOpt, compNames ) + def addProximityConstraints( + self, + ptList, + vecList, + surfA, + surfB, + pointSetKwargsA=None, + pointSetKwargsB=None, + lower=1.0, + upper=3.0, + scaled=True, + scale=1.0, + name=None, + addToPyOpt=True, + DVGeoName="default", + compNames=None, + ): + r""" + Add "toothpick" thickness constraints between multiple surfaces. + This can be used to control proximity of multiple components, + each defined with its own surface. Surfaces must move in the same + direction as the constraint orientation to prevent skewness. + i.e, if the toothpicks are oriented in the y-direction, + the surfaces must only move in the y-direction. + + Parameters + ---------- + ptList : list or array of size (N x 3) + Together with the vecList, these points will be used to + determine the intersections with each component surface. + + vecList : list or array of size (N x 3) + Directions that originate from each point in ptList + that define the search vector directions for component + surface intersections. The vector direction should + point to component A, and the opposite direction should + point to component B + + surfA : str + Name of the DVCon surface mesh that defines component A + + surfB : str + Name of the DVCon surface mesh that defines component B + + pointSetKwargsA : dict + The dictionary of keyword arguments to be passed to DVGeo + when points on component A is added to DVGeo. The most common + use case is to pick which child FFDs are active for this point. + This is an optional argument. The default behavior adds toothpick + points to DVGeo like any other point. + + pointSetKwargsB : dict + The dictionary of keyword arguments to be passed to DVGeo + when points on component B is added to DVGeo. The most common + use case is to pick which child FFDs are active for this point. + This is an optional argument. The default behavior adds toothpick + points to DVGeo like any other point. + + lower : float or array of size nCon + The lower bound for the constraint. A single float will + apply the same bounds to all constraints, while the array + option will use different bounds for each constraint. + + upper : float or array of size nCon + The upper bound for the constraint. A single float will + apply the same bounds to all constraints, while the array + option will use different bounds for each constraint. + + scaled : bool + Flag specifying whether or not the constraint is to be + implemented in a scaled fashion or not. + + * scaled=True: The initial length of each thickness + constraint is defined to be 1.0. In this case, the lower + and upper bounds are given in multiple of the initial + length. lower=0.85, upper=1.15, would allow for 15% + change in each direction from the original length. For + aerodynamic shape optimizations, this option is used + most often. + + * scaled=False: No scaling is applied and the physical lengths + must be specified for the lower and upper bounds. + + scale : float or array of size nCon + This is the optimization scaling of the + constraint. 
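As a brief aside before the remaining parameters, a hedged usage sketch for this method is shown here; it assumes DVCon is an existing DVConstraints instance and that the two surfaces were registered earlier with setSurface under the illustrative names used below.

import numpy as np

# one toothpick seeded midway between the components, pointing toward component A
ptList = [np.array([0.5, 0.0, 0.0])]
vecList = [np.array([0.0, 1.0, 0.0])]

DVCon.addProximityConstraints(
    ptList,
    vecList,
    surfA="componentA_surf",
    surfB="componentB_surf",
    lower=0.01,      # keep at least this physical gap (scaled=False)
    upper=1e3,
    scaled=False,
    name="gap_con",
)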
Typically this parameter will not need to be + changed. If the thickness constraints are scaled, this + already results in well-scaled constraint values, and + scale can be left at 1.0. If scaled=False, it may changed + to a more suitable value of the resulting physical + thickness have magnitudes vastly different than O(1). + + name : str + Normally this does not need to be set. Only use this if + you have multiple DVCon objects and the constraint names + need to be distinguished **or** you are using this set of + thickness constraints for something other than a direct + constraint in pyOptSparse. + + addToPyOpt : bool + Normally this should be left at the default of True. If + the values need to be processed (modified) *before* they are + given to the optimizer, set this flag to False. + + DVGeoName : str + Name of the DVGeo object to compute the constraint with. You only + need to set this if you're using multiple DVGeo objects + for a problem. For backward compatibility, the name is 'default' by default + + compNames : list + If using DVGeometryMulti, the components to which the point set associated + with this constraint should be added. + If None, the point set is added to all components. + + """ + self._checkDVGeo(DVGeoName) + + if pointSetKwargsA is None: + pointSetKwargsA = {} + if pointSetKwargsB is None: + pointSetKwargsB = {} + + ptList = np.atleast_2d(ptList) + vecList = np.atleast_2d(vecList) + nCon = ptList.shape[0] + if nCon != vecList.shape[0]: + raise Error( + "The vecList argument of addProximityConstraints needs to have the same number of vectors as the number of points." + ) + coordsA = np.zeros((nCon, 3)) + coordsB = np.zeros((nCon, 3)) + + # get the intersections with both components A and B + p0A, p1A, p2A = self._getSurfaceVertices(surfaceName=surfA) + v1A = p1A - p0A + v2A = p2A - p0A + p0B, p1B, p2B = self._getSurfaceVertices(surfaceName=surfB) + v1B = p1B - p0B + v2B = p2B - p0B + + # Project all the points + for ii in range(nCon): + # get the point and vector + pt = ptList[ii] + vec = vecList[ii] + + # Projections. Surf A first. + projA, fail = geo_utils.projectNodePosOnly(pt, vec, p0A, v1A, v2A) + if fail > 0: + raise Error( + "There was an error projecting a node to surf A " + "at (%f, %f, %f) with normal (%f, %f, %f)." % (pt[0], pt[1], pt[2], vec[0], vec[1], vec[2]) + ) + + # Surf B. + projB, fail = geo_utils.projectNodePosOnly(pt, -vec, p0B, v1B, v2B) + if fail > 0: + raise Error( + "There was an error projecting a node to surf B" + "at (%f, %f, %f) with normal (%f, %f, %f)." 
% (pt[0], pt[1], pt[2], -vec[0], -vec[1], -vec[2]) + ) + + coordsA[ii] = projA + coordsB[ii] = projB + + # Create the thickness constraint object: + typeName = "thickCon" + if typeName not in self.constraints: + self.constraints[typeName] = OrderedDict() + + if name is None: + conName = "%s_proximity_constraints_%d" % (self.name, len(self.constraints[typeName])) + else: + conName = name + self.constraints[typeName][conName] = ProximityConstraint( + conName, + coordsA, + coordsB, + pointSetKwargsA, + pointSetKwargsB, + lower, + upper, + scaled, + scale, + self.DVGeometries[DVGeoName], + addToPyOpt, + compNames, + ) + def addLERadiusConstraints( self, leList, @@ -928,14 +1144,13 @@ def addLERadiusConstraints( chordDir /= np.linalg.norm(chordDir) for i in range(nSpan): # Project actual node: - up, down, fail = geo_utils.projectNode(X[i], chordDir, p0, p1 - p0, p2 - p0) + lePts[i], fail = geo_utils.projectNodePosOnly(X[i], chordDir, p0, p1 - p0, p2 - p0) if fail > 0: raise Error( "There was an error projecting a node " - "at (%f, %f, %f) with normal (%f, %f, %f)." + "at (%f, %f, %f) in direction (%f, %f, %f)." % (X[i, 0], X[i, 1], X[i, 2], chordDir[0], chordDir[1], chordDir[2]) ) - lePts[i] = up # Check that points can form radius d = np.linalg.norm(coords[:, 0, :] - coords[:, 1, :], axis=1) diff --git a/pygeo/mphys/mphys_dvgeo.py b/pygeo/mphys/mphys_dvgeo.py index 0a73f8ae..ba14bf46 100644 --- a/pygeo/mphys/mphys_dvgeo.py +++ b/pygeo/mphys/mphys_dvgeo.py @@ -136,8 +136,8 @@ def nom_addChild(self, ffd_file, DVGeoName=None, childName=None): ) # Add child FFD - child_ffd = DVGeometry(ffd_file, child=True) - DVGeo.addChild(child_ffd, childName=childName) + child_ffd = DVGeometry(ffd_file, child=True, name=childName) + DVGeo.addChild(child_ffd) # Embed points from parent if not already done for pointSet in DVGeo.points: @@ -228,7 +228,9 @@ def nom_getDVCon(self): Wrapper for DVGeo functions """ - def nom_addGlobalDV(self, dvName, value, func, childName=None, isComposite=False, DVGeoName=None): + def nom_addGlobalDV( + self, dvName, value, func, childName=None, isComposite=False, DVGeoName=None, prependName=False + ): """ Add a global design variable to the DVGeo object. This is a wrapper for the DVGeo.addGlobalDV method. @@ -259,17 +261,19 @@ def nom_addGlobalDV(self, dvName, value, func, childName=None, isComposite=False """ # if we have multiple DVGeos use the one specified by name - DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + DVGeo = self.nom_getDVGeo(childName=childName, DVGeoName=DVGeoName) # global DVs are only added to FFD-based DVGeo objects if not isinstance(DVGeo, DVGeometry): raise RuntimeError(f"Only FFD-based DVGeo objects can use global DVs, not type: {type(DVGeo).__name__}") + # if this DVGeo object has a name attribute, prepend it to match up with what DVGeo is expecting + # this keeps track of DVs between multiple DVGeo objects + if DVGeoName is not None and prependName: + dvName = DVGeoName + "_" + dvName + # call the dvgeo object and add this dv - if childName is None: - DVGeo.addGlobalDV(dvName, value, func) - else: - DVGeo.children[childName].addGlobalDV(dvName, value, func) + DVGeo.addGlobalDV(dvName, value, func, prependName=False) # define the input # When composite DVs are used, input is not required for the default DVs. 
Now the composite DVs are @@ -277,24 +281,23 @@ def nom_addGlobalDV(self, dvName, value, func, childName=None, isComposite=False if not isComposite: self.add_input(dvName, distributed=False, shape=len(np.atleast_1d(value))) - # call the dvgeo object and add this dv - if childName is None: - DVGeo.addGlobalDV(dvName, value, func) - else: - DVGeo.children[childName].addGlobalDV(dvName, value, func) - - def nom_addLocalDV(self, dvName, axis="y", pointSelect=None, childName=None, isComposite=False, DVGeoName=None): + def nom_addLocalDV( + self, dvName, axis="y", pointSelect=None, childName=None, isComposite=False, DVGeoName=None, prependName=False + ): # if we have multiple DVGeos use the one specified by name - DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + DVGeo = self.nom_getDVGeo(childName=childName, DVGeoName=DVGeoName) # local DVs are only added to FFD-based DVGeo objects if not isinstance(DVGeo, DVGeometry): raise RuntimeError(f"Only FFD-based DVGeo objects can use local DVs, not type: {type(DVGeo).__name__}") - if childName is None: - nVal = DVGeo.addLocalDV(dvName, axis=axis, pointSelect=pointSelect) - else: - nVal = DVGeo.children[childName].addLocalDV(dvName, axis=axis, pointSelect=pointSelect) + # if this DVGeo object has a name attribute, prepend it to match up with what DVGeo is expecting + # this keeps track of DVs between multiple DVGeo objects + if DVGeoName is not None and prependName: + dvName = DVGeoName + "_" + dvName + + # add the DV to DVGeo + nVal = DVGeo.addLocalDV(dvName, axis=axis, pointSelect=pointSelect, prependName=False) # define the input # When composite DVs are used, input is not required for the default DVs. Now the composite DVs are @@ -315,6 +318,7 @@ def nom_addLocalSectionDV( orient2="svd", config=None, DVGeoName=None, + prependName=False, ): """ Add one or more section local design variables to the DVGeometry object @@ -365,7 +369,7 @@ def nom_addLocalSectionDV( """ # if we have multiple DVGeos use the one specified by name - DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + DVGeo = self.nom_getDVGeo(childName=childName, DVGeoName=DVGeoName) # local DVs are only added to FFD-based DVGeo objects if not isinstance(DVGeo, DVGeometry): @@ -373,27 +377,21 @@ def nom_addLocalSectionDV( f"Only FFD-based DVGeo objects can use local section DVs, not type: {type(DVGeo).__name__}" ) - # add the DV to a normal DVGeo - if childName is None: - nVal = DVGeo.addLocalSectionDV(dvName, secIndex, axis, pointSelect, volList, orient0, orient2, config) - # add the DV to a child DVGeo - else: - nVal = DVGeo.children[childName].addLocalSectionDV( - dvName, - secIndex, - axis, - pointSelect, - volList, - orient0, - orient2, - config, - ) + # if this DVGeo object has a name attribute, prepend it to match up with what DVGeo is expecting + # this keeps track of DVs between multiple DVGeo objects + if DVGeoName is not None and prependName: + dvName = DVGeoName + "_" + dvName + + # add the DV to DVGeo + nVal = DVGeo.addLocalSectionDV( + dvName, secIndex, axis, pointSelect, volList, orient0, orient2, config, prependName=False + ) # define the input self.add_input(dvName, distributed=False, shape=nVal) return nVal - def nom_addShapeFunctionDV(self, dvName, shapes, childName=None, config=None, DVGeoName=None): + def nom_addShapeFunctionDV(self, dvName, shapes, childName=None, config=None, DVGeoName=None, prependName=False): """ Add one or more local shape function design variables to the DVGeometry object Wrapper for :meth:`addShapeFunctionDV <.DVGeometry.addShapeFunctionDV>` @@ -428,7 
+426,7 @@ def nom_addShapeFunctionDV(self, dvName, shapes, childName=None, config=None, DV """ # if we have multiple DVGeos use the one specified by name - DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + DVGeo = self.nom_getDVGeo(childName=childName, DVGeoName=DVGeoName) # shape function DVs are only added to FFD-based DVGeo objects if not isinstance(DVGeo, DVGeometry): @@ -436,23 +434,31 @@ def nom_addShapeFunctionDV(self, dvName, shapes, childName=None, config=None, DV f"Only FFD-based DVGeo objects can use shape function DVs, not type: {type(DVGeo).__name__}" ) - # add the DV to a normal DVGeo - if childName is None: - nVal = DVGeo.addShapeFunctionDV(dvName, shapes, config) - # add the DV to a child DVGeo - else: - nVal = DVGeo.children[childName].addShapeFunctionDV(dvName, shapes, config) + # if this DVGeo object has a name attribute, prepend it to match up with what DVGeo is expecting + # this keeps track of DVs between multiple DVGeo objects + if DVGeoName is not None and prependName: + dvName = DVGeoName + "_" + dvName + + # add the DV to DVGeo + nVal = DVGeo.addShapeFunctionDV(dvName, shapes, config, prependName=False) # define the input self.add_input(dvName, distributed=False, shape=nVal) return nVal - def nom_addGeoCompositeDV(self, dvName, ptSetName=None, u=None, scale=None, DVGeoName=None, **kwargs): + def nom_addGeoCompositeDV( + self, dvName, ptSetName=None, u=None, scale=None, DVGeoName=None, prependName=False, **kwargs + ): # if we have multiple DVGeos use the one specified by name DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + # if this DVGeo object has a name attribute, prepend it to match up with what DVGeo is expecting + # this keeps track of DVs between multiple DVGeo objects + if DVGeoName is not None and prependName: + dvName = DVGeoName + "_" + dvName + # call the dvgeo object and add this dv - DVGeo.addCompositeDV(dvName, ptSetName=ptSetName, u=u, scale=scale, **kwargs) + DVGeo.addCompositeDV(dvName, ptSetName=ptSetName, u=u, scale=scale, prependName=False, **kwargs) val = DVGeo.getValues() # define the input @@ -503,18 +509,15 @@ def nom_addESPVariable(self, desmptr_name, isComposite=False, DVGeoName=None, ** def nom_addRefAxis(self, childName=None, DVGeoName=None, **kwargs): # if we have multiple DVGeos use the one specified by name - DVGeo = self.nom_getDVGeo(DVGeoName=DVGeoName) + DVGeo = self.nom_getDVGeo(childName=childName, DVGeoName=DVGeoName) # references axes are only needed in FFD-based DVGeo objects if not isinstance(DVGeo, DVGeometry): raise RuntimeError(f"Only FFD-based DVGeo objects can use reference axes, not type: {type(DVGeo).__name__}") # add ref axis to this DVGeo - if childName is None: - return DVGeo.addRefAxis(**kwargs) + return DVGeo.addRefAxis(**kwargs) # add ref axis to the specified child - else: - return DVGeo.children[childName].addRefAxis(**kwargs) """ Wrapper for DVCon functions @@ -531,6 +534,7 @@ def nom_addThicknessConstraints2D( surfaceName="default", DVGeoName="default", compNames=None, + projected=False, ): self.DVCon.addThicknessConstraints2D( leList, @@ -542,11 +546,21 @@ def nom_addThicknessConstraints2D( surfaceName=surfaceName, DVGeoName=DVGeoName, compNames=compNames, + projected=projected, ) self.add_output(name, distributed=False, val=np.ones((nSpan * nChord,)), shape=nSpan * nChord) def nom_addThicknessConstraints1D( - self, name, ptList, nCon, axis, scaled=True, surfaceName="default", DVGeoName="default", compNames=None + self, + name, + ptList, + nCon, + axis, + scaled=True, + surfaceName="default", + 
DVGeoName="default", + compNames=None, + projected=False, ): self.DVCon.addThicknessConstraints1D( ptList, @@ -557,10 +571,22 @@ def nom_addThicknessConstraints1D( surfaceName=surfaceName, DVGeoName=DVGeoName, compNames=compNames, + projected=projected, ) self.add_output(name, distributed=False, val=np.ones(nCon), shape=nCon) - def nom_addVolumeConstraint(self, name, leList, teList, nSpan=10, nChord=10, scaled=True, surfaceName="default"): + def nom_addVolumeConstraint( + self, + name, + leList, + teList, + nSpan=10, + nChord=10, + scaled=True, + surfaceName="default", + DVGeoName="default", + compNames=None, + ): """ Add a DVCon volume constraint to the problem Wrapper for :meth:`addVolumeConstraint <.DVConstraints.addVolumeConstraint>` @@ -582,14 +608,55 @@ def nom_addVolumeConstraint(self, name, leList, teList, nSpan=10, nChord=10, sca See wrapped surfaceName : str, optional See wrapped + DVGeoName : str, optional + See wrapped + compNames : list, optional + See wrapped """ self.DVCon.addVolumeConstraint( - leList, teList, nSpan=nSpan, nChord=nChord, scaled=scaled, name=name, surfaceName=surfaceName + leList, + teList, + nSpan=nSpan, + nChord=nChord, + scaled=scaled, + name=name, + surfaceName=surfaceName, + DVGeoName=DVGeoName, + compNames=compNames, ) self.add_output(name, distributed=False, val=1.0) - def nom_addProjectedAreaConstraint(self, name, axis, scaled=True, surface_name="default"): + def nom_addSurfaceAreaConstraint( + self, name, scaled=True, surfaceName="default", DVGeoName="default", compNames=None + ): + """ + Add a DVCon surface area constraint to the problem + Wrapper for :meth:`addSurfaceAreaConstraint <.DVConstraints.addSurfaceAreaConstraint>` + Input parameters are identical to those in wrapped function unless otherwise specified + + Parameters + ---------- + name : + See wrapped + scaled : bool, optional + See wrapped + surfaceName : str, optional + See wrapped + DVGeoName : str, optional + See wrapped + compNames : list, optional + See wrapped + """ + + self.DVCon.addSurfaceAreaConstraint( + name=name, scaled=scaled, surfaceName=surfaceName, DVGeoName=DVGeoName, compNames=compNames + ) + self.add_output(name, distributed=False, val=1.0) + + def nom_addProjectedAreaConstraint( + self, name, axis, scaled=True, surface_name="default", DVGeoName="default", compNames=None + ): """ Add a DVCon projected area constraint to the problem Wrapper for :meth:`addProjectedAreaConstraint <.DVConstraints.addProjectedAreaConstraint>` @@ -605,9 +672,15 @@ def nom_addProjectedAreaConstraint(self, name, axis, scaled=True, surface_name=" See wrapped surface_name : str, optional See wrapped + DVGeoName : str, optional + See wrapped + compNames : list, optional + See wrapped """ - self.DVCon.addProjectedAreaConstraint(axis, name=name, scaled=scaled, surfaceName=surface_name) + self.DVCon.addProjectedAreaConstraint( + axis, name=name, scaled=scaled, surfaceName=surface_name, DVGeoName=DVGeoName, compNames=compNames + ) self.add_output(name, distributed=False, val=1.0) def nom_add_LETEConstraint(self, name, volID, faceID, topID=None, childName=None): @@ -626,9 +699,17 @@ def nom_addCurvatureConstraint1D(self, name, start, end, nPts, axis, **kwargs): self.DVCon.addCurvatureConstraint1D(start=start, end=end, nPts=nPts, axis=axis, name=name, **kwargs) self.add_output(name, distributed=False, val=1.0) - def nom_addLinearConstraintsShape(self, name, indSetA, indSetB, factorA, factorB, childName=None): + def nom_addLinearConstraintsShape( + self, name, indSetA, indSetB, factorA, factorB, 
childName=None, DVGeoName="default" + ): self.DVCon.addLinearConstraintsShape( - indSetA=indSetA, indSetB=indSetB, factorA=factorA, factorB=factorB, name=name, childName=childName + indSetA=indSetA, + indSetB=indSetB, + factorA=factorA, + factorB=factorB, + name=name, + childName=childName, + DVGeoName=DVGeoName, ) lSize = len(indSetA) self.add_output(name, distributed=False, val=np.zeros(lSize), shape=lSize) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index b4cf0701..2e70ce98 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -77,25 +77,56 @@ class DVGeometry(BaseDVGeometry): Default is a 4th order spline in each direction if the dimensions allow. + volBounds : dict + Dictionary where volume embedding bounds for each FFD volume is specified. + Keys of the dictionary specifies the FFD volume index. Values are lists of lists. + First list contains the min and max bounds for the u parameter, second v, third w. + This parameter can also be set after initialization using the `setVolBounds` method. + For example if the FFD has 3 volumes, setting volBounds to: + + >>> volBounds = { + >>> 0: [[0., 0.5], [0., 1.], [0., 1.]], + >>> 1: [[0., 1.], [0.5, 1.], [0., 1.]] + >>> } + + will set the parametric bounds of the first and second volumes, while the third + volume can still embed points using the usual bounds of 0 to 1 for all parametric + directions. In this example, the first volume only embeds points if the u coordinate + of the projection is between 0 and 0.5. Similarly, the second volume only embeds + a point if the v coordinate of the projection is between 0.5 and 1.0. This is useful + when multiple overlapping FFD volumes are used to either mimic circular or symmetric + FFDs. + Examples -------- The general sequence of operations for using DVGeometry is as follows:: - >>> from pygeo import DVGeometry - >>> DVGeo = DVGeometry('FFD_file.fmt') - >>> # Embed a set of coordinates Xpt into the object - >>> DVGeo.addPointSet(Xpt, 'myPoints') - >>> # Associate a 'reference axis' for large-scale manipulation - >>> DVGeo.addRefAxis('wing_axis', axis_curve) - >>> # Define a global design variable function: - >>> def twist(val, geo): - >>> geo.rot_z['wing_axis'].coef[:] = val[:] - >>> # Now add this as a global variable: - >>> DVGeo.addGlobalDV('wing_twist', 0.0, twist, lower=-10, upper=10) - >>> # Now add local (shape) variables - >>> DVGeo.addLocalDV('shape', lower=-0.5, upper=0.5, axis='y') + >>> from pygeo import DVGeometry + >>> DVGeo = DVGeometry('FFD_file.fmt') + >>> # Embed a set of coordinates Xpt into the object + >>> DVGeo.addPointSet(Xpt, 'myPoints') + >>> # Associate a 'reference axis' for large-scale manipulation + >>> DVGeo.addRefAxis('wing_axis', axis_curve) + >>> # Define a global design variable function: + >>> def twist(val, geo): + >>> geo.rot_z['wing_axis'].coef[:] = val[:] + >>> # Now add this as a global variable: + >>> DVGeo.addGlobalDV('wing_twist', 0.0, twist, lower=-10, upper=10) + >>> # Now add local (shape) variables + >>> DVGeo.addLocalDV('shape', lower=-0.5, upper=0.5, axis='y') """ - def __init__(self, fileName, *args, isComplex=False, child=False, faceFreeze=None, name=None, kmax=4, **kwargs): + def __init__( + self, + fileName, + *args, + isComplex=False, + child=False, + faceFreeze=None, + name=None, + kmax=4, + volBounds=None, + **kwargs, + ): super().__init__(fileName=fileName, name=name) self.DV_listGlobal = OrderedDict() # Global Design Variable List @@ -125,10 +156,13 @@ def __init__(self, 
fileName, *args, isComplex=False, child=False, faceFreeze=Non else: self.dtype = "d" + if volBounds is None: + volBounds = {} + # Load the FFD file in FFD mode. Also note that args and # kwargs are passed through in case additional pyBlock options # need to be set. - self.FFD = pyBlock("plot3d", fileName=fileName, FFD=True, kmax=kmax, **kwargs) + self.FFD = pyBlock("plot3d", fileName=fileName, FFD=True, kmax=kmax, volBounds=volBounds, **kwargs) self.origFFDCoef = self.FFD.coef.copy() self.coef = None @@ -472,7 +506,7 @@ def addRefAxis( "rot0axis": rot0axis, } nAxis = len(curve.coef) - elif xFraction or yFraction or zFraction: + elif xFraction is not None or yFraction is not None or zFraction is not None: # Some assumptions # - FFD should be a close approximation of geometry surface so that # xFraction roughly corresponds to airfoil LE, TE, or 1/4 chord @@ -560,7 +594,7 @@ def addRefAxis( pts_vec[ct_, :] = p_rot # Temporary ref axis node coordinates - aligned with main system of reference - if xFraction: + if xFraction is not None: # getting the bounds of the FFD section x_min = np.min(pts_vec[:, 0]) x_max = np.max(pts_vec[:, 0]) @@ -568,14 +602,14 @@ def addRefAxis( else: x_node = np.mean(pts_vec[:, 0]) - if yFraction: + if yFraction is not None: y_min = np.min(pts_vec[:, 1]) y_max = np.max(pts_vec[:, 1]) y_node = y_max - yFraction * (y_max - y_min) # top-bottom else: y_node = np.mean(pts_vec[:, 1]) - if zFraction: + if zFraction is not None: z_min = np.min(pts_vec[:, 2]) z_max = np.max(pts_vec[:, 2]) z_node = z_max - zFraction * (z_max - z_min) # top-bottom @@ -811,7 +845,7 @@ def coordXfer(coords, mode="fwd", applyDisplacement=True, **kwargs): self.FFD.calcdPtdCoef(ptName) self.updated[ptName] = False - def addChild(self, childDVGeo, childName=None): + def addChild(self, childDVGeo): """Embed a child FFD into this object. An FFD child is a 'sub' FFD that is fully contained within @@ -840,8 +874,10 @@ def addChild(self, childDVGeo, childName=None): childDVGeo.iChild = iChild # check if a custom name is provided, if not, we will use the old naming scheme based on the iChild index - if childName is None: + if childDVGeo.name is None: childName = f"child{iChild:d}" + else: + childName = childDVGeo.name # check if this child name has already been used if childName in self.children: @@ -864,7 +900,7 @@ def addChild(self, childDVGeo, childName=None): # Add the child to the parent and return self.children[childName] = childDVGeo - def addGlobalDV(self, dvName, value, func, lower=None, upper=None, scale=1.0, config=None): + def addGlobalDV(self, dvName, value, func, lower=None, upper=None, scale=1.0, config=None, prependName=True): """ Add a global design variable to the DVGeometry object. This type of design variable acts on one or more reference axis. @@ -905,9 +941,19 @@ def addGlobalDV(self, dvName, value, func, lower=None, upper=None, scale=1.0, co Use a string for a single configuration or a list for multiple configurations. The default value of None implies that the design variable applies to *ALL* configurations. + prependName : bool + Flag to determine if self.name attribute is prepended to this DV name. + The self.name attribute of DVGeo objects can be useful when there + are several DVGeo objects kicking around. One example use case for this + is when we have multiple child DVGeos. 
In this case, initializing all + children DVGeos with the name kwarg helps with bookkeeping; however, + if the name attribute is not None, the default behavior is to modify + DV names such that the DVGeo's name is prepended to the user provided + name. For backwards compatability, this behavior is maintained, but + can be disabled by setting the prependName argument to False. """ # if the parent DVGeometry object has a name attribute, prepend it - if self.name is not None: + if self.name is not None and prependName: dvName = self.name + "_" + dvName if isinstance(config, str): @@ -915,7 +961,16 @@ def addGlobalDV(self, dvName, value, func, lower=None, upper=None, scale=1.0, co self.DV_listGlobal[dvName] = geoDVGlobal(dvName, value, lower, upper, scale, func, config) def addLocalDV( - self, dvName, lower=None, upper=None, scale=1.0, axis="y", volList=None, pointSelect=None, config=None + self, + dvName, + lower=None, + upper=None, + scale=1.0, + axis="y", + volList=None, + pointSelect=None, + config=None, + prependName=True, ): """ Add one or more local design variables ot the DVGeometry @@ -960,6 +1015,17 @@ def addLocalDV( configurations. The default value of None implies that the design variable applies to *ALL* configurations. + prependName : bool + Flag to determine if self.name attribute is prepended to this DV name. + The self.name attribute of DVGeo objects can be useful when there + are several DVGeo objects kicking around. One example use case for this + is when we have multiple child DVGeos. In this case, initializing all + children DVGeos with the name kwarg helps with bookkeeping; however, + if the name attribute is not None, the default behavior is to modify + DV names such that the DVGeo's name is prepended to the user provided + name. For backwards compatability, this behavior is maintained, but + can be disabled by setting the prependName argument to False. + Returns ------- N : int @@ -978,7 +1044,7 @@ def addLocalDV( >>> PS = geo_utils.PointSelect(type = 'y', pt1=[0,0,0], pt2=[10, 0, 10]) >>> nVar = DVGeo.addLocalDV('shape_vars', lower=-1.0, upper=1.0, pointSelect=PS) """ - if self.name is not None: + if self.name is not None and prependName: dvName = self.name + "_" + dvName if isinstance(config, str): @@ -1194,6 +1260,7 @@ def addLocalSectionDV( orient0=None, orient2="svd", config=None, + prependName=True, ): """ Add one or more section local design variables to the DVGeometry @@ -1315,6 +1382,17 @@ class in geo_utils. Using pointSelect discards everything in volList. configurations. The default value of None implies that the design variable applies to *ALL* configurations. + prependName : bool + Flag to determine if self.name attribute is prepended to this DV name. + The self.name attribute of DVGeo objects can be useful when there + are several DVGeo objects kicking around. One example use case for this + is when we have multiple child DVGeos. In this case, initializing all + children DVGeos with the name kwarg helps with bookkeeping; however, + if the name attribute is not None, the default behavior is to modify + DV names such that the DVGeo's name is prepended to the user provided + name. For backwards compatability, this behavior is maintained, but + can be disabled by setting the prependName argument to False. + Returns ------- N : int @@ -1326,7 +1404,7 @@ class in geo_utils. Using pointSelect discards everything in volList. 
>>> # moving in the 1 direction, within +/- 1.0 units >>> DVGeo.addLocalSectionDV('shape_vars', secIndex='k', lower=-1, upper=1, axis=1) """ - if self.name is not None: + if self.name is not None and prependName: dvName = self.name + "_" + dvName if isinstance(config, str): @@ -1400,7 +1478,7 @@ class in geo_utils. Using pointSelect discards everything in volList. return self.DV_listSectionLocal[dvName].nVal - def addCompositeDV(self, dvName, ptSetName=None, u=None, scale=None): + def addCompositeDV(self, dvName, ptSetName=None, u=None, scale=None, prependName=True): """ Add composite DVs. Note that this is essentially a preprocessing call which only works in serial at the moment. @@ -1415,9 +1493,19 @@ def addCompositeDV(self, dvName, ptSetName=None, u=None, scale=None): The u matrix used for the composite DV, by default None scale : float or ndarray, optional The scaling applied to this DV, by default None + prependName : bool + Flag to determine if self.name attribute is prepended to this DV name. + The self.name attribute of DVGeo objects can be useful when there + are several DVGeo objects kicking around. One example use case for this + is when we have multiple child DVGeos. In this case, initializing all + children DVGeos with the name kwarg helps with bookkeeping; however, + if the name attribute is not None, the default behavior is to modify + DV names such that the DVGeo's name is prepended to the user provided + name. For backwards compatability, this behavior is maintained, but + can be disabled by setting the prependName argument to False. """ NDV = self.getNDV() - if self.name is not None: + if self.name is not None and prependName: dvName = f"{self.name}_{dvName}" if u is not None: # we are after a square matrix @@ -1430,7 +1518,7 @@ def addCompositeDV(self, dvName, ptSetName=None, u=None, scale=None): if ptSetName is None: raise ValueError("If u and s need to be computed, you must specify the ptSetName") self.computeTotalJacobian(ptSetName) - J_full = self.JT[ptSetName].todense() # this is in CSR format but we convert it to a dense matrix + J_full = self.JT[ptSetName].toarray() # this is in CSR format but we convert it to a dense matrix u, s, _ = np.linalg.svd(J_full, full_matrices=False) scale = np.sqrt(s) # normalize the scaling @@ -1451,6 +1539,7 @@ def addShapeFunctionDV( upper=None, scale=1.0, config=None, + prependName=True, ): """ Add shape function design variables to the DVGeometry. @@ -1502,6 +1591,17 @@ def addShapeFunctionDV( configurations. The default value of None implies that the design variable applies to *ALL* configurations. + prependName : bool + Flag to determine if self.name attribute is prepended to this DV name. + The self.name attribute of DVGeo objects can be useful when there + are several DVGeo objects kicking around. One example use case for this + is when we have multiple child DVGeos. In this case, initializing all + children DVGeos with the name kwarg helps with bookkeeping; however, + if the name attribute is not None, the default behavior is to modify + DV names such that the DVGeo's name is prepended to the user provided + name. For backwards compatability, this behavior is maintained, but + can be disabled by setting the prependName argument to False. 
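A short illustration of the naming behavior may help here; it uses addGlobalDV, but the same rule applies to the other add*DV methods that accept prependName. The FFD file name and the dummy design-variable function are placeholders.

from pygeo import DVGeometry

# placeholder global DV function for the sketch
def dummyFunc(val, geo):
    pass

child = DVGeometry("child_ffd.xyz", child=True, name="child1")
child.addGlobalDV("span", 0.5, dummyFunc)                      # stored as "child1_span"
child.addGlobalDV("twist", 0.0, dummyFunc, prependName=False)  # stored as "twist"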
+ Returns ------- N : int @@ -1523,7 +1623,7 @@ def addShapeFunctionDV( >>> DVGeo.addShapeFunctionDV("shape_func", shapes) """ - if self.name is not None: + if self.name is not None and prependName: dvName = self.name + "_" + dvName if isinstance(config, str): @@ -1671,42 +1771,32 @@ def setDesignVars(self, dvDict): if self.useComposite: dvDict = self.mapXDictToDVGeo(dvDict) + def _checkArrLength(key, nIn, nRef): + if nIn != nRef: + raise Error( + f"Incorrect number of design variables for DV: {key}.\n" + + f"Expecting {nRef} variables but received {nIn}" + ) + for key in dvDict: if key in self.DV_listGlobal: vals_to_set = np.atleast_1d(dvDict[key]).astype("D") - if len(vals_to_set) != self.DV_listGlobal[key].nVal: - raise Error( - f"Incorrect number of design variables for DV: {key}.\n" - + f"Expecting {self.DV_listGlobal[key].nVal} variables but received {len(vals_to_set)}" - ) - + _checkArrLength(key, len(vals_to_set), self.DV_listGlobal[key].nVal) self.DV_listGlobal[key].value = vals_to_set if key in self.DV_listLocal: vals_to_set = np.atleast_1d(dvDict[key]).astype("D") - if len(vals_to_set) != self.DV_listLocal[key].nVal: - raise Error( - f"Incorrect number of design variables for DV: {key}.\n" - + f"Expecting {self.DV_listLocal[key].nVal} variables but received {len(vals_to_set)}" - ) + _checkArrLength(key, len(vals_to_set), self.DV_listLocal[key].nVal) self.DV_listLocal[key].value = vals_to_set if key in self.DV_listSectionLocal: vals_to_set = np.atleast_1d(dvDict[key]).astype("D") - if len(vals_to_set) != self.DV_listSectionLocal[key].nVal: - raise Error( - f"Incorrect number of design variables for DV: {key}.\n" - + f"Expecting {self.DV_listSectionLocal[key].nVal} variables but received {len(vals_to_set)}" - ) + _checkArrLength(key, len(vals_to_set), self.DV_listSectionLocal[key].nVal) self.DV_listSectionLocal[key].value = vals_to_set if key in self.DV_listSpanwiseLocal: vals_to_set = np.atleast_1d(dvDict[key]).astype("D") - if len(vals_to_set) != self.DV_listSpanwiseLocal[key].nVal: - raise Error( - f"Incorrect number of design variables for DV: {key}.\n" - + f"Expecting {self.DV_listSpanwiseLocal[key].nVal} variables but received {len(vals_to_set)}" - ) + _checkArrLength(key, len(vals_to_set), self.DV_listSpanwiseLocal[key].nVal) self.DV_listSpanwiseLocal[key].value = vals_to_set # Jacobians are, in general, no longer up to date @@ -1959,7 +2049,6 @@ def update(self, ptSetName, childDelta=True, config=None): variable applies to *ALL* configurations. """ - self.curPtSet = ptSetName # We've postponed things as long as we can...do the finalization. self._finalize() @@ -2840,7 +2929,9 @@ def addVariablesPyOpt( # then we simply return without adding any of the other DVs if self.useComposite: dv = self.DVComposite - optProb.addVarGroup(dv.name, dv.nVal, "c", value=dv.value, lower=dv.lower, upper=dv.upper, scale=dv.scale) + optProb.addVarGroup( + dv.name, dv.nVal, "c", value=dv.value.real, lower=dv.lower, upper=dv.upper, scale=dv.scale + ) # add the linear DV constraints that replace the existing bounds! # Note that we assume all DVs are added here, i.e. 
no ignoreVars or any of the vars = False @@ -2885,11 +2976,23 @@ def addVariablesPyOpt( dv = varLists[lst][key] if key not in freezeVars: optProb.addVarGroup( - dv.name, dv.nVal, "c", value=dv.value, lower=dv.lower, upper=dv.upper, scale=dv.scale + dv.name, + dv.nVal, + "c", + value=dv.value.real, + lower=dv.lower, + upper=dv.upper, + scale=dv.scale, ) else: optProb.addVarGroup( - dv.name, dv.nVal, "c", value=dv.value, lower=dv.value, upper=dv.value, scale=dv.scale + dv.name, + dv.nVal, + "c", + value=dv.value.real, + lower=dv.value, + upper=dv.value, + scale=dv.scale, ) # Add variables from the children @@ -2898,8 +3001,8 @@ def addVariablesPyOpt( optProb, globalVars, localVars, sectionlocalVars, spanwiselocalVars, ignoreVars, freezeVars ) - def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True, coordXfer=None): - """Write the (deformed) current state of the FFDs to a tecplot file, + def writeTecplot(self, fileName, solutionTime=None): + """Write the (deformed) current state of the FFD's to a tecplot file, including the children Parameters @@ -2909,13 +3012,6 @@ def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True, coordXf SolutionTime : float Solution time to write to the file. This could be a fictitious time to make visualization easier in tecplot. - writeEmbeding : bool - Whether to write the embedding volume in the file. - True by default for visualization but can be turned off for a leaner file. - coordXfer : callback func - Apply the coordinate transfer to the points before writing. If not, the - output is the deformed points in the FFD coordinate system. - None by default. """ # Name here doesn't matter, just take the first one @@ -2927,7 +3023,7 @@ def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True, coordXf vol_counter = 0 # Write master volumes: - vol_counter += self._writeVols(f, vol_counter, solutionTime, writeEmbedding, coordXfer) + vol_counter += self._writeVols(f, vol_counter, solutionTime) closeTecplot(f) if len(self.points) > 0: @@ -3254,6 +3350,35 @@ def demoDesignVars( # Reset DVs to their original values self.setDesignVars(dvDict) + def setVolBounds(self, volBounds): + """ + Routine to set the FFD embedding volume bounds after initialization + + Parameters + ---------- + volBounds : dict + Dictionary where volume embedding bounds for each FFD volume is specified. + Keys of the dictionary specifies the FFD volume index. Values are lists of lists. + First list contains the min and max bounds for the u parameter, second v, third w. + This parameter can also be set after initialization using the `setVolBounds` method. + For example if the FFD has 3 volumes, setting volBounds to: + + >>> volBounds = { + >>> 0: [[0., 0.5], [0., 1.], [0., 1.]], + >>> 1: [[0., 1.], [0.5, 1.], [0., 1.]] + >>> } + + will set the parametric bounds of the first and second volumes, while the third + volume can still embed points using the usual bounds of 0 to 1 for all parametric + directions. In this example, the first volume only embeds points if the u coordinate + of the projection is between 0 and 0.5. Similarly, the second volume only embeds + a point if the v coordinate of the projection is between 0.5 and 1.0. This is useful + when multiple overlapping FFD volumes are used to either mimic circular or symmetric + FFDs. 
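A brief sketch of both entry points follows; the FFD file name is a placeholder.

from pygeo import DVGeometry

# restrict the first FFD volume to the lower half of its u range at construction time
volBounds = {0: [[0.0, 0.5], [0.0, 1.0], [0.0, 1.0]]}
DVGeo = DVGeometry("ffd.xyz", volBounds=volBounds)

# later, widen the bounds again; as in the regression test, the updated bounds
# only affect point sets that are embedded after this call
DVGeo.setVolBounds({0: [[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]]})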
+ """ + # + self.FFD.setVolBounds(volBounds) + # ---------------------------------------------------------------------- # THE REMAINDER OF THE FUNCTIONS NEED NOT BE CALLED BY THE USER # ---------------------------------------------------------------------- @@ -3591,6 +3716,7 @@ def _getDVOffsets(self): self.nDVG_count = 0 self.nDVSL_count = self.nDVG_T self.nDVL_count = self.nDVG_T + self.nDVSL_T + self.nDVSW_count = self.nDVG_T + self.nDVSL_T + self.nDVL_T nDVG = self._getNDVGlobalSelf() nDVL = self._getNDVLocalSelf() @@ -3609,7 +3735,7 @@ def _getDVOffsets(self): child.nDVG_count = self.nDVG_count + nDVG child.nDVL_count = self.nDVL_count + nDVL child.nDVSL_count = self.nDVSL_count + nDVSL - child.nDVSW_count = self.nDVSW_count + nDVSL + child.nDVSW_count = self.nDVSW_count + nDVSW # Increment the counters for the children nDVG += child._getNDVGlobalSelf() @@ -4434,44 +4560,16 @@ def _cascadedDVJacobian(self, config=None): return Jacobian - def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding, coordXfer): + def _writeVols(self, handle, vol_counter, solutionTime): for i in range(len(self.FFD.vols)): - if coordXfer is not None: - data = self.FFD.vols[i].coef - ny = data.shape[1] - nz = data.shape[2] - FFDPts = np.zeros_like(data) - for k in range(nz): - for j in range(ny): - points = data[:, j, k, :] - FFDPt = coordXfer(points, mode="fwd", applyDisplacement=True) - FFDPts[:, j, k, :] = FFDPt - else: - FFDPts = self.FFD.vols[i].coef - - writeTecplot3D(handle, "FFD_vol%d" % i, FFDPts, solutionTime) + writeTecplot3D(handle, "FFD_vol%d" % i, self.FFD.vols[i].coef, solutionTime) self.FFD.vols[i].computeData(recompute=True) - - if writeEmbedding: - if coordXfer is not None: - data = self.FFD.vols[i].data - ny = data.shape[1] - nz = data.shape[2] - embeddingPts = np.zeros_like(data) - for k in range(nz): - for j in range(ny): - points = data[:, j, k, :] - embeddingPt = coordXfer(points, mode="fwd", applyDisplacement=True) - embeddingPts[:, j, k, :] = embeddingPt - else: - embeddingPts = self.FFD.vols[i].data - writeTecplot3D(handle, "embedding_vol", embeddingPts, solutionTime) - + writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) vol_counter += 1 # Write children volumes: for child in self.children.values(): - vol_counter += child._writeVols(handle, vol_counter, solutionTime, writeEmbedding) + vol_counter += child._writeVols(handle, vol_counter, solutionTime) return vol_counter diff --git a/pygeo/pyBlock.py b/pygeo/pyBlock.py index 83f3f647..2b1ae2b3 100644 --- a/pygeo/pyBlock.py +++ b/pygeo/pyBlock.py @@ -50,7 +50,7 @@ class pyBlock: allow. 
""" - def __init__(self, initType, fileName=None, FFD=False, symmPlane=None, kmax=4, **kwargs): + def __init__(self, initType, fileName=None, FFD=False, symmPlane=None, kmax=4, volBounds=None, **kwargs): self.initType = initType self.FFD = False self.topo = None # The topology of the volumes/surface @@ -59,7 +59,10 @@ def __init__(self, initType, fileName=None, FFD=False, symmPlane=None, kmax=4, * self.coef = None # The global (reduced) set of control pts self.embeddedVolumes = {} self.symmPlane = symmPlane - self.filename = fileName + if volBounds is None: + self.volBounds = {} + else: + self.volBounds = volBounds if initType == "plot3d": self._readPlot3D(fileName, FFD=FFD, kmax=kmax, **kwargs) @@ -882,7 +885,13 @@ def projectPoints(self, x0, interiorOnly, embTol, eps, nIter): for j in range(self.nVol): iVol = volList[j] - u0, v0, w0, D0 = self.vols[iVol].projectPoint(x0[i], eps=eps, nIter=nIter) + + if iVol in self.volBounds: + volBounds = self.volBounds[iVol] + else: + volBounds = None + + u0, v0, w0, D0 = self.vols[iVol].projectPoint(x0[i], eps=eps, nIter=nIter, volBounds=volBounds) D0Norm = np.linalg.norm(D0) # If the new distance is less than the previous best @@ -932,9 +941,10 @@ def projectPoints(self, x0, interiorOnly, embTol, eps, nIter): # Check to see if we have bad projections and print a warning: if counter > 0: - print(self.filename) - print(f" -> Warning: {counter} point(s) from {self.filename} not projected to tolerance {eps}.") - print(f"Max Error: {DMax:.6g} ; RMS Error: {DRms:.6g}") + print( + " -> Warning: %d point(s) not projected to tolerance: %g. " % (counter, eps) + + "Max Error: %12.6g ; RMS Error: %12.6g" % (DMax, DRms) + ) print("List of Points is: (pt, delta):") for i in range(len(badPts)): print( @@ -971,6 +981,10 @@ def getBounds(self): return Xmin, Xmax + def setVolBounds(self, volBounds): + # routine to update the volume bounds after initialization + self.volBounds = volBounds + class EmbeddedVolume: """A Container class for a set of embedded volume points diff --git a/tests/reg_tests/ref/test_active_children.ref b/tests/reg_tests/ref/test_active_children.ref index 77cc25e1..68d23389 100644 --- a/tests/reg_tests/ref/test_active_children.ref +++ b/tests/reg_tests/ref/test_active_children.ref @@ -27,7 +27,7 @@ 1 ] }, - "span2": { + "child2_span2": { "__ndarray__": [ [ 0.0 @@ -83,7 +83,7 @@ 1 ] }, - "span2": { + "child2_span2": { "__ndarray__": [ [ 0.375 @@ -139,7 +139,7 @@ 1 ] }, - "span2": { + "child2_span2": { "__ndarray__": [ [ 0.375 @@ -195,7 +195,7 @@ 1 ] }, - "span2": { + "child2_span2": { "__ndarray__": [ [ 0.0 diff --git a/tests/reg_tests/test_DVConstraints.py b/tests/reg_tests/test_DVConstraints.py index 54cb2f57..021b5dde 100644 --- a/tests/reg_tests/test_DVConstraints.py +++ b/tests/reg_tests/test_DVConstraints.py @@ -4,6 +4,7 @@ # External modules from baseclasses import BaseRegTest +from baseclasses.utils import Error as baseclassesError from mpi4py import MPI import numpy as np from parameterized import parameterized_class @@ -319,6 +320,53 @@ def test_thickness1D_box(self, train=False, refDeriv=False): funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 ) + def test_projected_thickness1D_box(self, train=False, refDeriv=False): + refFile = os.path.join(self.base_path, "ref/test_DVConstraints_projected_thickness1D_box.ref") + with BaseRegTest(refFile, train=train) as handler: + DVGeo, DVCon = self.generate_dvgeo_dvcon("box") + DVGeo.addLocalDV("local_x", lower=-0.5, upper=0.5, axis="x", scale=1) 
+ ptList = [[0.0, 0.0, 0.1], [0.0, 0.0, 5.0]] + ptList2 = [[-0.5, 0.0, 2.0], [0.5, 0.0, 2.0]] + DVCon.addThicknessConstraints1D(ptList, nCon=3, axis=[0, 1, 0], projected=True, scaled=False) + DVCon.addThicknessConstraints1D(ptList, nCon=3, axis=[1, 0, 0], projected=True, scaled=False) + DVCon.addThicknessConstraints1D(ptList2, nCon=3, axis=[0, 0, 1], projected=True, scaled=False) + + funcs, funcsSens = generic_test_base(DVGeo, DVCon, handler) + + # Check that unscaled thicknesses are computed correctly at baseline + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_0"], np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_1"], 2.0 * np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + + # add skew to one face + nodes = DVGeo.FFD.coef + dx = np.max(nodes[:, 0]) - np.min(nodes[:, 0]) + dy = np.max(nodes[:, 1]) - np.min(nodes[:, 1]) + y_scale = ((nodes[:, 1] - np.min(nodes[:, 1])) / dy) * (nodes[:, 0] - np.min(nodes[:, 0])) / dx + + DVGeo.setDesignVars({"local_x": y_scale}) + + funcs = {} + DVCon.evalFunctions(funcs) + + # DVCon1_thickness_constraints_0 should stay the same since the thickness constraint is projected! + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_0"], np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_1"], 2.5 * np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + # The z direction is uneffected by the changes + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(3), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + def test_thickness2D(self, train=False, refDeriv=False): refFile = os.path.join(self.base_path, "ref/test_DVConstraints_thickness2D.ref") with BaseRegTest(refFile, train=train) as handler: @@ -398,6 +446,64 @@ def test_thickness2D_box(self, train=False, refDeriv=False): funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 ) + def test_projected_thickness2D_box(self, train=False, refDeriv=False): + refFile = os.path.join(self.base_path, "ref/test_DVConstraints_projected_thickness2D_box.ref") + with BaseRegTest(refFile, train=train) as handler: + DVGeo, DVCon = self.generate_dvgeo_dvcon("box") + DVGeo.addLocalDV("local_x", lower=-0.5, upper=0.5, axis="x", scale=1) + + leList = [[-0.25, 0.0, 0.1], [-0.25, 0.0, 7.9]] + teList = [[0.75, 0.0, 0.1], [0.75, 0.0, 7.9]] + + leList2 = [[0.0, -0.25, 0.1], [0.0, -0.25, 7.9]] + teList2 = [[0.0, 0.25, 0.1], [0.0, 0.25, 7.9]] + + leList3 = [[-0.5, -0.25, 0.1], [0.5, -0.25, 0.1]] + teList3 = [[-0.5, 0.25, 0.1], [0.5, 0.25, 0.1]] + + DVCon.addThicknessConstraints2D(leList, teList, 2, 2, scaled=False, projected=True) + DVCon.addThicknessConstraints2D(leList2, teList2, 2, 2, scaled=False, projected=True) + DVCon.addThicknessConstraints2D(leList3, teList3, 2, 2, scaled=False, projected=True) + + funcs, funcsSens = generic_test_base(DVGeo, DVCon, handler) + # Check that unscaled thicknesses are computed correctly at baseline + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_0"], np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_1"], 2.0 * np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + 
funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + + # add skew to one face + nodes = DVGeo.FFD.coef + dx = np.max(nodes[:, 0]) - np.min(nodes[:, 0]) + dy = np.max(nodes[:, 1]) - np.min(nodes[:, 1]) + y_scale = ((nodes[:, 1] - np.min(nodes[:, 1])) / dy) * (nodes[:, 0] - np.min(nodes[:, 0])) / dx + + DVGeo.setDesignVars({"local_x": y_scale}) + + funcs = {} + DVCon.evalFunctions(funcs) + + # DVCon1_thickness_constraints_0 should stay the same since the thickness constraint is projected! + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_0"], np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_1"], + np.array([2.25, 2.75, 2.25, 2.75]), + name="thickness_base", + rtol=1e-7, + atol=1e-7, + ) + # The z direction is uneffected by the changes + handler.assert_allclose( + funcs["DVCon1_thickness_constraints_2"], 8.0 * np.ones(4), name="thickness_base", rtol=1e-7, atol=1e-7 + ) + def test_volume(self, train=False, refDeriv=False): refFile = os.path.join(self.base_path, "ref/test_DVConstraints_volume.ref") with BaseRegTest(refFile, train=train) as handler: @@ -966,6 +1072,141 @@ def test_LERadius(self, train=False, refDeriv=False): funcs, funcsSens = self.wing_test_deformed(DVGeo, DVCon, handler) +class RegTestProximity(unittest.TestCase): + N_PROCS = 1 + + def setUp(self): + # Store the path where this current script lives + # This all paths in the script are relative to this path + # This is needed to support testflo running directories and files as inputs + self.base_path = os.path.dirname(os.path.abspath(__file__)) + self.comm = MPI.COMM_WORLD + + def test_proximity_constraint(self, train=False, refDeriv=False): + refFile = os.path.join(self.base_path, "ref/test_proximity_constraints.ref") + with BaseRegTest(refFile, train=train) as handler: + # create a dvgeo with one parent, and two overlapping children + parent_ffd_file = os.path.join(self.base_path, "../../input_files/parent.xyz") + child_ffd_file = os.path.join(self.base_path, "../../input_files/box1.xyz") + + DVGeo_parent = DVGeometry(parent_ffd_file, name="parent") + # the children ffd box's extents are: + # x \in [-0.1, 1.1] + # y \in [-0.6, 0.6] + # z \in [-0.6, 0.6] + DVGeo_child1 = DVGeometry(child_ffd_file, child=True, name="child1") + DVGeo_child2 = DVGeometry(child_ffd_file, child=True, name="child2") + DVGeo_child1.addRefAxis("ref_axis1", xFraction=0.25, alignIndex="k") + DVGeo_child2.addRefAxis("ref_axis2", xFraction=0.25, alignIndex="k") + + DVGeo_parent.addChild(DVGeo_child1) + DVGeo_parent.addChild(DVGeo_child2) + + # add shape func DVs to each child DVGeo so that we can move them in the x dir + lindex1 = DVGeo_child1.getLocalIndex(0).flatten() + lindex2 = DVGeo_child2.getLocalIndex(0).flatten() + + shape1 = {} + shape2 = {} + # the two ffds are identical so we just have one loop + for idx in range(len(lindex1)): + idx1 = lindex1[idx] + idx2 = lindex2[idx] + shape1[idx1] = np.array([1.0, 0.0, 0.0]) + shape2[idx2] = np.array([1.0, 0.0, 0.0]) + + # dvgeo names will be prepended to the dv name to distinguish the two + DVGeo_child1.addShapeFunctionDV("x_disp", [shape1]) + DVGeo_child2.addShapeFunctionDV("x_disp", [shape2]) + + # create the dvcon object + DVCon = DVConstraints() + DVCon.setDVGeo(DVGeo_parent) + + # dummy tri meshes for dvcon to be used for projections. + # these represent different cfd surfaces. these can be arbitrarily large + # because they wont be embedded in the FFDs. 
+ # we have constant x coordinates for the surfaces. + p0 = np.array([0.1, -1.0, -1.0]) + p1 = np.array([0.1, 1.0, -1.0]) + p2 = np.array([0.1, 0.0, 2.0]) + surf1 = [[p0.copy()], [p1 - p0], [p2 - p0]] + + p0[0] = 0.2 + p1[0] = 0.2 + p2[0] = 0.2 + surf2 = [[p0.copy()], [p1 - p0], [p2 - p0]] + + # the last surface is outside the child FFDs + p0[0] = 1.2 + p1[0] = 1.2 + p2[0] = 1.2 + surf3 = [[p0.copy()], [p1 - p0], [p2 - p0]] + + # add three surfaces to dvcon + DVCon.setSurface(surf1, "surf1") + DVCon.setSurface(surf2, "surf2") + DVCon.setSurface(surf3, "surf3") + + # add the proximity constraints + # we want to add 2 constraints; + # first one goes from surf1 to surf2. The first point is in the first child, + # and second is in the second child. + DVCon.addProximityConstraints( + [np.array([0.15, 0.0, 0.0])], + [np.array([-1.0, 0.0, 0.0])], + "surf1", + "surf2", + pointSetKwargsA={"activeChildren": ["child1"]}, + pointSetKwargsB={"activeChildren": ["child2"]}, + scaled=False, + name="proximity1", + ) + + # second constraint goes from surf2 to surf3. The pt in surf2 is in the second child, + # the pt in surf3 is not added to any child FFDs + DVCon.addProximityConstraints( + [np.array([0.25, 0.0, 0.0])], + [np.array([-1.0, 0.0, 0.0])], + "surf2", + "surf3", + pointSetKwargsA={"activeChildren": ["child2"]}, + pointSetKwargsB={}, + scaled=False, + name="proximity2", + ) + + # add a third constraint where the vector direction is wrong and catch the error + with self.assertRaises(baseclassesError): + DVCon.addProximityConstraints( + [np.array([0.25, 0.0, 0.0])], + [np.array([1.0, 0.0, 0.0])], + "surf2", + "surf3", + pointSetKwargsA={"activeChildren": ["child2"]}, + pointSetKwargsB={}, + scaled=False, + name="proximity3", + ) + + # evaluate the original values + funcs = {} + DVCon.evalFunctions(funcs, includeLinear=True) + handler.root_add_dict("funcs_base", funcs, rtol=1e-10, atol=1e-10) + funcsSens = {} + DVCon.evalFunctionsSens(funcsSens, includeLinear=True) + handler.root_add_dict("sens_base", funcsSens, rtol=1e-10, atol=1e-10) + + # move child2 and re-evaluate + DVGeo_parent.setDesignVars({"child2_x_disp": 0.05}) + funcs = {} + DVCon.evalFunctions(funcs, includeLinear=True) + handler.root_add_dict("funcs_new", funcs, rtol=1e-10, atol=1e-10) + funcsSens = {} + DVCon.evalFunctionsSens(funcsSens, includeLinear=True) + handler.root_add_dict("sens_new", funcsSens, rtol=1e-10, atol=1e-10) + + @unittest.skipUnless(geogradInstalled, "requires geograd") class RegTestGeograd(unittest.TestCase): N_PROCS = 1 diff --git a/tests/reg_tests/test_DVGeometry.py b/tests/reg_tests/test_DVGeometry.py index afc83a4c..487c6a7b 100644 --- a/tests/reg_tests/test_DVGeometry.py +++ b/tests/reg_tests/test_DVGeometry.py @@ -1213,6 +1213,66 @@ def test_embedding_solver(self): np.testing.assert_allclose(test_points, new_points, atol=1e-15) + def train_volume_bounds(self, train=True): + self.test_volume_bounds(train=train) + + def test_volume_bounds(self, train=False): + refFile = os.path.join(self.base_path, "ref/test_vol_bounds.ref") + with BaseRegTest(refFile, train=train) as handler: + handler.root_print("Test point embedding with volume bounds") + + ffdfile = os.path.join(self.base_path, "../../input_files/outerBoxFFD.xyz") + + volBounds = { + 0: [[0.5, 1.0], [0.0, 0.5], [0.25, 0.75]], + 1: [[0.0, 1.0], [0.0, 1.0], [0.0, 0.5]], + } + + # initialize with custom volume bounds + DVGeo = DVGeometry(ffdfile, volBounds=volBounds) + DVGeo.addLocalDV("xdir", lower=-1.0, upper=1.0, axis="x", scale=1.0) + + # get a few points 
inside and outside the bound for the boxes + uvw = [ + np.array([0.75, 0.25, 0.4]), # in both boxes + np.array([0.75, 0.25, 0.6]), # in first box, outside second + np.array([0.25, 0.75, 0.25]), # in first box, outside second + ] + + # get the x-y-z coordinates of these points + pts0 = [] + pts1 = [] + for ii in range(len(uvw)): + pts0.append(DVGeo.FFD.vols[0](uvw[ii][0], uvw[ii][1], uvw[ii][2])) + pts1.append(DVGeo.FFD.vols[1](uvw[ii][0], uvw[ii][1], uvw[ii][2])) + + # project these points back into the FFD + DVGeo.addPointSet(pts0, "pts0") + DVGeo.addPointSet(pts1, "pts1") + + pts0_1 = DVGeo.update("pts0") + pts1_1 = DVGeo.update("pts1") + + handler.root_add_val("pts0_1", pts0_1, rtol=1e-12, atol=1e-12) + handler.root_add_val("pts1_1", pts1_1, rtol=1e-12, atol=1e-12) + + # change the bounds and do it again + volBounds = { + 0: [[0.8, 1.0], [0.0, 0.5], [0.25, 0.75]], + 1: [[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]], + } + DVGeo.setVolBounds(volBounds) + + # project these points back into the FFD + DVGeo.addPointSet(pts0, "pts0_new") + DVGeo.addPointSet(pts1, "pts1_new") + + pts0_2 = DVGeo.update("pts0_new") + pts1_2 = DVGeo.update("pts1_new") + + handler.root_add_val("pts0_2", pts0_2, rtol=1e-12, atol=1e-12) + handler.root_add_val("pts1_2", pts1_2, rtol=1e-12, atol=1e-12) + def test_coord_xfer(self): DVGeo, _ = commonUtils.setupDVGeo(self.base_path) @@ -1412,14 +1472,16 @@ def test_active_children(self, train=False): with BaseRegTest(refFile, train=train) as handler: handler.root_print("Test shape function DVs") - DVGeo, DVGeoChild1 = commonUtils.setupDVGeo(self.base_path) - _, DVGeoChild2 = commonUtils.setupDVGeo(self.base_path) + DVGeo, DVGeoChild1 = commonUtils.setupDVGeo(self.base_path, childName="child1") + _, DVGeoChild2 = commonUtils.setupDVGeo(self.base_path, childName="child2") # add design variables - DVGeoChild1.addGlobalDV(dvName="span1", value=0.5, func=commonUtils.spanX, lower=0.1, upper=10, scale=1) + DVGeoChild1.addGlobalDV( + dvName="span1", value=0.5, func=commonUtils.spanX, lower=0.1, upper=10, scale=1, prependName=False + ) DVGeoChild2.addGlobalDV(dvName="span2", value=0.5, func=commonUtils.spanX, lower=0.1, upper=10, scale=1) - DVGeo.addChild(DVGeoChild1, "child1") - DVGeo.addChild(DVGeoChild2, "child2") + DVGeo.addChild(DVGeoChild1) + DVGeo.addChild(DVGeoChild2) points = np.zeros([2, 3]) points[0, :] = [0.25, 0, 0] @@ -1457,7 +1519,7 @@ def test_active_children(self, train=False): handler.root_add_dict(f"dIdx_{ptName}", dIdx, rtol=1e-10, atol=1e-10) # perturb the DV and test point coordinates - xDV = {"span1": np.array([2.0]), "span2": np.array([3.0])} + xDV = {"span1": np.array([2.0]), "child2_span2": np.array([3.0])} DVGeo.setDesignVars(xDV) for ptName in ptNames: From ff076de36aef90bf2b99fc3fd02f8b659911cb44 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 17 May 2024 15:38:46 -0400 Subject: [PATCH 091/110] another one --- pygeo/constraints/thicknessConstraint.py | 231 +++++++++++++++++++++++ 1 file changed, 231 insertions(+) diff --git a/pygeo/constraints/thicknessConstraint.py b/pygeo/constraints/thicknessConstraint.py index 7b435705..a18b0b32 100644 --- a/pygeo/constraints/thicknessConstraint.py +++ b/pygeo/constraints/thicknessConstraint.py @@ -88,6 +88,127 @@ def writeTecplot(self, handle): handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2)) +class ProjectedThicknessConstraint(GeometricConstraint): + """ + DVConstraints representation of a set of projected thickness + constraints. 
One of these objects is created each time a + addThicknessConstraints2D or addThicknessConstraints1D call is + made. The user should not have to deal with this class directly. + + This is different from ThicknessConstraints becuase it measures the projected + thickness along the orginal direction of the constraint. + """ + + def __init__(self, name, coords, lower, upper, scaled, scale, DVGeo, addToPyOpt, compNames): + super().__init__(name, len(coords) // 2, lower, upper, scale, DVGeo, addToPyOpt) + + self.coords = coords + self.scaled = scaled + + # First thing we can do is embed the coordinates into DVGeo + # with the name provided: + self.DVGeo.addPointSet(self.coords, self.name, compNames=compNames) + + # Now get the reference lengths and directions + self.D0 = np.zeros(self.nCon) + self.dir_vec = np.zeros((self.nCon, 3)) + for i in range(self.nCon): + vec = self.coords[2 * i] - self.coords[2 * i + 1] + self.D0[i] = geo_utils.norm.euclideanNorm(vec) + self.dir_vec[i] = vec / self.D0[i] + + def evalFunctions(self, funcs, config): + """ + Evaluate the functions this object has and place in the funcs dictionary + + Parameters + ---------- + funcs : dict + Dictionary to place function values + """ + # Pull out the most recent set of coordinates: + self.coords = self.DVGeo.update(self.name, config=config) + D = np.zeros(self.nCon) + for i in range(self.nCon): + vec = self.coords[2 * i] - self.coords[2 * i + 1] + + # take the dot product with the direction vector + D[i] = vec[0] * self.dir_vec[i, 0] + vec[1] * self.dir_vec[i, 1] + vec[2] * self.dir_vec[i, 2] + + if self.scaled: + D[i] /= self.D0[i] + + funcs[self.name] = D + + def evalFunctionsSens(self, funcsSens, config): + """ + Evaluate the sensitivity of the functions this object has and + place in the funcsSens dictionary + + Parameters + ---------- + funcsSens : dict + Dictionary to place function values + """ + + nDV = self.DVGeo.getNDV() + if nDV > 0: + dTdPt = np.zeros((self.nCon, self.coords.shape[0], self.coords.shape[1])) + for i in range(self.nCon): + D_b = 1.0 + + # the reverse mode seeds still need to be scaled + if self.scaled: + D_b /= self.D0[i] + + # d(dot(vec,n))/d(vec) = n + # where vec = thickness vector + # and n = the reference direction + # This is easier to see if you write out the dot product + # dot(vec, n) = vec_1*n_1 + vec_2*n_2 + vec_3*n_3 + # d(dot(vec,n))/d(vec_1) = n_1 + # d(dot(vec,n))/d(vec_2) = n_2 + # d(dot(vec,n))/d(vec_3) = n_3 + vec_b = self.dir_vec[i] * D_b + + # the reverse mode of calculating vec is just scattering the seed of vec_b to the coords + # vec = self.coords[2 * i] - self.coords[2 * i + 1] + # we just set the coordinate seeds directly into the jacobian + dTdPt[i, 2 * i, :] = vec_b + dTdPt[i, 2 * i + 1, :] = -vec_b + + funcsSens[self.name] = self.DVGeo.totalSensitivity(dTdPt, self.name, config=config) + + def writeTecplot(self, handle): + """ + Write the visualization of this set of thickness constraints + to the open file handle + """ + + handle.write("Zone T=%s\n" % self.name) + handle.write("Nodes = %d, Elements = %d ZONETYPE=FELINESEG\n" % (len(self.coords), len(self.coords) // 2)) + handle.write("DATAPACKING=POINT\n") + for i in range(len(self.coords)): + handle.write(f"{self.coords[i, 0]:f} {self.coords[i, 1]:f} {self.coords[i, 2]:f}\n") + + for i in range(len(self.coords) // 2): + handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2)) + + # create a seperate zone to plot the projected direction for each thickness constraint + handle.write("Zone T=%s_ref_directions\n" % self.name) + 
handle.write("Nodes = %d, Elements = %d ZONETYPE=FELINESEG\n" % (len(self.dir_vec) * 2, len(self.dir_vec))) + handle.write("DATAPACKING=POINT\n") + + for i in range(self.nCon): + pt1 = self.coords[i * 2 + 1] + pt2 = pt1 + self.dir_vec[i] + handle.write(f"{pt1[0]:f} {pt1[1]:f} {pt1[2]:f}\n") + handle.write(f"{pt2[0]:f} {pt2[1]:f} {pt2[2]:f}\n") + + for i in range(self.nCon): + handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2)) + + class ThicknessToChordConstraint(GeometricConstraint): """ ThicknessToChordConstraint represents of a set of @@ -174,3 +295,113 @@ def writeTecplot(self, handle): for i in range(len(self.coords) // 2): handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2)) + + +class ProximityConstraint(GeometricConstraint): + """ + DVConstraints representation of a set of proximity + constraints. The user should not have to deal with this + class directly. + """ + + def __init__( + self, + name, + coordsA, + coordsB, + pointSetKwargsA, + pointSetKwargsB, + lower, + upper, + scaled, + scale, + DVGeo, + addToPyOpt, + compNames, + ): + super().__init__(name, len(coordsA), lower, upper, scale, DVGeo, addToPyOpt) + + self.coordsA = coordsA + self.coordsB = coordsB + self.scaled = scaled + + # First thing we can do is embed the coordinates into the DVGeo. + # ptsets A and B get different kwargs + self.DVGeo.addPointSet(self.coordsA, f"{self.name}_A", compNames=compNames, **pointSetKwargsA) + self.DVGeo.addPointSet(self.coordsB, f"{self.name}_B", compNames=compNames, **pointSetKwargsB) + + # Now get the reference lengths + self.D0 = np.zeros(self.nCon) + for i in range(self.nCon): + self.D0[i] = geo_utils.norm.euclideanNorm(self.coordsA[i] - self.coordsB[i]) + + def evalFunctions(self, funcs, config): + """ + Evaluate the functions this object has and place in the funcs dictionary + + Parameters + ---------- + funcs : dict + Dictionary to place function values + """ + # Pull out the most recent set of coordinates: + self.coordsA = self.DVGeo.update(f"{self.name}_A", config=config) + self.coordsB = self.DVGeo.update(f"{self.name}_B", config=config) + D = np.zeros(self.nCon) + for i in range(self.nCon): + D[i] = geo_utils.norm.euclideanNorm(self.coordsA[i] - self.coordsB[i]) + if self.scaled: + D[i] /= self.D0[i] + funcs[self.name] = D + + def evalFunctionsSens(self, funcsSens, config): + """ + Evaluate the sensitivity of the functions this object has and + place in the funcsSens dictionary + + Parameters + ---------- + funcsSens : dict + Dictionary to place function values + """ + + nDV = self.DVGeo.getNDV() + if nDV > 0: + dTdPtA = np.zeros((self.nCon, self.nCon, 3)) + dTdPtB = np.zeros((self.nCon, self.nCon, 3)) + + for i in range(self.nCon): + pAb, pBb = geo_utils.eDist_b(self.coordsA[i], self.coordsB[i]) + if self.scaled: + pAb /= self.D0[i] + pBb /= self.D0[i] + dTdPtA[i, i, :] = pAb + dTdPtB[i, i, :] = pBb + + funcSensA = self.DVGeo.totalSensitivity(dTdPtA, f"{self.name}_A", config=config) + funcSensB = self.DVGeo.totalSensitivity(dTdPtB, f"{self.name}_B", config=config) + + funcsSens[self.name] = {} + for key, value in funcSensA.items(): + funcsSens[self.name][key] = value + for key, value in funcSensB.items(): + if key in funcsSens[self.name]: + funcsSens[self.name][key] += value + else: + funcsSens[self.name][key] = value + + def writeTecplot(self, handle): + """ + Write the visualization of this set of thickness constraints + to the open file handle + """ + + handle.write("Zone T=%s\n" % self.name) + handle.write("Nodes = %d, Elements = %d ZONETYPE=FELINESEG\n" % (len(self.coordsA) * 
2, len(self.coordsA))) + handle.write("DATAPACKING=POINT\n") + for i in range(len(self.coordsA)): + handle.write(f"{self.coordsA[i, 0]:f} {self.coordsA[i, 1]:f} {self.coordsA[i, 2]:f}\n") + handle.write(f"{self.coordsB[i, 0]:f} {self.coordsB[i, 1]:f} {self.coordsB[i, 2]:f}\n") + + for i in range(len(self.coordsA)): + handle.write("%d %d\n" % (2 * i + 1, 2 * i + 2)) From 05495483a65123a99f95ad950e115e2a2a5fc134 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 31 May 2024 16:16:05 -0400 Subject: [PATCH 092/110] add this back --- pygeo/parameterization/DVGeo.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/pygeo/parameterization/DVGeo.py b/pygeo/parameterization/DVGeo.py index 2e70ce98..3de28866 100644 --- a/pygeo/parameterization/DVGeo.py +++ b/pygeo/parameterization/DVGeo.py @@ -3001,7 +3001,7 @@ def addVariablesPyOpt( optProb, globalVars, localVars, sectionlocalVars, spanwiselocalVars, ignoreVars, freezeVars ) - def writeTecplot(self, fileName, solutionTime=None): + def writeTecplot(self, fileName, solutionTime=None, writeEmbedding=True): """Write the (deformed) current state of the FFD's to a tecplot file, including the children @@ -3023,7 +3023,7 @@ def writeTecplot(self, fileName, solutionTime=None): vol_counter = 0 # Write master volumes: - vol_counter += self._writeVols(f, vol_counter, solutionTime) + vol_counter += self._writeVols(f, vol_counter, solutionTime, writeEmbedding) closeTecplot(f) if len(self.points) > 0: @@ -4560,16 +4560,19 @@ def _cascadedDVJacobian(self, config=None): return Jacobian - def _writeVols(self, handle, vol_counter, solutionTime): + def _writeVols(self, handle, vol_counter, solutionTime, writeEmbedding): for i in range(len(self.FFD.vols)): writeTecplot3D(handle, "FFD_vol%d" % i, self.FFD.vols[i].coef, solutionTime) self.FFD.vols[i].computeData(recompute=True) - writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) + + if writeEmbedding: + writeTecplot3D(handle, "embedding_vol", self.FFD.vols[i].data, solutionTime) + vol_counter += 1 # Write children volumes: for child in self.children.values(): - vol_counter += child._writeVols(handle, vol_counter, solutionTime) + vol_counter += child._writeVols(handle, vol_counter, solutionTime, writeEmbedding) return vol_counter From a21cf694f61b14d18b5a275958478ca3bbbee376 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 28 Jun 2024 11:36:30 -0400 Subject: [PATCH 093/110] separate out distances from warping and add new blending function --- pygeo/parameterization/DVGeoMulti.py | 81 +++++++++++++++++++--------- 1 file changed, 55 insertions(+), 26 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index dc665382..d5020504 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -527,16 +527,15 @@ def addPointSet( # compBInterPts, compBInterInd = IC.findIntersection( # points.astype(float), IC.compB.curvePts.astype(float) # ) - compAInterPtsLocal, compAInterIndLocal = IC.findIntersection(points, IC.compA.curvePts) - compBInterPtsLocal, compBInterIndLocal = IC.findIntersection(points, IC.compB.curvePts) + compAInterPtsLocal, compAInterIndLocal, compACurvePtDistLocal, compACurveIndLocal = IC.findIntersection(points, IC.compA.curvePts) + compBInterPtsLocal, compBInterIndLocal, compBCurvePtDistLocal, compBCurveIndLocal = IC.findIntersection(points, IC.compB.curvePts) # print(f"\nrank {self.comm.rank} local compAInterInd {compAInterIndLocal}") - compAInterNPts, 
compAInterSizes, compAInterPts, compAInterInd = IC._commCurveProj( - compAInterPtsLocal, compAInterIndLocal, self.comm - ) - compBInterNPts, compBInterSizes, compBInterPts, compBInterInd = IC._commCurveProj( - compBInterPtsLocal, compBInterIndLocal, self.comm - ) + _, _, compAInterPts, compAInterInd = IC._commCurveProj(compAInterPtsLocal, compAInterIndLocal, self.comm) + _, _, compBInterPts, compBInterInd = IC._commCurveProj(compBInterPtsLocal, compBInterIndLocal, self.comm) + + _, _, compACurvePtDist, _ = IC._commCurveProj(compACurvePtDistLocal, compACurveIndLocal, self.comm) + _, _, compBCurvePtDist, _ = IC._commCurveProj(compBCurvePtDistLocal, compBCurveIndLocal, self.comm) compAInterPts.dtype = self.dtype compBInterPts.dtype = self.dtype @@ -563,6 +562,10 @@ def addPointSet( IC.filletComp.compAInterInd = compAInterInd IC.filletComp.compBInterInd = compBInterInd + # save the point-curve distances in the fillet component + IC.filletComp.compACurvePtDist = compACurvePtDist + IC.filletComp.compBCurvePtDist = compBCurvePtDist + # save the names of the fillet intersection pointsets to find them later IC.filletComp.compAPtsName = compAPtsName IC.filletComp.compBPtsName = compBPtsName @@ -1443,7 +1446,7 @@ def setSurface(self, comm): self.seam = self._getIntersectionSeam(comm) - def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): + def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta, update=True): """ This function warps points using the displacements from curve projections. @@ -1452,6 +1455,7 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): indices: Indices of the points that we will use for this operation. curvePtCoords: Original coordinates of points on curves. delta: Displacements of the points on curves after projecting them. + update: Whether to update the coordinates in place. 
If not, ptsNew will just be the displacements """ @@ -1474,8 +1478,11 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta): for iDim in range(3): interp[iDim] = np.sum(Wi * delta[:, iDim]) / den - # finally, update the coord in place - ptsNew[j] = ptsNew[j] + interp + if update: + # finally, update the coord in place + ptsNew[j] = ptsNew[j] + interp + else: + ptsNew[j] = interp def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # seeds for delta @@ -3782,6 +3789,7 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True def findIntersection(self, surf, curve): # TODO fix this function nPtSurf = surf.shape[0] minSurfCurveDist = -np.ones(nPtSurf, dtype=self.dtype) + minSurfCurveDistInd = np.zeros(nPtSurf) intersectPts = [] intersectInd = [] @@ -3795,6 +3803,7 @@ def findIntersection(self, surf, curve): # TODO fix this function # find minimum of these distances and save it dist2ClosestPt = min(ptSurfCurveDist[0]) minSurfCurveDist[i] = dist2ClosestPt + minSurfCurveDistInd[i] = i # keep this as an intersection point if it is within tolerance if dist2ClosestPt < self.distTol: @@ -3803,7 +3812,7 @@ def findIntersection(self, surf, curve): # TODO fix this function intersectPts = np.asarray(intersectPts, dtype=self.dtype) - return intersectPts, intersectInd + return intersectPts, intersectInd, minSurfCurveDist, minSurfCurveDistInd def addPointSet(self, pts, ptSetName, compMap, comm): # Save the affected indices and the factor in the little dictionary @@ -3835,27 +3844,47 @@ def sens(self, dIdPt, ptSetName, comm): return compSens def project(self, ptSetName, newPts): - # redo the delta because this is how the fillet was initially set up - # TODO maybe stop doing this # update the pointset unless we haven't figured out the intersections yet if len(self.compA.curvePts) > 0: # TODO change to a first project flag or something # get delta of curve points to drive warping - newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) - curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) - delta = newCurveCoords - curvePtCoords + comps = [self.compA, self.compB] + sepDisp = [[], []] + + for ii, comp in enumerate(comps): + newCurveCoords = comp.curvePts + curvePtCoords = comp.curvePtsOrig + delta = newCurveCoords - curvePtCoords + + disp = self.filletComp.surfPtsOrig.copy() + pts0 = self.filletComp.surfPtsOrig + + self._warpSurfPts(pts0, disp, self.indices, curvePtCoords, delta, update=False) + sepDisp[ii] = disp + + # blend between the two displacements + for ii in range(self.filletComp.surfPts): + # distance from this point to each curve + dA = self.filletComp.compACurvePtDist + dB = self.filletComp.compBCurvePtDist + + # calculate weighting based on which curve is closer to this point + x = dA / (dA + dB) + + if dA < dB: + f = 4 * x ** 3 + elif dB >= dA: + f = 1 - 4 * x ** 3 - ptsNew = self.filletComp.surfPtsOrig.copy() - pts0 = self.filletComp.surfPtsOrig + # inverse distance-weighted displacement for this point from each curve + dxA = sepDisp[0][ii] + dxB = sepDisp[1][ii] - # warp interior fillet points - self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta) - self.filletComp.surfPts = ptsNew + # calculate displacement of this point + disp = dxA * (1.0 - f) + dxB * f + + self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp - # write curve coords from file to see which proc has which (all should have complete set) - # print(f"write curves from proc {self.DVGeo.comm.rank}") - # 
np.savetxt(f"compACurve{self.DVGeo.comm.rank}.txt", self.compA.curvePts) - # np.savetxt(f"compBCurve{self.DVGeo.comm.rank}.txt", self.compB.curvePts) def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = deepcopy(self.filletComp.surfPtsOrig) From ad6c606114a88f1bfaef06491792c0790c443fd7 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 28 Jun 2024 13:14:12 -0400 Subject: [PATCH 094/110] fix --- pygeo/parameterization/DVGeoMulti.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index d5020504..da2aabdf 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -534,8 +534,8 @@ def addPointSet( _, _, compAInterPts, compAInterInd = IC._commCurveProj(compAInterPtsLocal, compAInterIndLocal, self.comm) _, _, compBInterPts, compBInterInd = IC._commCurveProj(compBInterPtsLocal, compBInterIndLocal, self.comm) - _, _, compACurvePtDist, _ = IC._commCurveProj(compACurvePtDistLocal, compACurveIndLocal, self.comm) - _, _, compBCurvePtDist, _ = IC._commCurveProj(compBCurvePtDistLocal, compBCurveIndLocal, self.comm) + _, _, compACurvePtDist, _ = IC._commCurveProj(compACurvePtDistLocal, compACurveIndLocal, self.comm, reshape=False) + _, _, compBCurvePtDist, _ = IC._commCurveProj(compBCurvePtDistLocal, compBCurveIndLocal, self.comm, reshape=False) compAInterPts.dtype = self.dtype compBInterPts.dtype = self.dtype @@ -1514,7 +1514,7 @@ def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # return the seeds for the delta vector return deltaBar - def _commCurveProj(self, pts, indices, comm): + def _commCurveProj(self, pts, indices, comm, reshape=True): """ This function will get the points, indices, and comm. This function is called once for each feature curve. 
@@ -1552,7 +1552,10 @@ def _commCurveProj(self, pts, indices, comm): comm.Allgatherv(sendbuf, recvbuf) # reshape into a nptsg,3 array - curvePtCoords = ptsGlobal.reshape((nptsg, 3)) + if reshape: + curvePtCoords = ptsGlobal.reshape((nptsg, 3)) + else: + curvePtCoords = ptsGlobal # this is a "serial" pointset, so the results are just local else: @@ -3807,11 +3810,12 @@ def findIntersection(self, surf, curve): # TODO fix this function # keep this as an intersection point if it is within tolerance if dist2ClosestPt < self.distTol: + print(f"intersection {surfPt} ind {i} {dist2ClosestPt} away") intersectPts.append(surfPt) intersectInd.append(i) intersectPts = np.asarray(intersectPts, dtype=self.dtype) - + print(f"min { minSurfCurveDist}") return intersectPts, intersectInd, minSurfCurveDist, minSurfCurveDistInd def addPointSet(self, pts, ptSetName, compMap, comm): @@ -3863,10 +3867,13 @@ def project(self, ptSetName, newPts): sepDisp[ii] = disp # blend between the two displacements - for ii in range(self.filletComp.surfPts): + for ii in range(self.filletComp.surfPts.shape[0]): # distance from this point to each curve - dA = self.filletComp.compACurvePtDist - dB = self.filletComp.compBCurvePtDist + dA = self.filletComp.compACurvePtDist[ii] + dB = self.filletComp.compBCurvePtDist[ii] + + print(dA) + print(dB) # calculate weighting based on which curve is closer to this point x = dA / (dA + dB) From e2267d450e7a166c3bb638fc4f7d5664c186ef9a Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 17 Jul 2024 15:37:38 -0400 Subject: [PATCH 095/110] fix weighting for tangency, add old version back for comparison --- pygeo/parameterization/DVGeoMulti.py | 104 ++++++++++++++++----------- 1 file changed, 64 insertions(+), 40 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index da2aabdf..04b35a9f 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -244,6 +244,7 @@ def addIntersection( excludeSurfaces=None, remeshBwd=True, anisotropy=[1.0, 1.0, 1.0], + tangency=True, ): """ Method that defines intersections between components. 
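# A small sketch of the cubic blend that this tangency option drives; the
# weighting itself appears in FilletIntersection.project() further down in this
# patch. Assuming x = dA / (dA + dB) is the normalized distance to the compA
# intersection curve, the weight on compB's displacement is
#   f(x) = 4*x**3            for x <= 0.5 (closer to compA)
#   f(x) = 1 - 4*(1 - x)**3  for x >  0.5 (closer to compB)
# Both branches meet at f(0.5) = 0.5 and are flat at x = 0 and x = 1, so points
# near either intersection curve follow that curve's displacement almost
# exclusively, which is the behavior the tangency flag is named for. The helper
# below is only an illustrative restatement, not pyGeo code.
import numpy as np

def blend(x):
    # piecewise cubic weight on compB's displacement; (1 - blend) weights compA
    return np.where(x <= 0.5, 4.0 * x**3, 1.0 - 4.0 * (1.0 - x) ** 3)

print(blend(np.array([0.0, 0.25, 0.5, 0.75, 1.0])))  # [0. 0.0625 0.5 0.9375 1.]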
@@ -326,7 +327,7 @@ def addIntersection( if filletComp is None: print("no") - inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype) + inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype, tangency) # initialize a standard intersection object else: @@ -527,15 +528,27 @@ def addPointSet( # compBInterPts, compBInterInd = IC.findIntersection( # points.astype(float), IC.compB.curvePts.astype(float) # ) - compAInterPtsLocal, compAInterIndLocal, compACurvePtDistLocal, compACurveIndLocal = IC.findIntersection(points, IC.compA.curvePts) - compBInterPtsLocal, compBInterIndLocal, compBCurvePtDistLocal, compBCurveIndLocal = IC.findIntersection(points, IC.compB.curvePts) + compAInterPtsLocal, compAInterIndLocal, compACurvePtDistLocal, compACurveIndLocal = ( + IC.findIntersection(points, IC.compA.curvePts) + ) + compBInterPtsLocal, compBInterIndLocal, compBCurvePtDistLocal, compBCurveIndLocal = ( + IC.findIntersection(points, IC.compB.curvePts) + ) # print(f"\nrank {self.comm.rank} local compAInterInd {compAInterIndLocal}") - _, _, compAInterPts, compAInterInd = IC._commCurveProj(compAInterPtsLocal, compAInterIndLocal, self.comm) - _, _, compBInterPts, compBInterInd = IC._commCurveProj(compBInterPtsLocal, compBInterIndLocal, self.comm) + _, _, compAInterPts, compAInterInd = IC._commCurveProj( + compAInterPtsLocal, compAInterIndLocal, self.comm + ) + _, _, compBInterPts, compBInterInd = IC._commCurveProj( + compBInterPtsLocal, compBInterIndLocal, self.comm + ) - _, _, compACurvePtDist, _ = IC._commCurveProj(compACurvePtDistLocal, compACurveIndLocal, self.comm, reshape=False) - _, _, compBCurvePtDist, _ = IC._commCurveProj(compBCurvePtDistLocal, compBCurveIndLocal, self.comm, reshape=False) + _, _, compACurvePtDist, _ = IC._commCurveProj( + compACurvePtDistLocal, compACurveIndLocal, self.comm, reshape=False + ) + _, _, compBCurvePtDist, _ = IC._commCurveProj( + compBCurvePtDistLocal, compBCurveIndLocal, self.comm, reshape=False + ) compAInterPts.dtype = self.dtype compBInterPts.dtype = self.dtype @@ -3780,12 +3793,14 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, project=True): + def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, tangency, project=True): super().__init__(compA, compB, distTol, DVGeo, project, dtype) self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self self.firstUpdate = True + self.tangency = tangency + # dict to keep track of the total number of points on each curve # self.nCurvePts = {} @@ -3851,47 +3866,56 @@ def project(self, ptSetName, newPts): # update the pointset unless we haven't figured out the intersections yet if len(self.compA.curvePts) > 0: # TODO change to a first project flag or something - # get delta of curve points to drive warping - comps = [self.compA, self.compB] - sepDisp = [[], []] + if self.tangency: + # get delta of curve points to drive warping + comps = [self.compA, self.compB] + sepDisp = [[], []] - for ii, comp in enumerate(comps): - newCurveCoords = comp.curvePts - curvePtCoords = comp.curvePtsOrig - delta = newCurveCoords - curvePtCoords + for ii, comp in enumerate(comps): + newCurveCoords = comp.curvePts + curvePtCoords = comp.curvePtsOrig + delta = newCurveCoords - curvePtCoords - disp = self.filletComp.surfPtsOrig.copy() - pts0 = self.filletComp.surfPtsOrig + disp = 
deepcopy(self.filletComp.surfPtsOrig) + pts0 = self.filletComp.surfPtsOrig - self._warpSurfPts(pts0, disp, self.indices, curvePtCoords, delta, update=False) - sepDisp[ii] = disp + self._warpSurfPts(pts0, disp, self.indices, curvePtCoords, delta, update=False) + sepDisp[ii] = disp - # blend between the two displacements - for ii in range(self.filletComp.surfPts.shape[0]): - # distance from this point to each curve - dA = self.filletComp.compACurvePtDist[ii] - dB = self.filletComp.compBCurvePtDist[ii] + # blend between the two displacements + for ii in range(self.filletComp.surfPts.shape[0]): + # distance from this point to each curve + dA = self.filletComp.compACurvePtDist[ii] + dB = self.filletComp.compBCurvePtDist[ii] - print(dA) - print(dB) + # calculate weighting based on which curve is closer to this point + x = dA / (dA + dB) - # calculate weighting based on which curve is closer to this point - x = dA / (dA + dB) + if dA < dB: + f = 4 * x**3 + elif dB <= dA: + f = 1 - 4 * (1 - x) ** 3 - if dA < dB: - f = 4 * x ** 3 - elif dB >= dA: - f = 1 - 4 * x ** 3 + # inverse distance-weighted displacement for this point from each curve + dxA = sepDisp[0][ii] + dxB = sepDisp[1][ii] - # inverse distance-weighted displacement for this point from each curve - dxA = sepDisp[0][ii] - dxB = sepDisp[1][ii] + # calculate displacement of this point + disp = dxA * (1.0 - f) + dxB * f - # calculate displacement of this point - disp = dxA * (1.0 - f) + dxB * f - - self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp + self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp + + else: + newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) + curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) + delta = newCurveCoords - curvePtCoords + + ptsNew = self.filletComp.surfPtsOrig.copy() + pts0 = self.filletComp.surfPtsOrig + # warp interior fillet points + self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta, update=True) + self.filletComp.surfPts = ptsNew def project_b(self, ptSetName, dIdpt, comm=None, comp=None): points = deepcopy(self.filletComp.surfPtsOrig) @@ -3900,7 +3924,7 @@ def project_b(self, ptSetName, dIdpt, comm=None, comp=None): # TODO these pointsets should never some here from totalSensitivity if ptSetName == self.filletComp.compAPtsName or ptSetName == self.filletComp.compBPtsName: print("no") - return compSens + # return compSens # number of functions we have N = dIdpt.shape[0] From 672e1e8724c5e17aa064c9b44545538d405551fc Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 19 Jul 2024 15:39:22 -0400 Subject: [PATCH 096/110] handle the dat file pointwise exports without manual edits --- pygeo/parameterization/DVGeoMulti.py | 42 ++++++++++++++++++++++++---- 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index da2aabdf..6ffb40ca 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -367,6 +367,15 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru """ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame and the callback function needs to be passed in + + curveFiles assumes you have a dat file or files in the format Pointwise exports or a similar version + This is either a list of files where each looks like: + row 1: header + row 2-end: x y z of point on curve + or a singular file with similar sections for each 
individual connector: + row 1: header specifying how many points are on that connector + row 2-that number: x y z of point + where this repeats for as many segments make up the connector """ if not self.filletIntersection: print("no") # TODO real error @@ -1249,13 +1258,34 @@ def _readDATFile(self, filename, surf=True): points = surfPts[surfPts[:, 0].argsort()] else: curves = [] - for f in filename: - curvePts = np.loadtxt(f, skiprows=1, dtype=self.dtype) - curves.append(curvePts) - points = curves[0] - for i in range(1, len(filename)): - points = np.vstack((points, curves[i])) + # list of filenames + # assume each file looks like + # row 1: header + # row 2-end: x y z of point on connector + if isinstance(filename, list): + for f in filename: + curvePts = np.loadtxt(f, skiprows=1, dtype=self.dtype) + curves.append(curvePts) + + # singular file + # assume it is made up of sections for each connector from the mesh + # where each looks like + # row 1: header containing number of points on connector + # row 2 - numPoints: x y z of point + elif isinstance(filename, str): + begin = 0 + with open(filename) as file: + while line := file.readline(): + if " " not in line: + skip = int(line) + + temp = np.loadtxt(filename, skiprows=begin+1, max_rows=skip) + begin += skip + 1 + + curves.append(temp) + + points = np.vstack(curves) return points From 324bbec8d9e3877862d0b2ce3852efd92de8e38f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 24 Jul 2024 14:29:44 -0400 Subject: [PATCH 097/110] start framework for rotations --- pygeo/parameterization/DVGeoMulti.py | 44 +++++++++++++++++++++------- 1 file changed, 33 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 04b35a9f..8f3e1c69 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -364,7 +364,7 @@ def addIntersection( self.intersectComps.append(inter) - def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None): + def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, secondary=False): """ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame and the callback function needs to be passed in @@ -382,11 +382,24 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru # figure out which component and fillet we're dealing with comp = self.comps[compName] - # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD - ptSetName = f"{compName}_curve" - comp.curvePtsName = ptSetName - comp.curvePts = curvePts - comp.curvePtsOrig = deepcopy(curvePts) + if secondary: + # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD + ptSetName = f"{compName}_curve_secondary" + comp.secondCurvePtsName = ptSetName + comp.secondCurvePts = curvePts + comp.secondCurvePtsOrig = deepcopy(curvePts) + + # get the initial vector so we can calculate rotations + vector = comp.curvePts - curvePts + comp.vector = vector + comp.vectorOrig = deepcopy(vector) + + else: + # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD + ptSetName = f"{compName}_curve" + comp.curvePtsName = ptSetName + comp.curvePts = curvePts + comp.curvePtsOrig = deepcopy(curvePts) # add the curve pointset to the component's DVGeo comp.DVGeo.addPointSet( @@ -1178,9 +1191,9 @@ def writeCompSurf(self, compName, fileName): comp = self.comps[compName] 
comp.writeSurf(fileName) - def writeCompCurve(self, compName, fileName): + def writeCompCurve(self, compName, fileName, secondary=False): comp = self.comps[compName] - comp.writeCurve(fileName) + comp.writeCurve(fileName, secondary) def writePointSet(self, name, fileName, solutionTime=None): """ @@ -1372,6 +1385,8 @@ def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName self.surfPtsName = surfPtsName self.curvePts = [] self.curvePtsName = None + self.secondCurvePts = [] + self.secondCurvePtsName = None self.intersection = None self.intersectInd = {} @@ -1388,15 +1403,22 @@ def writeSurf(self, fileName): writeTecplot1D(f, self.name, self.surfPts) closeTecplot(f) - def writeCurve(self, fileName): - fileName = f"{fileName}_{self.name}_curve.dat" + def writeCurve(self, fileName, secondary): + if secondary: + curveName = self.secondCurvePtsName + curvePts = self.secondCurvePts + else: + curveName = self.curvePtsName + curvePts = self.curvePts + + fileName = f"{fileName}_{curveName}.dat" f = openTecplot(fileName, 3) if self.isFillet: writeTecplot1D(f, self.name, self.surfPts[self.compAInterInd]) writeTecplot1D(f, self.name, self.surfPts[self.compBInterInd]) else: - writeTecplot1D(f, self.name, self.curvePts) + writeTecplot1D(f, self.name, curvePts) closeTecplot(f) From f61254bebfa7c10f8c8e636220e15ee4aeb4b3fb Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 12 Aug 2024 15:33:30 -0400 Subject: [PATCH 098/110] vector --- pygeo/parameterization/DVGeoMulti.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index ef18499e..84dae289 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -364,6 +364,8 @@ def addIntersection( self.intersectComps.append(inter) + return inter + def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, secondary=False): """ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame @@ -3891,13 +3893,12 @@ def addPointSet(self, pts, ptSetName, compMap, comm): def update(self, ptSetName, delta, comp=None): # update the pointset unless we haven't figured out the intersections yet - # TODO change to a firstUpdate flag or something - if self.firstUpdate: n = self.filletComp.surfPtsOrig.shape[0] indices = np.linspace(0, n - 1, n, dtype=int) self.indices = indices self.firstUpdate = False + else: # fillet points on boundaries need updated based on the points embedded in the neighbor FFDs if comp is not None: @@ -4082,3 +4083,12 @@ def _getUpdatedCoords(self): self.compA.updateSurfPts() self.compB.updateSurfPts() self.DVGeo.update(self.filletComp.surfPtsName) + + def _updateRotation(self): + comps = [self.compA, self.compB] + + for comp in comps: + vOrig = comp.vectorOrig + v = comp.curvePts - comp.secondCurvePts + + comp.vector = v From 59f306b50a16b1eced1c27d593fd1c1bbc19ed98 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 4 Nov 2024 13:16:00 -0500 Subject: [PATCH 099/110] rotation, some formatting --- pygeo/parameterization/DVGeoMulti.py | 31 ++++++++++++++++++---------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 84dae289..d51acc76 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -373,12 +373,12 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, 
origConfig=Tru curveFiles assumes you have a dat file or files in the format Pointwise exports or a similar version This is either a list of files where each looks like: - row 1: header - row 2-end: x y z of point on curve + - row 1: header + - row 2-end: x y z of point on curve or a singular file with similar sections for each individual connector: - row 1: header specifying how many points are on that connector - row 2-that number: x y z of point - where this repeats for as many segments make up the connector + - row 1: header specifying how many points are on that connector + - row 2-that number: x y z of point + - where this repeats for as many segments make up the connector """ if not self.filletIntersection: print("no") # TODO real error @@ -1308,7 +1308,7 @@ def _readDATFile(self, filename, surf=True): if " " not in line: skip = int(line) - temp = np.loadtxt(filename, skiprows=begin+1, max_rows=skip) + temp = np.loadtxt(filename, skiprows=begin + 1, max_rows=skip) begin += skip + 1 curves.append(temp) @@ -3879,12 +3879,12 @@ def findIntersection(self, surf, curve): # TODO fix this function # keep this as an intersection point if it is within tolerance if dist2ClosestPt < self.distTol: - print(f"intersection {surfPt} ind {i} {dist2ClosestPt} away") + # print(f"intersection {surfPt} ind {i} {dist2ClosestPt} away") intersectPts.append(surfPt) intersectInd.append(i) intersectPts = np.asarray(intersectPts, dtype=self.dtype) - print(f"min { minSurfCurveDist}") + # print(f"min { minSurfCurveDist}") return intersectPts, intersectInd, minSurfCurveDist, minSurfCurveDistInd def addPointSet(self, pts, ptSetName, compMap, comm): @@ -3898,7 +3898,7 @@ def update(self, ptSetName, delta, comp=None): indices = np.linspace(0, n - 1, n, dtype=int) self.indices = indices self.firstUpdate = False - + else: # fillet points on boundaries need updated based on the points embedded in the neighbor FFDs if comp is not None: @@ -4089,6 +4089,15 @@ def _updateRotation(self): for comp in comps: vOrig = comp.vectorOrig - v = comp.curvePts - comp.secondCurvePts + vNew = comp.curvePts - comp.secondCurvePts + + dot = np.dot(vOrig, vNew) + theta = np.arccos(dot / (np.norm(vOrig) * np.norm(vNew))) + vRot = np.cross(vOrig, vNew) + + [wx, wy, wz] = vRot / np.norm(vRot) + w = np.array(((0, -wz, wy), (wz, 0, -wx), (-wy, wx, 0))) + R = np.identity(3) + w * np.sin(theta) + np.matmul(w, w) * (1 - np.cos(theta)) - comp.vector = v + comp.vector = vNew + comp.R = R From 0280ccb8252874b7211ce656bc06f62b99ed3858 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 4 Nov 2024 13:19:48 -0500 Subject: [PATCH 100/110] comment --- pygeo/parameterization/DVGeoMulti.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 8f3e1c69..ab408e82 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3847,12 +3847,12 @@ def findIntersection(self, surf, curve): # TODO fix this function # keep this as an intersection point if it is within tolerance if dist2ClosestPt < self.distTol: - print(f"intersection {surfPt} ind {i} {dist2ClosestPt} away") + # print(f"intersection {surfPt} ind {i} {dist2ClosestPt} away") intersectPts.append(surfPt) intersectInd.append(i) intersectPts = np.asarray(intersectPts, dtype=self.dtype) - print(f"min { minSurfCurveDist}") + # print(f"min { minSurfCurveDist}") return intersectPts, intersectInd, minSurfCurveDist, minSurfCurveDistInd def addPointSet(self, pts, ptSetName, 
compMap, comm): From e065c1eee5dd0ce51f71589a572a21f8640cc65d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 4 Nov 2024 14:16:25 -0500 Subject: [PATCH 101/110] slow --- pygeo/parameterization/DVGeoMulti.py | 35 ++++++++++++++++------------ 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index d51acc76..1560fbb5 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -3958,6 +3958,18 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp + elif self.rotate: + rotMat = np.zeros((3, 3, len(curvePtCoords))) + + newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) + curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) + + vOrig = comp.vectorOrig + vNew = comp.curvePts - comp.secondCurvePts + + for i in range(len(curvePtCoords)): + rotMat[i] = self._getRotMatrix(vOrig[i], vNew[i]) + else: newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) @@ -4084,20 +4096,13 @@ def _getUpdatedCoords(self): self.compB.updateSurfPts() self.DVGeo.update(self.filletComp.surfPtsName) - def _updateRotation(self): - comps = [self.compA, self.compB] - - for comp in comps: - vOrig = comp.vectorOrig - vNew = comp.curvePts - comp.secondCurvePts - - dot = np.dot(vOrig, vNew) - theta = np.arccos(dot / (np.norm(vOrig) * np.norm(vNew))) - vRot = np.cross(vOrig, vNew) + def _getRotMatrix(self, vec1, vec2): + dot = np.dot(vec1, vec2) + theta = np.arccos(dot / (np.linalg.norm(vec1) * np.linalg.norm(vec2))) + vRot = np.cross(vec1, vec2) - [wx, wy, wz] = vRot / np.norm(vRot) - w = np.array(((0, -wz, wy), (wz, 0, -wx), (-wy, wx, 0))) - R = np.identity(3) + w * np.sin(theta) + np.matmul(w, w) * (1 - np.cos(theta)) + [wx, wy, wz] = vRot / np.linalg.norm(vRot) + w = np.array(((0, -wz, wy), (wz, 0, -wx), (-wy, wx, 0))) + rotMat = np.identity(3) + w * np.sin(theta) + np.matmul(w, w) * (1 - np.cos(theta)) - comp.vector = vNew - comp.R = R + return rotMat From f8c80e394553b54ff2a0adedc5c6e90449aa48fd Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 22 Nov 2024 16:17:54 -0500 Subject: [PATCH 102/110] this runs but something is wrong :/ --- pygeo/parameterization/DVGeoMulti.py | 143 ++++++++++++++++++++++----- 1 file changed, 121 insertions(+), 22 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 1560fbb5..02c36717 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -244,7 +244,8 @@ def addIntersection( excludeSurfaces=None, remeshBwd=True, anisotropy=[1.0, 1.0, 1.0], - tangency=True, + tangency=False, + rotate=False, ): """ Method that defines intersections between components. 
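# The matrix assembled in _getRotMatrix (above) is Rodrigues' rotation formula:
# R = I + sin(theta)*K + (1 - cos(theta))*K^2, where K is the skew-symmetric
# matrix of the unit rotation axis vec1 x vec2. A standalone check, assuming
# NumPy only; rotation_matrix is an illustrative restatement, not the pyGeo
# implementation itself.
import numpy as np

def rotation_matrix(v1, v2):
    # rotate the direction of v1 onto the direction of v2
    v1 = v1 / np.linalg.norm(v1)
    v2 = v2 / np.linalg.norm(v2)
    axis = np.cross(v1, v2)
    s = np.linalg.norm(axis)  # sin(theta) for unit inputs
    c = np.dot(v1, v2)        # cos(theta) for unit inputs
    if s < 1e-12:
        return np.identity(3)  # parallel vectors; the antiparallel case is not handled here
    kx, ky, kz = axis / s
    K = np.array([[0.0, -kz, ky], [kz, 0.0, -kx], [-ky, kx, 0.0]])
    return np.identity(3) + s * K + (1.0 - c) * (K @ K)

R = rotation_matrix(np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0]))
print(R @ np.array([1.0, 0.0, 0.0]))  # ~[0. 1. 0.]
# Building sin/cos from the cross and dot products is equivalent to the arccos
# form for angles in (0, pi) and avoids arccos round-off for nearly parallel vectors.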
@@ -327,7 +328,7 @@ def addIntersection( if filletComp is None: print("no") - inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype, tangency) + inter = FilletIntersection(compA, compB, filletComp, distTol, self, self.dtype, tangency, rotate) # initialize a standard intersection object else: @@ -413,9 +414,8 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru comp.curvePtsOrig = deepcopy(curvePts) # add the curve pointset to the component's DVGeo - comp.DVGeo.addPointSet( - curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer - ) # TODO is comm right here + comp.DVGeo.addPointSet(curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer) + # TODO is comm right here # add the curve pointset to DVGeoMulti self.points[ptSetName] = PointSet(curvePts, comm=self.comm, comp=compName) @@ -582,8 +582,8 @@ def addPointSet( # print(f"\nrank {self.comm.rank} total compBInterInd {compBInterPts}") # add those intersection points to each DVGeo so they get deformed with the FFD - compAPtsName = f"{IC.compA.name}_fillet_intersection" - compBPtsName = f"{IC.compB.name}_fillet_intersection" + compAPtsName = f"{IC.compA.name}_fillet_intersection_main" + compBPtsName = f"{IC.compB.name}_fillet_intersection_main" # print(f"\nrank {self.comm.rank} compAInterPts {compAInterPts}") # print(f"\nrank {self.comm.rank} compBInterPts {compAInterPts}") @@ -612,6 +612,59 @@ def addPointSet( IC.addPointSet(compAInterPts, compAPtsName, [], comm) IC.addPointSet(compBInterPts, compBPtsName, [], comm) + # secondary curve + compAPtsName_sec = f"{IC.compA.name}_fillet_intersection_sec" + compBPtsName_sec = f"{IC.compB.name}_fillet_intersection_sec" + IC.filletComp.compAPtsName_sec = compAPtsName_sec + IC.filletComp.compBPtsName_sec = compBPtsName_sec + + if IC.rotate: + # find points that match the secondary curve + ( + compAInterPtsLocal_sec, + compAInterIndLocal_sec, + compACurvePtDistLocal_sec, + compACurveIndLocal_sec, + ) = IC.findIntersection(points, IC.compA.secondCurvePts) + ( + compBInterPtsLocal_sec, + compBInterIndLocal_sec, + compBCurvePtDistLocal_sec, + compBCurveIndLocal_sec, + ) = IC.findIntersection(points, IC.compB.secondCurvePts) + + _, _, compAInterPts_sec, compAInterInd_sec = IC._commCurveProj( + compAInterPtsLocal_sec, compAInterIndLocal_sec, self.comm + ) + _, _, compBInterPts_sec, compBInterInd_sec = IC._commCurveProj( + compBInterPtsLocal_sec, compBInterIndLocal_sec, self.comm + ) + + _, _, compACurvePtDist_sec, _ = IC._commCurveProj( + compACurvePtDistLocal_sec, compACurveIndLocal_sec, self.comm, reshape=False + ) + _, _, compBCurvePtDist_sec, _ = IC._commCurveProj( + compBCurvePtDistLocal_sec, compBCurveIndLocal_sec, self.comm, reshape=False + ) + + compAInterPts_sec.dtype = self.dtype + compBInterPts_sec.dtype = self.dtype + + IC.filletComp.compAInterInd_sec = compAInterInd_sec + IC.filletComp.compBInterInd_sec = compBInterInd_sec + + IC.filletComp.compACurvePtDist_sec = compACurvePtDist_sec + IC.filletComp.compBCurvePtDist_sec = compBCurvePtDist_sec + + IC.compA.DVGeo.addPointSet(compAInterPts_sec, compAPtsName_sec) + IC.compB.DVGeo.addPointSet(compBInterPts_sec, compBPtsName_sec) + + self.points[compAPtsName_sec] = PointSet(compAInterPts_sec, comm=comm, comp=IC.compA.name) + self.points[compBPtsName_sec] = PointSet(compBInterPts_sec, comm=comm, comp=IC.compB.name) + + IC.addPointSet(compAInterPts_sec, compAPtsName_sec, [], comm) + IC.addPointSet(compBInterPts_sec, compBPtsName_sec, [], comm) + # non-fillet intersections require 
more checking else: # we now need to create the component mapping information @@ -1428,29 +1481,38 @@ def updateSurfPts(self): else: self.surfPts = self.DVGeo.update(self.surfPtsName).copy() self.curvePts = self.DVGeo.update(self.curvePtsName).copy() + self.secondCurvePts = self.DVGeo.update(self.secondCurvePtsName).copy() def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" f = openTecplot(fileName, 3) - writeTecplot1D(f, self.name, self.surfPts) + writeTecplot1D(f, f"{self.name}Surf", self.surfPts) closeTecplot(f) def writeCurve(self, fileName, secondary): if secondary: curveName = self.secondCurvePtsName curvePts = self.secondCurvePts + tag = "2nd" + + if self.isFillet: + ind = [self.compAInterInd_sec, self.compBInterInd_sec] else: curveName = self.curvePtsName curvePts = self.curvePts + tag = "1st" + + if self.isFillet: + ind = [self.compAInterInd, self.compBInterInd] fileName = f"{fileName}_{curveName}.dat" f = openTecplot(fileName, 3) if self.isFillet: - writeTecplot1D(f, self.name, self.surfPts[self.compAInterInd]) - writeTecplot1D(f, self.name, self.surfPts[self.compBInterInd]) + writeTecplot1D(f, f"{self.name}Curve{tag}_CompA", self.surfPts[ind[0]]) + writeTecplot1D(f, f"{self.name}Curve{tag}_CompB", self.surfPts[ind[1]]) else: - writeTecplot1D(f, self.name, curvePts) + writeTecplot1D(f, f"{self.name}Curve{tag}", curvePts) closeTecplot(f) @@ -1460,6 +1522,10 @@ def updateFilletPts(self, newInterPts, ptSetName): newPts[self.compAInterInd] = newInterPts elif ptSetName == self.compBPtsName: newPts[self.compBInterInd] = newInterPts + elif ptSetName == self.compAPtsName_sec: + newPts[self.compAInterInd] = newInterPts + elif ptSetName == self.compBPtsName_sec: + newPts[self.compBInterInd] = newInterPts else: print("no") @@ -3847,13 +3913,14 @@ def associatePointsToSurface(self, points, ptSetName, surface, surfaceEps): class FilletIntersection(Intersection): - def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, tangency, project=True): + def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, tangency, rotate, project=True): super().__init__(compA, compB, distTol, DVGeo, project, dtype) self.filletComp = DVGeo.comps[filletComp] self.compA.intersection = self.compB.intersection = self.filletComp.intersection = self self.firstUpdate = True self.tangency = tangency + self.rotate = rotate # dict to keep track of the total number of points on each curve # self.nCurvePts = {} @@ -3904,7 +3971,12 @@ def update(self, ptSetName, delta, comp=None): if comp is not None: if not comp.isFillet: fillet = self.filletComp - if ptSetName is fillet.compAPtsName or ptSetName is fillet.compBPtsName: + if ( + ptSetName is fillet.compAPtsName + or ptSetName is fillet.compBPtsName + or ptSetName is fillet.compAPtsName_sec + or ptSetName is fillet.compBPtsName_sec + ): points = self.points[ptSetName].points fillet.updateFilletPts(points, ptSetName) @@ -3959,17 +4031,38 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp elif self.rotate: - rotMat = np.zeros((3, 3, len(curvePtCoords))) + nFilPts = len(self.filletComp.surfPtsOrig) newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) - - vOrig = comp.vectorOrig - vNew = comp.curvePts - comp.secondCurvePts + curvePtCoords2 = np.vstack((self.compA.secondCurvePts, self.compB.secondCurvePts)) + + rotMat = np.zeros((len(curvePtCoords), 3, 3)) + + vOrig = 
np.vstack((self.compA.vectorOrig, self.compB.vectorOrig)) + vNew = newCurveCoords - curvePtCoords2 + + delta = np.zeros((len(curvePtCoords), 3)) for i in range(len(curvePtCoords)): rotMat[i] = self._getRotMatrix(vOrig[i], vNew[i]) + for i in range(nFilPts): + xf = self.filletComp.surfPtsOrig[i] + for j in range(len(curvePtCoords)): + Mj = rotMat[j] + xcj = curvePtCoords[j] + delta[j] += np.matmul(Mj, xf) + xcj - np.matmul(Mj, xcj) - xf + + delta *= 10e-8 + + ptsNew = self.filletComp.surfPtsOrig.copy() + pts0 = self.filletComp.surfPtsOrig + + # warp interior fillet points + self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta, update=True) + self.filletComp.surfPts = ptsNew + else: newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) @@ -4096,12 +4189,18 @@ def _getUpdatedCoords(self): self.compB.updateSurfPts() self.DVGeo.update(self.filletComp.surfPtsName) - def _getRotMatrix(self, vec1, vec2): - dot = np.dot(vec1, vec2) - theta = np.arccos(dot / (np.linalg.norm(vec1) * np.linalg.norm(vec2))) - vRot = np.cross(vec1, vec2) + def _getRotMatrix(self, vec1List, vec2List): + # vec1List = np.atleast_3d(vec1List) + # vec2List = np.atleast_3d(vec2List) + # dot = np.sum(vec1List * vec2List, axis=0) # check: with one vector in each this matches normal np.dot + # theta = np.arccos(dot / (np.linalg.norm(vec1List) * np.linalg.norm(vec2List))) + # vRot = np.cross(vec1List[:, None, :], vec2List[None, :, :]) + + dot = np.dot(vec1List, vec2List) + theta = np.arccos(dot / (np.linalg.norm(vec1List) * np.linalg.norm(vec2List))) + vRot = np.cross(vec1List, vec2List) - [wx, wy, wz] = vRot / np.linalg.norm(vRot) + [wx, wy, wz] = vRot / (np.linalg.norm(vRot) + 1e-8) # get 0 back instead of NAN if there's no change w = np.array(((0, -wz, wy), (wz, 0, -wx), (-wy, wx, 0))) rotMat = np.identity(3) + w * np.sin(theta) + np.matmul(w, w) * (1 - np.cos(theta)) From 62e139489b04fc9c3450fe69ace1ac2185601ab2 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 27 Nov 2024 11:50:42 -0500 Subject: [PATCH 103/110] rotation works!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1 but is mesh dependent :/ --- pygeo/parameterization/DVGeoMulti.py | 61 +++++++++++++++++++++++----- 1 file changed, 50 insertions(+), 11 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 02c36717..4f985e24 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1617,6 +1617,45 @@ def _warpSurfPts(self, pts0, ptsNew, indices, curvePtCoords, delta, update=True) else: ptsNew[j] = interp + def _warpSurfPts2(self, pts0, ptsNew, indices, curvePtCoords, delta, update=True): + """ + This function warps points using the displacements from curve projections. + + pts0: The original surface point coordinates. + ptsNew: Updated surface pt coordinates. We will add the warped delta to these inplace. + indices: Indices of the points that we will use for this operation. + curvePtCoords: Original coordinates of points on curves. + delta: Displacements of the points on curves after projecting them. + update: Whether to update the coordinates in place. 
If not, ptsNew will just be the displacements + + """ + + # Return if curvePtCoords is empty + if not np.any(curvePtCoords): + return + + for j in indices: + # point coordinates with the baseline design + # this is the point we will warp + ptCoords = pts0[j] + s0 = delta[j, :, :] + + # Vectorized point-based warping + rr = ptCoords - curvePtCoords + LdefoDist = 1.0 / np.sqrt(rr[:, 0] ** 2 + rr[:, 1] ** 2 + rr[:, 2] ** 2 + 1e-16) + LdefoDist3 = LdefoDist**3 + Wi = LdefoDist3 + den = np.sum(Wi) + interp = np.zeros(3, dtype=self.dtype) + for iDim in range(3): + interp[iDim] = np.sum(Wi * s0[:, iDim]) / den + + if update: + # finally, update the coord in place + ptsNew[j] = ptsNew[j] + interp + else: + ptsNew[j] = interp + def _warpSurfPts_b(self, dIdPt, pts0, indices, curvePtCoords): # seeds for delta deltaBar = np.zeros((dIdPt.shape[0], curvePtCoords.shape[0], 3), dtype=self.dtype) @@ -4031,36 +4070,36 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp elif self.rotate: - nFilPts = len(self.filletComp.surfPtsOrig) - newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) curvePtCoords2 = np.vstack((self.compA.secondCurvePts, self.compB.secondCurvePts)) - rotMat = np.zeros((len(curvePtCoords), 3, 3)) + nFilPts = len(self.filletComp.surfPtsOrig) + nIntPts = len(curvePtCoords) vOrig = np.vstack((self.compA.vectorOrig, self.compB.vectorOrig)) vNew = newCurveCoords - curvePtCoords2 - delta = np.zeros((len(curvePtCoords), 3)) + rotMat = np.zeros((nIntPts, 3, 3)) + b = np.zeros((nIntPts, 3)) - for i in range(len(curvePtCoords)): + for i in range(nIntPts): rotMat[i] = self._getRotMatrix(vOrig[i], vNew[i]) + b[i] = newCurveCoords[i] - np.dot(rotMat[i], curvePtCoords[i]) + + delta = np.zeros((nFilPts, nIntPts, 3)) for i in range(nFilPts): xf = self.filletComp.surfPtsOrig[i] - for j in range(len(curvePtCoords)): - Mj = rotMat[j] - xcj = curvePtCoords[j] - delta[j] += np.matmul(Mj, xf) + xcj - np.matmul(Mj, xcj) - xf - delta *= 10e-8 + for j in range(nIntPts): + delta[i, j, :] = np.matmul(rotMat[j], xf) + b[j] - xf ptsNew = self.filletComp.surfPtsOrig.copy() pts0 = self.filletComp.surfPtsOrig # warp interior fillet points - self._warpSurfPts(pts0, ptsNew, self.indices, curvePtCoords, delta, update=True) + self._warpSurfPts2(pts0, ptsNew, self.indices, curvePtCoords, delta, update=True) self.filletComp.surfPts = ptsNew else: From a91f2bb9306de800fecb0bc9f59b49aae56cf501 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Wed, 27 Nov 2024 11:53:56 -0500 Subject: [PATCH 104/110] start switch from secondary curve to FD curves --- pygeo/parameterization/DVGeoMulti.py | 179 +++++++++++++-------------- 1 file changed, 83 insertions(+), 96 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 02c36717..b1a1dbb6 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -323,6 +323,9 @@ def addIntersection( """ + # only necessary in fillet but store because there's a broad check later + self.rotate = rotate + # initialize a fillet intersection object if self.filletIntersection: if filletComp is None: @@ -367,7 +370,7 @@ def addIntersection( return inter - def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, secondary=False): + def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, 
diff=3 * [1e-6]): """ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame and the callback function needs to be passed in @@ -394,17 +397,44 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru # figure out which component and fillet we're dealing with comp = self.comps[compName] - if secondary: - # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD - ptSetName = f"{compName}_curve_secondary" - comp.secondCurvePtsName = ptSetName - comp.secondCurvePts = curvePts - comp.secondCurvePtsOrig = deepcopy(curvePts) + vec = False + if self.filletIntersection and self.rotate: + vec = True + + # embed three points in the FFD for each curve - one perturbed in x, y, and z + ptSetNameX = f"{compName}_curve_offset_X" + ptSetNameY = f"{compName}_curve_offset_Y" + ptSetNameZ = f"{compName}_curve_offset_Z" + + ptsX = curvePts[:, 0] - diff + ptsY = curvePts[:, 1] - diff + ptsZ = curvePts[:, 2] - diff + + # add each offset curve to the component's DVGeo as a pointset so they get deformed in the FFD + comp.offsetCurvePtsNameX = ptSetNameX + comp.offsetCurvePtsNameY = ptSetNameY + comp.offsetCurvePtsNameZ = ptSetNameZ + + comp.offsetPtsX = ptsX + comp.offsetPtsY = ptsY + comp.offsetPtsZ = ptsZ + + comp.offsetCurvePtsOrigX = deepcopy(ptsX) + comp.offsetCurvePtsOrigY = deepcopy(ptsY) + comp.offsetCurvePtsOrigZ = deepcopy(ptsZ) # get the initial vector so we can calculate rotations - vector = comp.curvePts - curvePts - comp.vector = vector - comp.vectorOrig = deepcopy(vector) + vecX = comp.curvePts - ptsX + vecY = comp.curvePts - ptsY + vecZ = comp.curvePts - ptsZ + + comp.vecX = vecX + comp.vecY = vecY + comp.vecZ = vecZ + + comp.vecOrigX = deepcopy(vecX) + comp.vecOrigY = deepcopy(vecY) + comp.vecOrigZ = deepcopy(vecZ) else: # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD @@ -415,15 +445,29 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru # add the curve pointset to the component's DVGeo comp.DVGeo.addPointSet(curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer) - # TODO is comm right here + + if vec: + comp.DVGeo.addPointSet(ptsX, ptSetNameX, origConfig=origConfig, coordXfer=coordXfer) + comp.DVGeo.addPointSet(ptsY, ptSetNameY, origConfig=origConfig, coordXfer=coordXfer) + comp.DVGeo.addPointSet(ptsZ, ptSetNameZ, origConfig=origConfig, coordXfer=coordXfer) # add the curve pointset to DVGeoMulti self.points[ptSetName] = PointSet(curvePts, comm=self.comm, comp=compName) + if vec: + self.points[ptSetNameX] = PointSet(ptsX, comm=self.comm, comp=compName) + self.points[ptSetNameY] = PointSet(ptsY, comm=self.comm, comp=compName) + self.points[ptSetNameZ] = PointSet(ptsZ, comm=self.comm, comp=compName) + # add the curve pointset to the intersection for IC in self.intersectComps: IC.addPointSet(curvePts, ptSetName, [], self.comm) + if vec: + IC.addPointSet(ptsX, ptSetNameX, [], self.comm) + IC.addPointSet(ptsY, ptSetNameY, [], self.comm) + IC.addPointSet(ptsZ, ptSetNameZ, [], self.comm) + # print(f"awrite {compName} curve from proc {self.comm.rank}") # np.savetxt(f"comp{compName}_{self.comm.rank}.txt", curvePts) @@ -612,59 +656,6 @@ def addPointSet( IC.addPointSet(compAInterPts, compAPtsName, [], comm) IC.addPointSet(compBInterPts, compBPtsName, [], comm) - # secondary curve - compAPtsName_sec = f"{IC.compA.name}_fillet_intersection_sec" - compBPtsName_sec = f"{IC.compB.name}_fillet_intersection_sec" - 
IC.filletComp.compAPtsName_sec = compAPtsName_sec - IC.filletComp.compBPtsName_sec = compBPtsName_sec - - if IC.rotate: - # find points that match the secondary curve - ( - compAInterPtsLocal_sec, - compAInterIndLocal_sec, - compACurvePtDistLocal_sec, - compACurveIndLocal_sec, - ) = IC.findIntersection(points, IC.compA.secondCurvePts) - ( - compBInterPtsLocal_sec, - compBInterIndLocal_sec, - compBCurvePtDistLocal_sec, - compBCurveIndLocal_sec, - ) = IC.findIntersection(points, IC.compB.secondCurvePts) - - _, _, compAInterPts_sec, compAInterInd_sec = IC._commCurveProj( - compAInterPtsLocal_sec, compAInterIndLocal_sec, self.comm - ) - _, _, compBInterPts_sec, compBInterInd_sec = IC._commCurveProj( - compBInterPtsLocal_sec, compBInterIndLocal_sec, self.comm - ) - - _, _, compACurvePtDist_sec, _ = IC._commCurveProj( - compACurvePtDistLocal_sec, compACurveIndLocal_sec, self.comm, reshape=False - ) - _, _, compBCurvePtDist_sec, _ = IC._commCurveProj( - compBCurvePtDistLocal_sec, compBCurveIndLocal_sec, self.comm, reshape=False - ) - - compAInterPts_sec.dtype = self.dtype - compBInterPts_sec.dtype = self.dtype - - IC.filletComp.compAInterInd_sec = compAInterInd_sec - IC.filletComp.compBInterInd_sec = compBInterInd_sec - - IC.filletComp.compACurvePtDist_sec = compACurvePtDist_sec - IC.filletComp.compBCurvePtDist_sec = compBCurvePtDist_sec - - IC.compA.DVGeo.addPointSet(compAInterPts_sec, compAPtsName_sec) - IC.compB.DVGeo.addPointSet(compBInterPts_sec, compBPtsName_sec) - - self.points[compAPtsName_sec] = PointSet(compAInterPts_sec, comm=comm, comp=IC.compA.name) - self.points[compBPtsName_sec] = PointSet(compBInterPts_sec, comm=comm, comp=IC.compB.name) - - IC.addPointSet(compAInterPts_sec, compAPtsName_sec, [], comm) - IC.addPointSet(compBInterPts_sec, compBPtsName_sec, [], comm) - # non-fillet intersections require more checking else: # we now need to create the component mapping information @@ -1489,21 +1480,13 @@ def writeSurf(self, fileName): writeTecplot1D(f, f"{self.name}Surf", self.surfPts) closeTecplot(f) - def writeCurve(self, fileName, secondary): - if secondary: - curveName = self.secondCurvePtsName - curvePts = self.secondCurvePts - tag = "2nd" - - if self.isFillet: - ind = [self.compAInterInd_sec, self.compBInterInd_sec] - else: - curveName = self.curvePtsName - curvePts = self.curvePts - tag = "1st" + def writeCurve(self, fileName): + curveName = self.curvePtsName + curvePts = self.curvePts + tag = "1st" - if self.isFillet: - ind = [self.compAInterInd, self.compBInterInd] + if self.isFillet: + ind = [self.compAInterInd, self.compBInterInd] fileName = f"{fileName}_{curveName}.dat" f = openTecplot(fileName, 3) @@ -1522,10 +1505,6 @@ def updateFilletPts(self, newInterPts, ptSetName): newPts[self.compAInterInd] = newInterPts elif ptSetName == self.compBPtsName: newPts[self.compBInterInd] = newInterPts - elif ptSetName == self.compAPtsName_sec: - newPts[self.compAInterInd] = newInterPts - elif ptSetName == self.compBPtsName_sec: - newPts[self.compBInterInd] = newInterPts else: print("no") @@ -3971,12 +3950,7 @@ def update(self, ptSetName, delta, comp=None): if comp is not None: if not comp.isFillet: fillet = self.filletComp - if ( - ptSetName is fillet.compAPtsName - or ptSetName is fillet.compBPtsName - or ptSetName is fillet.compAPtsName_sec - or ptSetName is fillet.compBPtsName_sec - ): + if ptSetName is fillet.compAPtsName or ptSetName is fillet.compBPtsName: points = self.points[ptSetName].points fillet.updateFilletPts(points, ptSetName) @@ -4035,27 +4009,40 @@ def project(self, 
ptSetName, newPts): newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) - curvePtCoords2 = np.vstack((self.compA.secondCurvePts, self.compB.secondCurvePts)) - rotMat = np.zeros((len(curvePtCoords), 3, 3)) + offsetPtCoordsX = np.vstack((self.compA.offsetPtsX, self.compB.offsetPtsX)) + offsetPtCoordsY = np.vstack((self.compA.offsetPtsY, self.compB.offsetPtsY)) + offsetPtCoordsZ = np.vstack((self.compA.offsetPtsZ, self.compB.offsetPtsZ)) - vOrig = np.vstack((self.compA.vectorOrig, self.compB.vectorOrig)) - vNew = newCurveCoords - curvePtCoords2 + rotMatX = np.zeros((len(curvePtCoords), 3, 3)) + rotMatY = np.zeros((len(curvePtCoords), 3, 3)) + rotMatZ = np.zeros((len(curvePtCoords), 3, 3)) - delta = np.zeros((len(curvePtCoords), 3)) + vecOrigX = np.vstack((self.compA.vecOrigX, self.compB.vecOrigX)) + vecOrigY = np.vstack((self.compA.vecOrigY, self.compB.vecOrigY)) + vecOrigZ = np.vstack((self.compA.vecOrigZ, self.compB.vecOrigZ)) + + vecNewX = newCurveCoords - offsetPtCoordsX + vecNewY = newCurveCoords - offsetPtCoordsY + vecNewZ = newCurveCoords - offsetPtCoordsZ + + deltaX = np.zeros((len(curvePtCoords), 3)) + deltaY = np.zeros((len(curvePtCoords), 3)) + deltaZ = np.zeros((len(curvePtCoords), 3)) for i in range(len(curvePtCoords)): - rotMat[i] = self._getRotMatrix(vOrig[i], vNew[i]) + rotMatX[i] = self._getRotMatrix(vecOrigX[i], vecNewX[i]) + rotMatY[i] = self._getRotMatrix(vecOrigY[i], vecNewY[i]) + rotMatZ[i] = self._getRotMatrix(vecOrigZ[i], vecNewZ[i]) for i in range(nFilPts): xf = self.filletComp.surfPtsOrig[i] + for j in range(len(curvePtCoords)): Mj = rotMat[j] xcj = curvePtCoords[j] delta[j] += np.matmul(Mj, xf) + xcj - np.matmul(Mj, xcj) - xf - delta *= 10e-8 - ptsNew = self.filletComp.surfPtsOrig.copy() pts0 = self.filletComp.surfPtsOrig From 85dd8ffc755c0efd5754179380db2f06d6e7af6e Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 2 Dec 2024 21:57:38 -0500 Subject: [PATCH 105/110] rotation works with a vector in one direction --- pygeo/parameterization/DVGeoMulti.py | 124 ++++++++++++++++++--------- 1 file changed, 85 insertions(+), 39 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index faf3a7e6..cb8757c5 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -370,7 +370,7 @@ def addIntersection( return inter - def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, diff=3 * [1e-6]): + def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=True, coordXfer=None, diff=1e-2): """ If using coordXfer callback function, the curvePts need to be in the ADflow reference frame and the callback function needs to be passed in @@ -398,6 +398,13 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru comp = self.comps[compName] vec = False + + # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD + ptSetName = f"{compName}_curve" + comp.curvePtsName = ptSetName + comp.curvePts = curvePts + comp.curvePtsOrig = deepcopy(curvePts) + if self.filletIntersection and self.rotate: vec = True @@ -406,9 +413,15 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru ptSetNameY = f"{compName}_curve_offset_Y" ptSetNameZ = f"{compName}_curve_offset_Z" - ptsX = curvePts[:, 0] - diff - ptsY = curvePts[:, 1] - diff - ptsZ = curvePts[:, 2] - diff + ptsX = 
deepcopy(curvePts) + ptsY = deepcopy(curvePts) + ptsZ = deepcopy(curvePts) + # ptsX -= diff + # ptsY -= diff + # ptsZ -= diff + ptsX -= [diff, 0, 0] + ptsY -= [0, diff, 0] + ptsZ -= [0, 0, diff] # add each offset curve to the component's DVGeo as a pointset so they get deformed in the FFD comp.offsetCurvePtsNameX = ptSetNameX @@ -428,21 +441,14 @@ def addCurve(self, compName, curveFiles=None, curvePtsArray=None, origConfig=Tru vecY = comp.curvePts - ptsY vecZ = comp.curvePts - ptsZ - comp.vecX = vecX - comp.vecY = vecY - comp.vecZ = vecZ + comp.vecX = vecX / np.linalg.norm(vecX) + comp.vecY = vecY / np.linalg.norm(vecY) + comp.vecZ = vecZ / np.linalg.norm(vecZ) comp.vecOrigX = deepcopy(vecX) comp.vecOrigY = deepcopy(vecY) comp.vecOrigZ = deepcopy(vecZ) - else: - # add this curve to the component's DVGeo as a pointset so it gets deformed in the FFD - ptSetName = f"{compName}_curve" - comp.curvePtsName = ptSetName - comp.curvePts = curvePts - comp.curvePtsOrig = deepcopy(curvePts) - # add the curve pointset to the component's DVGeo comp.DVGeo.addPointSet(curvePts, ptSetName, origConfig=origConfig, coordXfer=coordXfer) @@ -1246,9 +1252,9 @@ def writeCompSurf(self, compName, fileName): comp = self.comps[compName] comp.writeSurf(fileName) - def writeCompCurve(self, compName, fileName, secondary=False): + def writeCompCurve(self, compName, fileName, offset=None): comp = self.comps[compName] - comp.writeCurve(fileName, secondary) + comp.writeCurve(fileName, offset) def writePointSet(self, name, fileName, solutionTime=None): """ @@ -1450,7 +1456,7 @@ def updateTriMesh(self): class Comp: - def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=None, tol=1e-3): + def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName=None, tol=1e-3, rotate=False): self.name = name self.isFillet = isFillet self.DVGeo = DVGeo @@ -1458,21 +1464,36 @@ def __init__(self, name, isFillet, surfPts, DVGeo, xMin, xMax, comm, surfPtsName self.xMin = xMin self.xMax = xMax self.comm = comm + self.surfPtsName = surfPtsName + self.surfPts = [] + self.curvePts = [] self.curvePtsName = None - self.secondCurvePts = [] - self.secondCurvePtsName = None + + self.offsetPtsX = [] + self.offsetCurvePtsNameX = None + self.offsetPtsY = [] + self.offsetCurvePtsNameY = None + self.offsetPtsZ = [] + self.offsetCurvePtsNameZ = None + self.intersection = None self.intersectInd = {} + self.rotate = rotate + def updateSurfPts(self): if self.isFillet: print("no") else: self.surfPts = self.DVGeo.update(self.surfPtsName).copy() self.curvePts = self.DVGeo.update(self.curvePtsName).copy() - self.secondCurvePts = self.DVGeo.update(self.secondCurvePtsName).copy() + + if self.rotate: + self.offsetPtsX = self.DVGeo.update(self.offsetCurvePtsNameX).copy() + self.offsetPtsY = self.DVGeo.update(self.offsetCurvePtsNameY).copy() + self.offsetPtsZ = self.DVGeo.update(self.offsetCurvePtsNameZ).copy() def writeSurf(self, fileName): fileName = f"{fileName}_{self.name}_surf.dat" @@ -1480,10 +1501,20 @@ def writeSurf(self, fileName): writeTecplot1D(f, f"{self.name}Surf", self.surfPts) closeTecplot(f) - def writeCurve(self, fileName): - curveName = self.curvePtsName - curvePts = self.curvePts - tag = "1st" + def writeCurve(self, fileName, offset): + tag = offset + if offset is None: + curveName = self.curvePtsName + curvePts = self.curvePts + elif offset.lower() == "x": + curveName = self.offsetCurvePtsNameX + curvePts = self.offsetPtsX + elif offset.lower() == "y": + curveName = self.offsetCurvePtsNameY + curvePts = 
self.offsetPtsY + elif offset.lower() == "z": + curveName = self.offsetCurvePtsNameZ + curvePts = self.offsetPtsZ if self.isFillet: ind = [self.compAInterInd, self.compBInterInd] @@ -3940,6 +3971,8 @@ def __init__(self, compA, compB, filletComp, distTol, DVGeo, dtype, tangency, ro self.tangency = tangency self.rotate = rotate + self.compA.rotate = self.compB.rotate = self.filletComp.rotate = rotate + # dict to keep track of the total number of points on each curve # self.nCurvePts = {} @@ -4058,10 +4091,6 @@ def project(self, ptSetName, newPts): rotMatY = np.zeros((nIntPts, 3, 3)) rotMatZ = np.zeros((nIntPts, 3, 3)) - bX = np.zeros((nIntPts, 3)) - bY = np.zeros((nIntPts, 3)) - bZ = np.zeros((nIntPts, 3)) - vecOrigX = np.vstack((self.compA.vecOrigX, self.compB.vecOrigX)) vecOrigY = np.vstack((self.compA.vecOrigY, self.compB.vecOrigY)) vecOrigZ = np.vstack((self.compA.vecOrigZ, self.compB.vecOrigZ)) @@ -4070,28 +4099,45 @@ def project(self, ptSetName, newPts): vecNewY = newCurveCoords - offsetPtCoordsY vecNewZ = newCurveCoords - offsetPtCoordsZ - deltaX = np.zeros((nFilPts, nIntPts, 3)) - deltaY = np.zeros((nFilPts, nIntPts, 3)) - deltaZ = np.zeros((nFilPts, nIntPts, 3)) + vecNewX /= np.linalg.norm(vecNewX) + vecNewY /= np.linalg.norm(vecNewY) + vecNewZ /= np.linalg.norm(vecNewZ) for i in range(nIntPts): rotMatX[i] = self._getRotMatrix(vecOrigX[i], vecNewX[i]) rotMatY[i] = self._getRotMatrix(vecOrigY[i], vecNewY[i]) rotMatZ[i] = self._getRotMatrix(vecOrigZ[i], vecNewZ[i]) - bX[i] = newCurveCoords[i] - np.dot(rotMatX[i], curvePtCoords[i]) - bY[i] = newCurveCoords[i] - np.dot(rotMatY[i], curvePtCoords[i]) - bZ[i] = newCurveCoords[i] - np.dot(rotMatZ[i], curvePtCoords[i]) + deltaX = np.zeros((nFilPts, nIntPts, 3)) + deltaY = np.zeros((nFilPts, nIntPts, 3)) + deltaZ = np.zeros((nFilPts, nIntPts, 3)) for i in range(nFilPts): - xf = self.filletComp.surfPtsOrig[i] + xf0 = self.filletComp.surfPtsOrig[i] # original fillet point for j in range(nIntPts): - deltaX[i, j, :] = np.matmul(rotMatX[j], xf) + bX[j] - xf - deltaY[i, j, :] = np.matmul(rotMatY[j], xf) + bY[j] - xf - deltaZ[i, j, :] = np.matmul(rotMatZ[j], xf) + bZ[j] - xf + xc0 = curvePtCoords[j] # original curve point + xcj = newCurveCoords[j] # updated curve point + + Mjx = rotMatX[j] + Mjy = rotMatY[j] + Mjz = rotMatZ[j] + + deltaX[i, j, :] = ( + xcj + - xf0 + + np.dot(Mjx, xf0) + # + np.dot(Mjy, xf0) + # + np.dot(Mjz, xf0) + - np.dot(Mjx, xc0) + # - np.dot(Mjy, xc0) + # - np.dot(Mjz, xc0) + ) + deltaY[i, j, :] = xcj - xf0 + np.dot(Mjy, xf0) - np.dot(Mjy, xc0) + deltaZ[i, j, :] = xcj - xf0 + np.dot(Mjz, xf0) - np.dot(Mjz, xc0) - delta = deltaX + deltaY + deltaZ + # delta = np.mean((deltaX, deltaY, deltaZ), axis=0) + delta = deltaX ptsNew = self.filletComp.surfPtsOrig.copy() pts0 = self.filletComp.surfPtsOrig From f0a0fd15d0a5c6dd9d1b0579aee62c88b9185291 Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Mon, 6 Jan 2025 21:41:08 -0500 Subject: [PATCH 106/110] options for rotation vector --- pygeo/parameterization/DVGeoMulti.py | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index cb8757c5..15ab66b6 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -245,7 +245,7 @@ def addIntersection( remeshBwd=True, anisotropy=[1.0, 1.0, 1.0], tangency=False, - rotate=False, + rotate=None, ): """ Method that defines intersections between components. 
@@ -4076,7 +4076,7 @@ def project(self, ptSetName, newPts): self.filletComp.surfPts[ii] = self.filletComp.surfPtsOrig[ii] + disp - elif self.rotate: + elif self.rotate is not None: newCurveCoords = np.vstack((self.compA.curvePts, self.compB.curvePts)) curvePtCoords = np.vstack((self.compA.curvePtsOrig, self.compB.curvePtsOrig)) @@ -4123,21 +4123,20 @@ def project(self, ptSetName, newPts): Mjy = rotMatY[j] Mjz = rotMatZ[j] - deltaX[i, j, :] = ( - xcj - - xf0 - + np.dot(Mjx, xf0) - # + np.dot(Mjy, xf0) - # + np.dot(Mjz, xf0) - - np.dot(Mjx, xc0) - # - np.dot(Mjy, xc0) - # - np.dot(Mjz, xc0) - ) + deltaX[i, j, :] = xcj - xf0 + np.dot(Mjx, xf0) - np.dot(Mjx, xc0) deltaY[i, j, :] = xcj - xf0 + np.dot(Mjy, xf0) - np.dot(Mjy, xc0) deltaZ[i, j, :] = xcj - xf0 + np.dot(Mjz, xf0) - np.dot(Mjz, xc0) # delta = np.mean((deltaX, deltaY, deltaZ), axis=0) - delta = deltaX + rotate = self.rotate.lower() + if rotate == "x": + delta = deltaX + elif rotate == "y": + delta = deltaY + elif rotate == "z": + delta = deltaZ + else: + print("no!") ptsNew = self.filletComp.surfPtsOrig.copy() pts0 = self.filletComp.surfPtsOrig From 92aff842da85e0165335e5bd6072d31152ce789d Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Fri, 23 May 2025 10:36:44 -0400 Subject: [PATCH 107/110] hide vscode junk, results of mphys tests, loose dat files --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 9774ab5d..d57c5df2 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,7 @@ reg_tests/pygeo_reg.orig doc/_build *testflo_report.out input_files -.isort.cfg \ No newline at end of file +.isort.cfg +reports/ +.vscode/ +*.dat \ No newline at end of file From c663cc4fd3766064bcee4489546262594a39df3f Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Sat, 27 Sep 2025 11:02:15 -0400 Subject: [PATCH 108/110] isort --- pygeo/parameterization/DVGeoMulti.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index f89e7f6d..a5197f21 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1,15 +1,14 @@ # Standard Python modules from collections import OrderedDict +from copy import deepcopy # External modules from baseclasses.utils import Error from mpi4py import MPI import numpy as np +from pyspline.utils import closeTecplot, openTecplot, writeTecplot1D from scipy import sparse from scipy.spatial.distance import cdist -from copy import deepcopy -from pyspline.utils import closeTecplot, openTecplot, writeTecplot1D - try: # External modules From 33bd04ab1f19cd3f7ee072bcd1814505e99384ac Mon Sep 17 00:00:00 2001 From: Hannah Hajdik Date: Sat, 27 Sep 2025 11:04:15 -0400 Subject: [PATCH 109/110] format --- pygeo/parameterization/DVGeoMulti.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index a5197f21..5ccd4527 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -1632,9 +1632,9 @@ def __init__(self, compA, compB, distTol, DVGeo, project, dtype=float): # flag to determine if we want to project nodes after intersection treatment self.projectFlag = project - if dtype == float: + if dtype is float: self.mpi_type = MPI.DOUBLE - elif dtype == complex: + elif dtype is complex: self.mpi_type = MPI.C_DOUBLE_COMPLEX def setSurface(self, comm): From a32a99718e07590f43e65066391c7bad471b4c02 Mon Sep 17 00:00:00 2001 From: 
Hannah Hajdik Date: Sat, 27 Sep 2025 11:07:23 -0400 Subject: [PATCH 110/110] format 2 --- pygeo/parameterization/DVGeoMulti.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pygeo/parameterization/DVGeoMulti.py b/pygeo/parameterization/DVGeoMulti.py index 5ccd4527..b02adc77 100644 --- a/pygeo/parameterization/DVGeoMulti.py +++ b/pygeo/parameterization/DVGeoMulti.py @@ -4138,7 +4138,6 @@ def sens(self, dIdPt, ptSetName, comm): return compSens def project(self, ptSetName, newPts): - # update the pointset unless we haven't figured out the intersections yet if len(self.compA.curvePts) > 0: # TODO change to a first project flag or something if self.tangency:
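
For reference, the following is a minimal, standalone NumPy sketch of the warping scheme that PATCH 103-106 converge on: build a Rodrigues rotation per curve point from the original and deformed finite-difference vectors, form a rigid rotation-plus-translation displacement for each (fillet point, curve point) pair (the same expression as deltaX in PATCH 105), and blend the displacements with inverse-distance-cubed weights as in _warpSurfPts2. It is an illustration under assumptions, not the pygeo implementation: the helper names rodrigues and warp_fillet_points are hypothetical, all arrays are assumed to have shape (nPts, 3), and the small epsilons mirror the safeguards used in the patches.

import numpy as np


def rodrigues(v1, v2, eps=1e-8):
    # rotation matrix mapping the direction of v1 onto the direction of v2
    v1 = v1 / np.linalg.norm(v1)
    v2 = v2 / np.linalg.norm(v2)
    theta = np.arccos(np.clip(np.dot(v1, v2), -1.0, 1.0))
    axis = np.cross(v1, v2)
    axis = axis / (np.linalg.norm(axis) + eps)  # avoid NaN when v1 and v2 are (anti)parallel
    wx, wy, wz = axis
    w = np.array([[0.0, -wz, wy], [wz, 0.0, -wx], [-wy, wx, 0.0]])
    return np.identity(3) + np.sin(theta) * w + (1.0 - np.cos(theta)) * (w @ w)


def warp_fillet_points(filletPts0, curvePts0, curvePtsNew, vecOrig, vecNew):
    # rigid (rotation + translation) warp of fillet points, blended by inverse-distance weights
    nFil = filletPts0.shape[0]
    nCrv = curvePts0.shape[0]

    # one rotation per curve point, from the original to the deformed finite-difference vector
    rotMat = np.array([rodrigues(vecOrig[j], vecNew[j]) for j in range(nCrv)])

    # per-(fillet point, curve point) displacement:
    # delta = xcNew - xf0 + R @ (xf0 - xc0), identical to deltaX in PATCH 105
    delta = np.zeros((nFil, nCrv, 3))
    for i in range(nFil):
        xf0 = filletPts0[i]
        for j in range(nCrv):
            delta[i, j] = curvePtsNew[j] - xf0 + rotMat[j] @ (xf0 - curvePts0[j])

    # inverse-distance-cubed blending, as in _warpSurfPts2
    ptsNew = np.empty_like(filletPts0)
    for i in range(nFil):
        rr = filletPts0[i] - curvePts0
        Wi = 1.0 / (np.linalg.norm(rr, axis=1) ** 3 + 1e-16)
        ptsNew[i] = filletPts0[i] + (Wi[:, None] * delta[i]).sum(axis=0) / Wi.sum()
    return ptsNew


# toy usage: one curve point whose FD vector rotates 90 degrees about z;
# a fillet point one unit away follows the rigid rotation
curvePts0 = np.array([[0.0, 0.0, 0.0]])
curvePtsNew = np.array([[0.0, 0.0, 0.0]])
vecOrig = np.array([[1.0, 0.0, 0.0]])
vecNew = np.array([[0.0, 1.0, 0.0]])
filletPts0 = np.array([[1.0, 0.0, 0.0]])
print(warp_fillet_points(filletPts0, curvePts0, curvePtsNew, vecOrig, vecNew))  # ~[[0, 1, 0]]

With a single curve point the weighted blend reduces to the rigid motion itself, which is why the toy case lands exactly on the rotated location; with many curve points the per-point rigid motions generally disagree, and the inverse-distance weighting is what makes the result smooth but also mesh dependent, as noted in the PATCH 103 subject line.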