diff --git a/geos-processing/src/geos/processing/generic_processing_tools/AttributeMapping.py b/geos-processing/src/geos/processing/generic_processing_tools/AttributeMapping.py index ba7e5852..598eedc0 100644 --- a/geos-processing/src/geos/processing/generic_processing_tools/AttributeMapping.py +++ b/geos-processing/src/geos/processing/generic_processing_tools/AttributeMapping.py @@ -33,6 +33,7 @@ .. code-block:: python + import logging from geos.processing.generic_processing_tools.AttributeMapping import AttributeMapping # Filter inputs. @@ -57,7 +58,13 @@ attributeMappingFilter.setLoggerHandler( yourHandler ) # Do calculations. - attributeMappingFilter.applyFilter() + try: + attributeMappingFilter.applyFilter() + except( ValueError, AttributeError ) as e: + attributeMappingFilter.logger.error( f"The filter { attributeMappingFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { attributeMappingFilter.logger.name } failed due to: { e }" + attributeMappingFilter.logger.critical( mess, exc_info=True ) """ loggerTitle: str = "Attribute Mapping" @@ -130,70 +137,61 @@ def getElementMap( self: Self ) -> dict[ int, npt.NDArray[ np.int64 ] ]: """ return self.ElementMap - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Transfer global attributes from a source mesh to a final mesh. Mapping the piece of the attributes to transfer. - Returns: - boolean (bool): True if calculation successfully ended, False otherwise. + Raises: + ValueError: Errors with the input attributeNames or the input mesh. + AttributeError: Errors with the attribute of the mesh. """ self.logger.info( f"Apply filter { self.logger.name }." ) - try: - if len( self.attributeNames ) == 0: - raise ValueError( f"Please enter at least one { self.piece } attribute to transfer." ) + if len( self.attributeNames ) == 0: + raise ValueError( f"Please enter at least one { self.piece } attribute to transfer." 
) - attributesInMeshFrom: set[ str ] = getAttributeSet( self.meshFrom, self.onPoints ) - wrongAttributeNames: set[ str ] = self.attributeNames.difference( attributesInMeshFrom ) - if len( wrongAttributeNames ) > 0: - raise AttributeError( - f"The { self.piece } attributes { wrongAttributeNames } are not present in the source mesh." ) + attributesInMeshFrom: set[ str ] = getAttributeSet( self.meshFrom, self.onPoints ) + wrongAttributeNames: set[ str ] = self.attributeNames.difference( attributesInMeshFrom ) + if len( wrongAttributeNames ) > 0: + raise AttributeError( + f"The { self.piece } attributes { wrongAttributeNames } are not present in the source mesh." ) + + attributesInMeshTo: set[ str ] = getAttributeSet( self.meshTo, self.onPoints ) + attributesAlreadyInMeshTo: set[ str ] = self.attributeNames.intersection( attributesInMeshTo ) + if len( attributesAlreadyInMeshTo ) > 0: + raise AttributeError( + f"The { self.piece } attributes { attributesAlreadyInMeshTo } are already present in the final mesh." ) - attributesInMeshTo: set[ str ] = getAttributeSet( self.meshTo, self.onPoints ) - attributesAlreadyInMeshTo: set[ str ] = self.attributeNames.intersection( attributesInMeshTo ) - if len( attributesAlreadyInMeshTo ) > 0: + if isinstance( self.meshFrom, vtkMultiBlockDataSet ): + partialAttributes: list[ str ] = [] + for attributeName in self.attributeNames: + if not isAttributeGlobal( self.meshFrom, attributeName, self.onPoints ): + partialAttributes.append( attributeName ) + + if len( partialAttributes ) > 0: raise AttributeError( - f"The { self.piece } attributes { attributesAlreadyInMeshTo } are already present in the final mesh." - ) + f"All { self.piece } attributes to transfer must be global, { partialAttributes } are partials." 
) - if isinstance( self.meshFrom, vtkMultiBlockDataSet ): - partialAttributes: list[ str ] = [] - for attributeName in self.attributeNames: - if not isAttributeGlobal( self.meshFrom, attributeName, self.onPoints ): - partialAttributes.append( attributeName ) + self.ElementMap = computeElementMapping( self.meshFrom, self.meshTo, self.onPoints ) + sharedElement: bool = False + for key in self.ElementMap: + if np.any( self.ElementMap[ key ] > -1 ): + sharedElement = True - if len( partialAttributes ) > 0: - raise AttributeError( - f"All { self.piece } attributes to transfer must be global, { partialAttributes } are partials." - ) + if not sharedElement: + raise ValueError( f"The two meshes do not have any shared { self.piece }." ) - self.ElementMap = computeElementMapping( self.meshFrom, self.meshTo, self.onPoints ) - sharedElement: bool = False - for key in self.ElementMap: - if np.any( self.ElementMap[ key ] > -1 ): - sharedElement = True + for attributeName in self.attributeNames: + # TODO:: Modify arrayModifiers function to raise error. + if not transferAttributeWithElementMap( self.meshFrom, self.meshTo, self.ElementMap, attributeName, + self.onPoints, self.logger ): + raise ValueError( f"Fail to transfer the attribute { attributeName }." ) - if not sharedElement: - raise ValueError( f"The two meshes do not have any shared { self.piece }." ) + # Log the output message. + self._logOutputMessage() - for attributeName in self.attributeNames: - # TODO:: Modify arrayModifiers function to raise error. - if not transferAttributeWithElementMap( self.meshFrom, self.meshTo, self.ElementMap, attributeName, - self.onPoints, self.logger ): - raise - - # Log the output message. 
- self._logOutputMessage() - except ( TypeError, ValueError, AttributeError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + return def _logOutputMessage( self: Self ) -> None: """Create and log result messages of the filter.""" diff --git a/geos-processing/src/geos/processing/generic_processing_tools/CreateConstantAttributePerRegion.py b/geos-processing/src/geos/processing/generic_processing_tools/CreateConstantAttributePerRegion.py index c5fc2c38..df737474 100644 --- a/geos-processing/src/geos/processing/generic_processing_tools/CreateConstantAttributePerRegion.py +++ b/geos-processing/src/geos/processing/generic_processing_tools/CreateConstantAttributePerRegion.py @@ -66,7 +66,13 @@ createConstantAttributePerRegionFilter.addLoggerHandler( yourHandler ) # Do calculations. - createConstantAttributePerRegionFilter.applyFilter() + try: + createConstantAttributePerRegionFilter.applyFilter() + except ( ValueError, AttributeError ) as e: + createConstantAttributePerRegionFilter.logger.error( f"The filter { createConstantAttributePerRegionFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { createConstantAttributePerRegionFilter.logger.name } failed due to: { e }" + createConstantAttributePerRegionFilter.logger.critical( mess, exc_info=True ) """ loggerTitle: str = "Create Constant Attribute Per Region" @@ -149,132 +155,127 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: self.logger.warning( "The logger already has an handler, to use yours set the argument 'speHandler' to True" " during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Create a constant attribute per region in the mesh. 
- Returns: - boolean (bool): True if calculation successfully ended, False otherwise. + Raises: + ValueError: Errors with the input value for the region index or errors during the creation of the new attribute. + AttributeError: Errors with the attribute of the mesh. """ self.logger.info( f"Apply filter { self.logger.name }." ) # Add the handler to count warnings messages. self.logger.addHandler( self.counter ) - try: - # Check the validity of the attribute region. - if self.onPoints is None: - raise AttributeError( f"{ self.regionName } is not in the mesh." ) + # Check the validity of the attribute region. + if self.onPoints is None: + raise AttributeError( f"The attribute { self.regionName } is not in the mesh." ) - if self.onBoth: - raise ValueError( - f"There are two attributes named { self.regionName }, one on points and the other on cells. The region attribute must be unique." - ) + if self.onBoth: + raise AttributeError( + f"There are two attributes named { self.regionName }, one on points and the other on cells. The region attribute must be unique." + ) - nbComponentsRegion: int = getNumberOfComponents( self.mesh, self.regionName, self.onPoints ) - if nbComponentsRegion != 1: - raise ValueError( f"The region attribute { self.regionName } has to many components, one is requires." ) + nbComponentsRegion: int = getNumberOfComponents( self.mesh, self.regionName, self.onPoints ) + if nbComponentsRegion != 1: + raise AttributeError( f"The region attribute { self.regionName } has to many components, one is requires." ) - self._setInfoRegion() - # Check if the number of components and number of values for the region indexes are coherent. - for index in self.dictRegionValues: - if len( self.dictRegionValues[ index ] ) != self.nbComponents: - raise ValueError( - f"The number of value given for the region index { index } is not correct. You must set a value for each component, in this case { self.nbComponents }." 
- ) - - listIndexes: list[ Any ] = list( self.dictRegionValues.keys() ) - validIndexes: list[ Any ] = [] - invalidIndexes: list[ Any ] = [] - regionArray: npt.NDArray[ Any ] - newArray: npt.NDArray[ Any ] - if isinstance( self.mesh, vtkMultiBlockDataSet ): - # Check if the attribute region is global. - if not isAttributeGlobal( self.mesh, self.regionName, self.onPoints ): - raise AttributeError( f"The region attribute { self.regionName } has to be global." ) - - validIndexes, invalidIndexes = checkValidValuesInMultiBlock( self.mesh, self.regionName, listIndexes, - self.onPoints ) - if len( validIndexes ) == 0: - if len( self.dictRegionValues ) == 0: - self.logger.warning( "No region indexes entered." ) - else: - self.logger.warning( - f"The region indexes entered are not in the region attribute { self.regionName }." ) - - if not createConstantAttributeMultiBlock( self.mesh, - self.defaultValue, - self.newAttributeName, - componentNames=self.componentNames, - onPoints=self.onPoints, - logger=self.logger ): - raise + self._setInfoRegion() + # Check if the number of components and number of values for the region indexes are coherent. + for index in self.dictRegionValues: + if len( self.dictRegionValues[ index ] ) != self.nbComponents: + raise ValueError( + f"The number of value given for the region index { index } is not correct. You must set a value for each component, in this case { self.nbComponents }." + ) + listIndexes: list[ Any ] = list( self.dictRegionValues.keys() ) + validIndexes: list[ Any ] = [] + invalidIndexes: list[ Any ] = [] + regionArray: npt.NDArray[ Any ] + newArray: npt.NDArray[ Any ] + if isinstance( self.mesh, vtkMultiBlockDataSet ): + # Check if the attribute region is global. + if not isAttributeGlobal( self.mesh, self.regionName, self.onPoints ): + raise AttributeError( f"The region attribute { self.regionName } has to be global." 
) + + validIndexes, invalidIndexes = checkValidValuesInMultiBlock( self.mesh, self.regionName, listIndexes, + self.onPoints ) + if len( validIndexes ) == 0: + if len( self.dictRegionValues ) == 0: + self.logger.warning( "No region index entered." ) else: - if len( invalidIndexes ) > 0: - self.logger.warning( - f"The region indexes { invalidIndexes } are not in the region attribute { self.regionName }." - ) - - # Parse the mesh to add the attribute on each dataset. - listFlatIdDataSet: list[ int ] = getBlockElementIndexesFlatten( self.mesh ) - for flatIdDataSet in listFlatIdDataSet: - dataSet: vtkDataSet = vtkDataSet.SafeDownCast( self.mesh.GetDataSet( flatIdDataSet ) ) - - regionArray = getArrayInObject( dataSet, self.regionName, self.onPoints ) - newArray = self._createArrayFromRegionArrayWithValueMap( regionArray ) - if not createAttribute( dataSet, - newArray, - self.newAttributeName, - componentNames=self.componentNames, - onPoints=self.onPoints, - logger=self.logger ): - raise + self.logger.warning( + f"The region indexes entered are not in the region attribute { self.regionName }." ) + + if not createConstantAttributeMultiBlock( self.mesh, + self.defaultValue, + self.newAttributeName, + componentNames=self.componentNames, + onPoints=self.onPoints, + logger=self.logger ): + raise ValueError( + f"Something went wrong with the creation of the attribute { self.newAttributeName }." ) else: - validIndexes, invalidIndexes = checkValidValuesInDataSet( self.mesh, self.regionName, listIndexes, - self.onPoints ) - if len( validIndexes ) == 0: - if len( self.dictRegionValues ) == 0: - self.logger.warning( "No region indexes entered." ) - else: - self.logger.warning( - f"The region indexes entered are not in the region attribute { self.regionName }." 
) - - if not createConstantAttributeDataSet( self.mesh, - self.defaultValue, - self.newAttributeName, - componentNames=self.componentNames, - onPoints=self.onPoints, - logger=self.logger ): - raise + if len( invalidIndexes ) > 0: + self.logger.warning( + f"The region indexes { invalidIndexes } are not in the region attribute { self.regionName }." ) - else: - if len( invalidIndexes ) > 0: - self.logger.warning( - f"The region indexes { invalidIndexes } are not in the region attribute { self.regionName }." - ) + # Parse the mesh to add the attribute on each dataset. + listFlatIdDataSet: list[ int ] = getBlockElementIndexesFlatten( self.mesh ) + for flatIdDataSet in listFlatIdDataSet: + dataSet: vtkDataSet = vtkDataSet.SafeDownCast( self.mesh.GetDataSet( flatIdDataSet ) ) - regionArray = getArrayInObject( self.mesh, self.regionName, self.onPoints ) + regionArray = getArrayInObject( dataSet, self.regionName, self.onPoints ) newArray = self._createArrayFromRegionArrayWithValueMap( regionArray ) - if not createAttribute( self.mesh, + if not createAttribute( dataSet, newArray, self.newAttributeName, componentNames=self.componentNames, onPoints=self.onPoints, logger=self.logger ): - raise - - # Log the output message. - self._logOutputMessage( validIndexes ) - except ( ValueError, AttributeError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + raise ValueError( + f"Something went wrong with the creation of the attribute { self.newAttributeName }." ) + + else: + validIndexes, invalidIndexes = checkValidValuesInDataSet( self.mesh, self.regionName, listIndexes, + self.onPoints ) + if len( validIndexes ) == 0: + if len( self.dictRegionValues ) == 0: + self.logger.warning( "No region index entered." 
) + else: + self.logger.warning( + f"The region indexes entered are not in the region attribute { self.regionName }." ) + + if not createConstantAttributeDataSet( self.mesh, + self.defaultValue, + self.newAttributeName, + componentNames=self.componentNames, + onPoints=self.onPoints, + logger=self.logger ): + raise ValueError( + f"Something went wrong with the creation of the attribute { self.newAttributeName }." ) + + else: + if len( invalidIndexes ) > 0: + self.logger.warning( + f"The region indexes { invalidIndexes } are not in the region attribute { self.regionName }." ) + + regionArray = getArrayInObject( self.mesh, self.regionName, self.onPoints ) + newArray = self._createArrayFromRegionArrayWithValueMap( regionArray ) + if not createAttribute( self.mesh, + newArray, + self.newAttributeName, + componentNames=self.componentNames, + onPoints=self.onPoints, + logger=self.logger ): + raise ValueError( + f"Something went wrong with the creation of the attribute { self.newAttributeName }." ) + + # Log the output message. + self._logOutputMessage( validIndexes ) + + return def _setInfoRegion( self: Self ) -> None: """Update self.dictRegionValues and set self.defaultValue. diff --git a/geos-processing/src/geos/processing/generic_processing_tools/FillPartialArrays.py b/geos-processing/src/geos/processing/generic_processing_tools/FillPartialArrays.py index 22d7275a..2851d033 100644 --- a/geos-processing/src/geos/processing/generic_processing_tools/FillPartialArrays.py +++ b/geos-processing/src/geos/processing/generic_processing_tools/FillPartialArrays.py @@ -49,7 +49,13 @@ fillPartialArraysFilter.setLoggerHandler( yourHandler ) # Do calculations. 
- fillPartialArraysFilter.applyFilter() + try: + fillPartialArraysFilter.applyFilter() + except ( ValueError, AttributeError ) as e: + fillPartialArraysFilter.logger.error( f"The filter { fillPartialArraysFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { fillPartialArraysFilter.logger.name } failed due to: { e }" + fillPartialArraysFilter.logger.critical( mess, exc_info=True ) """ loggerTitle: str = "Fill Partial Attribute" @@ -106,40 +112,34 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: self.logger.warning( "The logger already has an handler, to use yours set the argument 'speHandler' to True" " during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Create a constant attribute per region in the mesh. - Returns: - boolean (bool): True if calculation successfully ended, False otherwise. + Raise: + AttributeError: Error with attributes to fill. + ValueError: Error during the filling of the attribute. """ self.logger.info( f"Apply filter { self.logger.name }." ) - try: - onPoints: Union[ None, bool ] - onBoth: bool - for attributeName in self.dictAttributesValues: - onPoints, onBoth = getAttributePieceInfo( self.multiBlockDataSet, attributeName ) - if onPoints is None: - raise ValueError( f"{ attributeName } is not in the mesh." ) - - if onBoth: - raise ValueError( - f"There is two attribute named { attributeName }, one on points and the other on cells. The attribute name must be unique." - ) - - if not fillPartialAttributes( self.multiBlockDataSet, - attributeName, - onPoints=onPoints, - listValues=self.dictAttributesValues[ attributeName ], - logger=self.logger ): - raise - - self.logger.info( f"The filter { self.logger.name } succeed." 
) - except ( ValueError, AttributeError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + + onPoints: Union[ None, bool ] + onBoth: bool + for attributeName in self.dictAttributesValues: + onPoints, onBoth = getAttributePieceInfo( self.multiBlockDataSet, attributeName ) + if onPoints is None: + raise AttributeError( f"The attribute { attributeName } is not in the mesh." ) + + if onBoth: + raise AttributeError( + f"There is two attribute named { attributeName }, one on points and the other on cells. The attribute name must be unique." + ) + + if not fillPartialAttributes( self.multiBlockDataSet, + attributeName, + onPoints=onPoints, + listValues=self.dictAttributesValues[ attributeName ], + logger=self.logger ): + raise ValueError( "Something went wrong with the filling of partial attributes" ) + + self.logger.info( f"The filter { self.logger.name } succeed." 
) + + return diff --git a/geos-processing/src/geos/processing/generic_processing_tools/MergeBlockEnhanced.py b/geos-processing/src/geos/processing/generic_processing_tools/MergeBlockEnhanced.py index 4630d642..87bc1bbb 100644 --- a/geos-processing/src/geos/processing/generic_processing_tools/MergeBlockEnhanced.py +++ b/geos-processing/src/geos/processing/generic_processing_tools/MergeBlockEnhanced.py @@ -8,7 +8,6 @@ from geos.utils.Logger import ( Logger, getLogger ) from geos.mesh.utils.multiblockModifiers import mergeBlocks -from geos.utils.Errors import VTKError from vtkmodules.vtkCommonDataModel import vtkMultiBlockDataSet, vtkUnstructuredGrid @@ -32,6 +31,7 @@ from geos.processing.generic_processing_tools.MergeBlockEnhanced import MergeBlockEnhanced import logging + from geos.utils.Errors import VTKError # Define filter inputs multiblockdataset: vtkMultiblockDataSet @@ -45,7 +45,13 @@ mergeBlockEnhancedFilter.setLoggerHandler( yourHandler ) # Do calculations - mergeBlockEnhancedFilter.applyFilter() + try: + mergeBlockEnhancedFilter.applyFilter() + except VTKError as e: + mergeBlockEnhancedFilter.logger.error( f"The filter { mergeBlockEnhancedFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { mergeBlockEnhancedFilter.logger.name } failed due to: { e }" + mergeBlockEnhancedFilter.logger.critical( mess, exc_info=True ) # Get the merged mesh mergeBlockEnhancedFilter.getOutput() @@ -99,29 +105,21 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: self.logger.warning( "The logger already has an handler, to use yours set the argument 'speHandler' to True" " during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Merge the blocks of a multiblock dataset mesh. - Returns: - bool: True if the blocks were successfully merged, False otherwise. + Raise: + VTKError (geos.utils.Errors): Errors captured if any from the VTK log. 
""" self.logger.info( f"Applying filter { self.logger.name }." ) - try: - outputMesh: vtkUnstructuredGrid - outputMesh = mergeBlocks( self.inputMesh, keepPartialAttributes=True, logger=self.logger ) - self.outputMesh = outputMesh - - self.logger.info( f"The filter { self.logger.name } succeeded." ) - except VTKError as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + outputMesh: vtkUnstructuredGrid + outputMesh = mergeBlocks( self.inputMesh, keepPartialAttributes=True, logger=self.logger ) + self.outputMesh = outputMesh + + self.logger.info( f"The filter { self.logger.name } succeeded." ) + + return def getOutput( self: Self ) -> vtkUnstructuredGrid: """Get the merged mesh. diff --git a/geos-processing/src/geos/processing/generic_processing_tools/SplitMesh.py b/geos-processing/src/geos/processing/generic_processing_tools/SplitMesh.py index 8cbe9d02..faa030d1 100644 --- a/geos-processing/src/geos/processing/generic_processing_tools/SplitMesh.py +++ b/geos-processing/src/geos/processing/generic_processing_tools/SplitMesh.py @@ -41,7 +41,13 @@ splitMeshFilter.setLoggerHandler( yourHandler ) # Do calculations - splitMeshFilter.applyFilter() + try: + splitMeshFilter.applyFilter() + except ( TypeError, AttributeError ) as e: + splitMeshFilter.logger.error( f"The filter {splitMeshFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { splitMeshFilter.logger.name } failed due to: { e }" + splitMeshFilter.logger.critical( mess, exc_info=True ) # Get splitted mesh outputMesh: vtkUnstructuredGrid = splitMeshFilter.getOutput() @@ -93,88 +99,80 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: self.logger.warning( "The logger already has an handler, to use yours set the argument 'speHandler' to True" " 
during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Apply the filter SplitMesh. - Returns: - bool: True if the filter succeeded, False otherwise. + Raises: + TypeError: Errors due to objects with the wrong type. + AttributeError: Errors with cell data. """ self.logger.info( f"Applying filter { self.logger.name }." ) - try: - # Count the number of cells before splitting. Then we will be able to know how many new cells and points - # to allocate because each cell type is splitted in a known number of new cells and points. - nbCells: int = self.inputMesh.GetNumberOfCells() - counts: CellTypeCounts = self._getCellCounts() - if counts.getTypeCount( VTK_WEDGE ) != 0: - raise TypeError( "Input mesh contains wedges that are not currently supported." ) - - nbPolygon: int = counts.getTypeCount( VTK_POLYGON ) - nbPolyhedra: int = counts.getTypeCount( VTK_POLYHEDRON ) - # Current implementation only supports meshes composed of either polygons or polyhedra - if nbPolyhedra * nbPolygon != 0: - raise TypeError( - "Input mesh is composed of both polygons and polyhedra, but it must contains only one of the two." 
) - - nbTet: int = counts.getTypeCount( VTK_TETRA ) # will divide into 8 tets - nbPyr: int = counts.getTypeCount( VTK_PYRAMID ) # will divide into 6 pyramids and 4 tets so 10 new cells - nbHex: int = counts.getTypeCount( VTK_HEXAHEDRON ) # will divide into 8 hexes - nbTriangles: int = counts.getTypeCount( VTK_TRIANGLE ) # will divide into 4 triangles - nbQuad: int = counts.getTypeCount( VTK_QUAD ) # will divide into 4 quads - nbNewPoints: int = 0 - nbNewPoints = nbHex * 19 + nbTet * 6 + nbPyr * 9 if nbPolyhedra > 0 else nbTriangles * 3 + nbQuad * 5 - nbNewCells: int = nbHex * 8 + nbTet * 8 + nbPyr * 10 + nbTriangles * 4 + nbQuad * 4 - - self.points = vtkPoints() - self.points.DeepCopy( self.inputMesh.GetPoints() ) - self.points.Resize( self.inputMesh.GetNumberOfPoints() + nbNewPoints ) - - self.cells = vtkCellArray() - self.cells.AllocateExact( nbNewCells, 8 ) - self.originalId = vtkIdTypeArray() - self.originalId.SetName( "OriginalID" ) - self.originalId.Allocate( nbNewCells ) - self.cellTypes = [] - - # Define cell type to splitting method mapping - splitMethods = { - VTK_HEXAHEDRON: self._splitHexahedron, - VTK_TETRA: self._splitTetrahedron, - VTK_PYRAMID: self._splitPyramid, - VTK_TRIANGLE: self._splitTriangle, - VTK_QUAD: self._splitQuad, - } - for c in range( nbCells ): - cell: vtkCell = self.inputMesh.GetCell( c ) - cellType: int = cell.GetCellType() - splitMethod = splitMethods.get( cellType ) - if splitMethod is not None: - splitMethod( cell, c ) - else: - raise TypeError( - f"Cell type { vtkCellTypes.GetClassNameFromTypeId( cellType ) } is not supported." ) - - # Add points and cells - self.outputMesh.SetPoints( self.points ) - self.outputMesh.SetCells( self.cellTypes, self.cells ) - - # Add attribute saving original cell ids - cellArrays: vtkCellData = self.outputMesh.GetCellData() - if cellArrays is None: - raise AttributeError( "Cell data is undefined." 
) - cellArrays.AddArray( self.originalId ) - - # Transfer all cell arrays - self._transferCellArrays( self.outputMesh ) - self.logger.info( f"The filter { self.logger.name } succeeded." ) - except ( TypeError, AttributeError ) as e: - self.logger.error( f"The filter {self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + # Count the number of cells before splitting. Then we will be able to know how many new cells and points + # to allocate because each cell type is splitted in a known number of new cells and points. + nbCells: int = self.inputMesh.GetNumberOfCells() + counts: CellTypeCounts = self._getCellCounts() + if counts.getTypeCount( VTK_WEDGE ) != 0: + raise TypeError( "Input mesh contains wedges that are not currently supported." ) + + nbPolygon: int = counts.getTypeCount( VTK_POLYGON ) + nbPolyhedra: int = counts.getTypeCount( VTK_POLYHEDRON ) + # Current implementation only supports meshes composed of either polygons or polyhedra + if nbPolyhedra * nbPolygon != 0: + raise TypeError( + "Input mesh is composed of both polygons and polyhedra, but it must contains only one of the two." 
) + + nbTet: int = counts.getTypeCount( VTK_TETRA ) # will divide into 8 tets + nbPyr: int = counts.getTypeCount( VTK_PYRAMID ) # will divide into 6 pyramids and 4 tets so 10 new cells + nbHex: int = counts.getTypeCount( VTK_HEXAHEDRON ) # will divide into 8 hexes + nbTriangles: int = counts.getTypeCount( VTK_TRIANGLE ) # will divide into 4 triangles + nbQuad: int = counts.getTypeCount( VTK_QUAD ) # will divide into 4 quads + nbNewPoints: int = 0 + nbNewPoints = nbHex * 19 + nbTet * 6 + nbPyr * 9 if nbPolyhedra > 0 else nbTriangles * 3 + nbQuad * 5 + nbNewCells: int = nbHex * 8 + nbTet * 8 + nbPyr * 10 + nbTriangles * 4 + nbQuad * 4 + + self.points = vtkPoints() + self.points.DeepCopy( self.inputMesh.GetPoints() ) + self.points.Resize( self.inputMesh.GetNumberOfPoints() + nbNewPoints ) + + self.cells = vtkCellArray() + self.cells.AllocateExact( nbNewCells, 8 ) + self.originalId = vtkIdTypeArray() + self.originalId.SetName( "OriginalID" ) + self.originalId.Allocate( nbNewCells ) + self.cellTypes = [] + + # Define cell type to splitting method mapping + splitMethods = { + VTK_HEXAHEDRON: self._splitHexahedron, + VTK_TETRA: self._splitTetrahedron, + VTK_PYRAMID: self._splitPyramid, + VTK_TRIANGLE: self._splitTriangle, + VTK_QUAD: self._splitQuad, + } + for c in range( nbCells ): + cell: vtkCell = self.inputMesh.GetCell( c ) + cellType: int = cell.GetCellType() + splitMethod = splitMethods.get( cellType ) + if splitMethod is not None: + splitMethod( cell, c ) + else: + raise TypeError( f"Cell type { vtkCellTypes.GetClassNameFromTypeId( cellType ) } is not supported." ) + + # Add points and cells + self.outputMesh.SetPoints( self.points ) + self.outputMesh.SetCells( self.cellTypes, self.cells ) + + # Add attribute saving original cell ids + cellArrays: vtkCellData = self.outputMesh.GetCellData() + if cellArrays is None: + raise AttributeError( "Cell data is undefined." 
) + cellArrays.AddArray( self.originalId ) + + # Transfer all cell arrays + self._transferCellArrays( self.outputMesh ) + self.logger.info( f"The filter { self.logger.name } succeeded." ) + + return def getOutput( self: Self ) -> vtkUnstructuredGrid: """Get the splitted mesh computed.""" @@ -190,8 +188,7 @@ def _getCellCounts( self: Self ) -> CellTypeCounts: self.inputMesh, self.speHandler ) if self.speHandler and len( cellTypeCounterEnhancedFilter.logger.handlers ) == 0: cellTypeCounterEnhancedFilter.setLoggerHandler( self.handler ) - if not cellTypeCounterEnhancedFilter.applyFilter(): - raise + cellTypeCounterEnhancedFilter.applyFilter() return cellTypeCounterEnhancedFilter.GetCellTypeCountsObject() def _addMidPoint( self: Self, ptA: int, ptB: int ) -> int: @@ -447,6 +444,9 @@ def _transferCellArrays( self: Self, splittedMesh: vtkUnstructuredGrid ) -> None Args: splittedMesh (vtkUnstructuredGrid): Splitted mesh. + + Raises: + AttributeError: Cell attribute splitted are not defined. """ cellData: vtkCellData = self.inputMesh.GetCellData() if cellData is None: diff --git a/geos-processing/src/geos/processing/post_processing/GeomechanicsCalculator.py b/geos-processing/src/geos/processing/post_processing/GeomechanicsCalculator.py index 2e7677a9..ff9a0d88 100644 --- a/geos-processing/src/geos/processing/post_processing/GeomechanicsCalculator.py +++ b/geos-processing/src/geos/processing/post_processing/GeomechanicsCalculator.py @@ -94,7 +94,13 @@ geomechanicsCalculatorFilter.physicalConstants.frictionAngle = frictionAngle # Do calculations - geomechanicsCalculatorFilter.applyFilter() + try: + geomechanicsCalculatorFilter.applyFilter() + except ( ValueError, AttributeError ) as e: + geomechanicsCalculatorFilter.logger.error( f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to: { e }" + geomechanicsCalculatorFilter.logger.critical( 
mess, exc_info=True ) # Get the mesh with the geomechanics properties computed as attribute output: vtkUnstructuredGrid @@ -705,54 +711,48 @@ def __init__( self.logger.setLevel( logging.INFO ) self.logger.propagate = False - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Compute the geomechanics properties and create attributes on the mesh. - Returns: - bool: True if the filter succeeded, False otherwise. + Raises: + AttributeError: A mandatory attribute is missing. + ValueError: Something went wrong during the creation of an attribute. """ self.logger.info( f"Apply filter { self.logger.name }." ) - try: - self._checkMandatoryProperties() - self._computeBasicProperties() - - if self.doComputeAdvancedProperties: - self._computeAdvancedProperties() - - # Create an attribute on the mesh for each geomechanics properties computed: - for attribute in self._attributesToCreate: - attributeName: str = attribute.attributeName - onPoints: bool = attribute.isOnPoints - array: npt.NDArray[ np.float64 ] | None - if attribute in ELASTIC_MODULI: - array = self._elasticModuli.getElasticModulusValue( attributeName ) - elif attribute in BASIC_PROPERTIES: - array = self._basicProperties.getBasicPropertyValue( attributeName ) - elif attribute in ADVANCED_PROPERTIES: - array = self._advancedProperties.getAdvancedPropertyValue( attributeName ) - componentNames: tuple[ str, ...] = () - if attribute.nbComponent == 6: - componentNames = ComponentNameEnum.XYZ.value - - createAttribute( self.output, - array, - attributeName, - componentNames=componentNames, - onPoints=onPoints, - logger=self.logger ) - - self.logger.info( "All the geomechanics properties have been added to the mesh." ) - self.logger.info( f"The filter { self.logger.name } succeeded." 
) - except ( ValueError, TypeError, NameError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + self._checkMandatoryProperties() + self._computeBasicProperties() + + if self.doComputeAdvancedProperties: + self._computeAdvancedProperties() + + # Create an attribute on the mesh for each geomechanics properties computed: + for attribute in self._attributesToCreate: + attributeName: str = attribute.attributeName + onPoints: bool = attribute.isOnPoints + array: npt.NDArray[ np.float64 ] | None + if attribute in ELASTIC_MODULI: + array = self._elasticModuli.getElasticModulusValue( attributeName ) + elif attribute in BASIC_PROPERTIES: + array = self._basicProperties.getBasicPropertyValue( attributeName ) + elif attribute in ADVANCED_PROPERTIES: + array = self._advancedProperties.getAdvancedPropertyValue( attributeName ) + componentNames: tuple[ str, ...] = () + if attribute.nbComponent == 6: + componentNames = ComponentNameEnum.XYZ.value + + if not createAttribute( self.output, + array, + attributeName, + componentNames=componentNames, + onPoints=onPoints, + logger=self.logger ): + raise ValueError( f"Something went wrong during the creation of the attribute { attributeName }." ) + + self.logger.info( "All the geomechanics properties have been added to the mesh." ) + self.logger.info( f"The filter { self.logger.name } succeeded." ) + + return def getOutput( self: Self ) -> vtkUnstructuredGrid: """Get the mesh with the geomechanics properties computed as attributes. @@ -798,6 +798,9 @@ def _checkMandatoryProperties( self: Self ) -> None: - The density named "density" - The effective stress named "stressEffective" - The initial effective stress named "stressEffectiveInitial" + + Raises: + AttributeError: A mandatory attribute is missing. 
""" mess: str for elasticModulus in ELASTIC_MODULI: @@ -821,7 +824,7 @@ def _checkMandatoryProperties( self: Self ) -> None: self.computeYoungPoisson = True else: mess = f"{ BULK_MODULUS.attributeName } or { SHEAR_MODULUS.attributeName } are missing to compute geomechanics properties." - raise ValueError( mess ) + raise AttributeError( mess ) elif self._elasticModuli.bulkModulus is None and self._elasticModuli.shearModulus is None: if self._elasticModuli.youngModulus is not None and self._elasticModuli.poissonRatio is not None: self._elasticModuli.bulkModulus = fcts.bulkModulus( self._elasticModuli.youngModulus, @@ -833,10 +836,10 @@ def _checkMandatoryProperties( self: Self ) -> None: self.computeYoungPoisson = False else: mess = f"{ YOUNG_MODULUS.attributeName } or { POISSON_RATIO.attributeName } are missing to compute geomechanics properties." - raise ValueError( mess ) + raise AttributeError( mess ) else: mess = f"{ BULK_MODULUS.attributeName } and { SHEAR_MODULUS.attributeName } or { YOUNG_MODULUS.attributeName } and { POISSON_RATIO.attributeName } are mandatory to compute geomechanics properties." - raise ValueError( mess ) + raise AttributeError( mess ) # Check the presence of the elastic moduli at the initial time. if self._elasticModuli.bulkModulusT0 is None: @@ -846,7 +849,7 @@ def _checkMandatoryProperties( self: Self ) -> None: self._attributesToCreate.append( BULK_MODULUS_T0 ) else: mess = f"{ BULK_MODULUS_T0.attributeName } or { YOUNG_MODULUS_T0.attributeName } and { POISSON_RATIO_T0.attributeName } are mandatory to compute geomechanics properties." 
- raise ValueError( mess ) + raise AttributeError( mess ) # Check the presence of the other mandatory properties for mandatoryAttribute in MANDATORY_PROPERTIES: @@ -854,7 +857,7 @@ def _checkMandatoryProperties( self: Self ) -> None: mandatoryAttributeOnPoints: bool = mandatoryAttribute.isOnPoints if not isAttributeInObject( self.output, mandatoryAttributeName, mandatoryAttributeOnPoints ): mess = f"The mandatory property { mandatoryAttributeName } is missing to compute geomechanical properties." - raise ValueError( mess ) + raise AttributeError( mess ) else: self._mandatoryProperties.setMandatoryPropertyValue( mandatoryAttributeName, @@ -1059,6 +1062,9 @@ def _computeTotalStresses( self: Self ) -> None: Total stress is computed at the initial and current time steps. Total stress ratio is computed at current time step only. + + Raises: + AttributeError: A mandatory attribute is missing. """ # Compute the total stress at the initial time step. self._doComputeTotalStressInitial() @@ -1072,7 +1078,7 @@ def _computeTotalStresses( self: Self ) -> None: STRESS_TOTAL ) else: mess = f"{ STRESS_TOTAL.attributeName } has not been computed, geomechanics property { STRESS_EFFECTIVE.attributeName } or { BIOT_COEFFICIENT.attributeName } are missing." - raise ValueError( mess ) + raise AttributeError( mess ) # Compute the total stress ratio. if self._basicProperties.totalStress is not None: @@ -1186,7 +1192,11 @@ def _computeElasticStrain( self: Self ) -> None: return def _computeReservoirStressPathReal( self: Self ) -> None: - """Compute reservoir stress paths.""" + """Compute reservoir stress paths. + + Raises: + AttributeError: A mandatory attribute is missing. 
+ """ # create delta stress attribute for QC if not isAttributeInObject( self.output, STRESS_TOTAL_DELTA.attributeName, STRESS_TOTAL_DELTA.isOnPoints ): if self._basicProperties.totalStress is not None and self._basicProperties.totalStressT0 is not None: @@ -1194,7 +1204,7 @@ def _computeReservoirStressPathReal( self: Self ) -> None: self._attributesToCreate.append( STRESS_TOTAL_DELTA ) else: mess: str = f"{ STRESS_TOTAL_DELTA.attributeName } has not been computed, geomechanics properties { STRESS_TOTAL.attributeName } or { STRESS_TOTAL_T0.attributeName } are missing." - raise ValueError( mess ) + raise AttributeError( mess ) else: self._basicProperties.deltaTotalStress = getArrayInObject( self.output, STRESS_TOTAL_DELTA.attributeName, STRESS_TOTAL_DELTA.isOnPoints ) @@ -1244,7 +1254,11 @@ def _computeEffectiveStressRatioOed( self: Self ) -> None: return def _computeCriticalTotalStressRatio( self: Self ) -> None: - """Compute fracture index and fracture threshold.""" + """Compute fracture index and fracture threshold. + + Raises: + AttributeError: A mandatory attribute is missing. + """ mess: str if not isAttributeInObject( self.output, CRITICAL_TOTAL_STRESS_RATIO.attributeName, CRITICAL_TOTAL_STRESS_RATIO.isOnPoints ): @@ -1255,7 +1269,7 @@ def _computeCriticalTotalStressRatio( self: Self ) -> None: self._attributesToCreate.append( CRITICAL_TOTAL_STRESS_RATIO ) else: mess = f"{ CRITICAL_TOTAL_STRESS_RATIO.attributeName } has not been computed, geomechanics property { STRESS_TOTAL.attributeName } is missing." 
- raise ValueError( mess ) + raise AttributeError( mess ) else: self._advancedProperties.criticalTotalStressRatio = getArrayInObject( self.output, CRITICAL_TOTAL_STRESS_RATIO.attributeName, CRITICAL_TOTAL_STRESS_RATIO.isOnPoints ) @@ -1275,7 +1289,7 @@ def _computeCriticalTotalStressRatio( self: Self ) -> None: self._attributesToCreate.append( TOTAL_STRESS_RATIO_THRESHOLD ) else: mess = f"{ TOTAL_STRESS_RATIO_THRESHOLD.attributeName } has not been computed, geomechanics property { STRESS_TOTAL.attributeName } is missing." - raise ValueError( mess ) + raise AttributeError( mess ) else: self._advancedProperties.stressRatioThreshold = getArrayInObject( self.output, TOTAL_STRESS_RATIO_THRESHOLD.attributeName, TOTAL_STRESS_RATIO_THRESHOLD.isOnPoints ) @@ -1286,7 +1300,11 @@ def _computeCriticalTotalStressRatio( self: Self ) -> None: return def _computeCriticalPorePressure( self: Self ) -> None: - """Compute the critical pore pressure and the pressure index.""" + """Compute the critical pore pressure and the pressure index. + + Raises: + AttributeError: A mandatory attribute is missing. + """ if not isAttributeInObject( self.output, CRITICAL_PORE_PRESSURE.attributeName, CRITICAL_PORE_PRESSURE.isOnPoints ): if self._basicProperties.totalStress is not None: @@ -1297,7 +1315,7 @@ def _computeCriticalPorePressure( self: Self ) -> None: else: mess: str mess = f"{ CRITICAL_PORE_PRESSURE.attributeName } has not been computed, geomechanics property { STRESS_TOTAL.attributeName } is missing." 
- raise ValueError( mess ) + raise AttributeError( mess ) else: self._advancedProperties.criticalPorePressure = getArrayInObject( self.output, CRITICAL_PORE_PRESSURE.attributeName, diff --git a/geos-processing/src/geos/processing/post_processing/GeosBlockExtractor.py b/geos-processing/src/geos/processing/post_processing/GeosBlockExtractor.py index 13d451e4..7f0792e1 100644 --- a/geos-processing/src/geos/processing/post_processing/GeosBlockExtractor.py +++ b/geos-processing/src/geos/processing/post_processing/GeosBlockExtractor.py @@ -48,7 +48,13 @@ geosBlockExtractor.setLoggerHandler( yourHandler ) # Do calculations - geosBlockExtractor.applyFilter() + try: + geosBlockExtractor.applyFilter() + except ( ValueError, TypeError ) as e: + geosBlockExtractor.logger.error( f"The filter { geosBlockExtractor.logger.name } failed due to: { e }." ) + except Exception as e: + mess: str = f"The filter { geosBlockExtractor.logger.name } failed due to: { e }" + geosBlockExtractor.logger.critical( mess, exc_info=True ) # Get the multiBlockDataSet with blocks of the extracted domain. geosDomainExtracted: vtkMultiBlockDataSet @@ -128,6 +134,9 @@ def setExtractedDomain( self: Self, geosDomainName: GeosDomainNameEnum, Args: geosDomainName (GeosDomainNameEnum): Name of the GEOS domain. multiBlockDataSet (vtkMultiBlockDataSet): The mesh to set. + + Raises: + ValueError: The mesh is not a GEOS domain. """ if geosDomainName.value == "CellElementRegion": self.volume = multiBlockDataSet @@ -196,35 +205,27 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: "The logger already has an handler, to use yours set the argument 'speHandler' to True during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Extract the volume, the fault or the well domain of the mesh from GEOS. - Returns: - bool: True if the filter succeeded, False otherwise. + Raises: + ValueError: The mesh extracted is not a GEOS domain. 
+ TypeError: The mesh extracted has the wrong dimension. """ self.logger.info( f"Apply filter { self.logger.name }." ) - try: - extractGeosDomain: GeosExtractDomainBlock = GeosExtractDomainBlock() - extractGeosDomain.SetInputData( self.geosMesh ) - - domainNames: list = [] - for domain in self.domainToExtract: - extractGeosDomain.RemoveAllIndices() - extractGeosDomain.AddGeosDomainName( domain ) - extractGeosDomain.Update() - self.extractedGeosDomain.setExtractedDomain( domain, extractGeosDomain.GetOutput() ) - domainNames.append( domain.value ) - - self.logger.info( f"The GEOS domain { domainNames } have been extracted." ) - self.logger.info( f"The filter { self.logger.name } succeeded." ) - - except ( ValueError, TypeError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }." ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + extractGeosDomain: GeosExtractDomainBlock = GeosExtractDomainBlock() + extractGeosDomain.SetInputData( self.geosMesh ) + + domainNames: list = [] + for domain in self.domainToExtract: + extractGeosDomain.RemoveAllIndices() + extractGeosDomain.AddGeosDomainName( domain ) + extractGeosDomain.Update() + self.extractedGeosDomain.setExtractedDomain( domain, extractGeosDomain.GetOutput() ) + domainNames.append( domain.value ) + + self.logger.info( f"The GEOS domain { domainNames } have been extracted." ) + self.logger.info( f"The filter { self.logger.name } succeeded." 
) + + return diff --git a/geos-processing/src/geos/processing/post_processing/GeosBlockMerge.py b/geos-processing/src/geos/processing/post_processing/GeosBlockMerge.py index f581ee20..7caab5a8 100644 --- a/geos-processing/src/geos/processing/post_processing/GeosBlockMerge.py +++ b/geos-processing/src/geos/processing/post_processing/GeosBlockMerge.py @@ -7,7 +7,6 @@ from vtkmodules.vtkCommonDataModel import vtkCompositeDataSet, vtkMultiBlockDataSet, vtkPolyData, vtkUnstructuredGrid -from geos.utils.Errors import VTKError from geos.utils.Logger import ( Logger, getLogger ) from geos.utils.GeosOutputsConstants import ( PHASE_SEP, PhaseTypeEnum, FluidPrefixEnum, PostProcessingOutputsEnum, getRockSuffixRenaming ) @@ -37,6 +36,7 @@ .. code-block:: python from geos.processing.post_processing.GeosBlockMerge import GeosBlockMerge + from geos.utils.Errors import VTKError # Filter inputs. inputMesh: vtkMultiBlockDataSet @@ -53,7 +53,13 @@ mergeBlockFilter.setLoggerHandler( yourHandler ) # Do calculations - mergeBlockFilter.applyFilter() + try: + mergeBlockFilter.applyFilter() + except ( ValueError, VTKError ) as e: + mergeBlockFilter.logger.error( f"The filter { mergeBlockFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { mergeBlockFilter.logger.name } failed due to: { e }" + mergeBlockFilter.logger.critical( mess, exc_info=True ) # Get the multiBlockDataSet with one dataSet per region outputMesh: vtkMultiBlockDataSet = mergeBlockFilter.getOutput() @@ -125,69 +131,64 @@ def getOutput( self: Self ) -> vtkMultiBlockDataSet: """Get the mesh with the composite blocks merged.""" return self.outputMesh - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Apply the filter on the mesh. - Returns: - bool: True if the filter succeeded, False otherwise. + Raises: + ValueError: Something went wrong during the creation of an attribute. + VTKError: Error raises during the call of VTK function. 
""" self.logger.info( f"Apply filter { self.logger.name }." ) - try: - # Display phase names - self.computePhaseNames() - for phase, phaseNames in self.phaseNameDict.items(): - if len( phaseNames ) > 0: - self.logger.info( f"Identified { phase } phase(s) are: { phaseNames }." ) - else: - self.logger.info( f"No { phase } phase has been identified." ) - - # Parse all the composite blocks - compositeBlockIndexesToMerge: dict[ str, int ] = getElementaryCompositeBlockIndexes( self.inputMesh ) - nbBlocks: int = len( compositeBlockIndexesToMerge ) - self.outputMesh.SetNumberOfBlocks( nbBlocks ) - for newIndex, ( blockName, blockIndex ) in enumerate( compositeBlockIndexesToMerge.items() ): - # Set the name of the composite block - self.outputMesh.GetMetaData( newIndex ).Set( vtkCompositeDataSet.NAME(), blockName ) - - # Merge blocks - blockToMerge: vtkMultiBlockDataSet = extractBlock( self.inputMesh, blockIndex ) - volumeMesh: vtkUnstructuredGrid = mergeBlocks( blockToMerge, - keepPartialAttributes=True, - logger=self.logger ) - - # Create index attribute keeping the index in initial mesh - if not createConstantAttribute( volumeMesh, [ blockIndex ], - PostProcessingOutputsEnum.BLOCK_INDEX.attributeName, - onPoints=False, - logger=self.logger ): - self.logger.warning( "BlockIndex attribute was not created." 
) - - # Rename attributes - self.renameAttributes( volumeMesh ) - - # Convert the volume mesh to a surface mesh - if self.convertFaultToSurface: - if not isTriangulate( volumeMesh ): - volumeMesh.ShallowCopy( triangulateMesh( volumeMesh, self.logger ) ) - surfaceMesh: vtkPolyData = convertUnstructuredGridToPolyData( volumeMesh, self.logger ) - surfaceMesh.ShallowCopy( computeNormals( surfaceMesh, logger=self.logger ) ) - surfaceMesh.ShallowCopy( computeTangents( surfaceMesh, logger=self.logger ) ) - # Add the merged block to the output mesh - self.outputMesh.SetBlock( newIndex, surfaceMesh ) - else: - self.outputMesh.SetBlock( newIndex, volumeMesh ) - - self.logger.info( f"The filter { self.logger.name } succeeded." ) - except ( ValueError, TypeError, RuntimeError, AssertionError, VTKError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + # Display phase names + self.computePhaseNames() + for phase, phaseNames in self.phaseNameDict.items(): + if len( phaseNames ) > 0: + self.logger.info( f"Identified { phase } phase(s) are: { phaseNames }." ) + else: + self.logger.info( f"No { phase } phase has been identified." 
) + + # Parse all the composite blocks + compositeBlockIndexesToMerge: dict[ str, int ] = getElementaryCompositeBlockIndexes( self.inputMesh ) + nbBlocks: int = len( compositeBlockIndexesToMerge ) + self.outputMesh.SetNumberOfBlocks( nbBlocks ) + for newIndex, ( blockName, blockIndex ) in enumerate( compositeBlockIndexesToMerge.items() ): + # Set the name of the composite block + self.outputMesh.GetMetaData( newIndex ).Set( vtkCompositeDataSet.NAME(), blockName ) + + # Merge blocks + blockToMerge: vtkMultiBlockDataSet = extractBlock( self.inputMesh, blockIndex ) + volumeMesh: vtkUnstructuredGrid = mergeBlocks( blockToMerge, + keepPartialAttributes=True, + logger=self.logger ) + + # Create index attribute keeping the index in initial mesh + if not createConstantAttribute( volumeMesh, [ blockIndex ], + PostProcessingOutputsEnum.BLOCK_INDEX.attributeName, + onPoints=False, + logger=self.logger ): + raise ValueError( + f"Something went wrong during the creation of the attribute { PostProcessingOutputsEnum.BLOCK_INDEX.attributeName }." + ) + + # Rename attributes + self.renameAttributes( volumeMesh ) + + # Convert the volume mesh to a surface mesh + if self.convertFaultToSurface: + if not isTriangulate( volumeMesh ): + volumeMesh.ShallowCopy( triangulateMesh( volumeMesh, self.logger ) ) + surfaceMesh: vtkPolyData = convertUnstructuredGridToPolyData( volumeMesh, self.logger ) + surfaceMesh.ShallowCopy( computeNormals( surfaceMesh, logger=self.logger ) ) + surfaceMesh.ShallowCopy( computeTangents( surfaceMesh, logger=self.logger ) ) + # Add the merged block to the output mesh + self.outputMesh.SetBlock( newIndex, surfaceMesh ) + else: + self.outputMesh.SetBlock( newIndex, volumeMesh ) + + self.logger.info( f"The filter { self.logger.name } succeeded." 
) + + return def renameAttributes( self: Self, diff --git a/geos-processing/src/geos/processing/post_processing/SurfaceGeomechanics.py b/geos-processing/src/geos/processing/post_processing/SurfaceGeomechanics.py index ed0a5a62..087dd426 100644 --- a/geos-processing/src/geos/processing/post_processing/SurfaceGeomechanics.py +++ b/geos-processing/src/geos/processing/post_processing/SurfaceGeomechanics.py @@ -40,6 +40,7 @@ .. code-block:: python from geos.processing.post_processing.SurfaceGeomechanics import SurfaceGeomechanics + from geos.utils.Errors import VTKError # filter inputs inputMesh: vtkPolyData @@ -61,7 +62,13 @@ sg.SetFrictionAngle( frictionAngle ) # Do calculations - sg.applyFilter() + try: + sg.applyFilter() + except ( ValueError, VTKError, AttributeError, AssertionError ) as e: + sg.logger.error( f"The filter { sg.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { sg.logger.name } failed due to: { e }" + sg.logger.critical( mess, exc_info=True ) # Get output object output: vtkPolyData = sg.GetOutputMesh() @@ -221,11 +228,14 @@ def GetNewAttributeNames( self: Self ) -> set[ str ]: """ return self.newAttributeNames - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Compute Geomechanical properties on input surface. - Returns: - int: 1 if calculation successfully ended, 0 otherwise. + Raises: + ValueError: Errors during the creation of an attribute. + VTKError: Error raises during the call of VTK function. + AttributeError: Attributes must be on cell. + AssertionError: Something went wrong during the shearCapacityUtilization computation. 
""" msg = f"Applying filter {self.logger.name}" if self.name is not None: @@ -235,28 +245,28 @@ def applyFilter( self: Self ) -> bool: self.logger.info( msg ) - try: - self.outputMesh = vtkPolyData() - self.outputMesh.ShallowCopy( self.inputMesh ) + self.outputMesh = vtkPolyData() + self.outputMesh.ShallowCopy( self.inputMesh ) - # Conversion of attributes from Normal/Tangent basis to xyz basis - if self.convertAttributesOn: - self.logger.info( "Conversion of attributes from local to XYZ basis." ) - self.convertAttributesFromLocalToXYZBasis() + # Conversion of attributes from Normal/Tangent basis to xyz basis + if self.convertAttributesOn: + self.logger.info( "Conversion of attributes from local to XYZ basis." ) + self.convertAttributesFromLocalToXYZBasis() - # Compute shear capacity utilization - self.computeShearCapacityUtilization() + # Compute shear capacity utilization + self.computeShearCapacityUtilization() - self.logger.info( f"Filter {self.logger.name} successfully applied on surface {self.name}." ) - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False + self.logger.info( f"Filter {self.logger.name} successfully applied on surface {self.name}." ) - return True + return def convertAttributesFromLocalToXYZBasis( self: Self ) -> None: - """Convert attributes from local to XYZ basis.""" + """Convert attributes from local to XYZ basis. + + Raises: + ValueError: Something went wrong during the creation of an attribute. + AttributeError: Attributes must be on cell. + """ # Get the list of attributes to convert and filter attributesToConvert: set[ str ] = self.__filterAttributesToConvert() @@ -273,7 +283,7 @@ def convertAttributesFromLocalToXYZBasis( self: Self ) -> None: continue if self.attributeOnPoints: - raise ValueError( + raise AttributeError( "This filter can only convert cell attributes from local to XYZ basis, not point attributes." 
) localArray: npt.NDArray[ np.float64 ] = getArrayInObject( self.outputMesh, attrNameLocal, self.attributeOnPoints ) @@ -290,7 +300,7 @@ def convertAttributesFromLocalToXYZBasis( self: Self ) -> None: self.logger.info( f"Attribute {attrNameXYZ} added to the output mesh." ) self.newAttributeNames.add( attrNameXYZ ) else: - raise + raise ValueError( f"Something went wrong during the creation of the attribute { attrNameXYZ }." ) return @@ -343,7 +353,7 @@ def __computeXYZCoordinates( for i, cellAttribute in enumerate( attrArray ): if len( cellAttribute ) not in ( 3, 6, 9 ): raise ValueError( - f"Inconsistent number of components for attribute. Expected 3, 6 or 9 but got { len( cellAttribute.shape ) }." + f"Inconsistent number of components for attribute. Expected 3, 6 or 9 but got { len( cellAttribute.shape ) }." ) # Compute attribute XYZ components @@ -356,7 +366,12 @@ def __computeXYZCoordinates( return attrXYZ def computeShearCapacityUtilization( self: Self ) -> None: - """Compute the shear capacity utilization (SCU) on surface.""" + """Compute the shear capacity utilization (SCU) on surface. + + Raises: + ValueError: Something went wrong during the creation of an attribute. + AssertionError: Something went wrong during the shearCapacityUtilization computation. + """ SCUAttributeName: str = PostProcessingOutputsEnum.SCU.attributeName if not isAttributeInObject( self.outputMesh, SCUAttributeName, self.attributeOnPoints ): @@ -367,18 +382,13 @@ def computeShearCapacityUtilization( self: Self ) -> None: # Computation of the shear capacity utilization (SCU) # TODO: better handling of errors in shearCapacityUtilization - try: - scuAttribute: npt.NDArray[ np.float64 ] = fcts.shearCapacityUtilization( - traction, self.rockCohesion, self.frictionAngle ) - except AssertionError: - self.logger.error( f"Failed to compute {SCUAttributeName}." 
) - raise + scuAttribute: npt.NDArray[ np.float64 ] = fcts.shearCapacityUtilization( traction, self.rockCohesion, + self.frictionAngle ) # Create attribute if not createAttribute( self.outputMesh, scuAttribute, SCUAttributeName, (), self.attributeOnPoints, logger=self.logger ): - self.logger.error( f"Failed to create attribute {SCUAttributeName}." ) - raise + raise ValueError( f"Failed to create attribute {SCUAttributeName}." ) else: self.logger.info( "SCU computed and added to the output mesh." ) self.newAttributeNames.add( SCUAttributeName ) diff --git a/geos-processing/src/geos/processing/pre_processing/CellTypeCounterEnhanced.py b/geos-processing/src/geos/processing/pre_processing/CellTypeCounterEnhanced.py index 3aa8953a..3938a597 100644 --- a/geos-processing/src/geos/processing/pre_processing/CellTypeCounterEnhanced.py +++ b/geos-processing/src/geos/processing/pre_processing/CellTypeCounterEnhanced.py @@ -34,7 +34,13 @@ cellTypeCounterEnhancedFilter.setLoggerHandler( yourHandler ) # Do calculations - cellTypeCounterEnhancedFilter.applyFilter() + try: + cellTypeCounterEnhancedFilter.applyFilter() + except TypeError as e: + cellTypeCounterEnhancedFilter.logger.error( f"The filter { cellTypeCounterEnhancedFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { cellTypeCounterEnhancedFilter.logger.name } failed due to: { e }" + cellTypeCounterEnhancedFilter.logger.critical( mess, exc_info=True ) # Get result counts: CellTypeCounts = cellTypeCounterEnhancedFilter.GetCellTypeCountsObject() @@ -86,45 +92,38 @@ def setLoggerHandler( self: Self, handler: logging.Handler ) -> None: self.logger.warning( "The logger already has an handler, to use yours set the argument 'speHandler'" " to True during the filter initialization." ) - def applyFilter( self: Self ) -> bool: + def applyFilter( self: Self ) -> None: """Apply CellTypeCounterEnhanced filter. - Returns: - bool: True if the filter succeeded, False otherwise. 
+ Raises: + TypeError: Errors with the type of the cells. """ self.logger.info( f"Apply filter { self.logger.name }." ) - try: - # compute cell type counts - self._counts.reset() - self._counts.setTypeCount( VTK_VERTEX, self.inputMesh.GetNumberOfPoints() ) - for i in range( self.inputMesh.GetNumberOfCells() ): - cell: vtkCell = self.inputMesh.GetCell( i ) - self._counts.addType( cell.GetCellType() ) - - # create output table - # first reset output table - self.outTable.RemoveAllRows() - self.outTable.RemoveAllColumns() - self.outTable.SetNumberOfRows( 1 ) - - # create columns per types - for cellType in getAllCellTypes(): - array: vtkIntArray = vtkIntArray() - array.SetName( vtkCellTypes.GetClassNameFromTypeId( cellType ) ) - array.SetNumberOfComponents( 1 ) - array.SetNumberOfValues( 1 ) - array.SetValue( 0, self._counts.getTypeCount( cellType ) ) - self.outTable.AddColumn( array ) - self.logger.info( f"The filter { self.logger.name } succeeded." ) - except TypeError as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False - - return True + + # Compute cell type counts + self._counts.reset() + self._counts.setTypeCount( VTK_VERTEX, self.inputMesh.GetNumberOfPoints() ) + for i in range( self.inputMesh.GetNumberOfCells() ): + cell: vtkCell = self.inputMesh.GetCell( i ) + self._counts.addType( cell.GetCellType() ) + + # Create output table + # First reset output table + self.outTable.RemoveAllRows() + self.outTable.RemoveAllColumns() + self.outTable.SetNumberOfRows( 1 ) + + # Create columns per types + for cellType in getAllCellTypes(): + array: vtkIntArray = vtkIntArray() + array.SetName( vtkCellTypes.GetClassNameFromTypeId( cellType ) ) + array.SetNumberOfComponents( 1 ) + array.SetNumberOfValues( 1 ) + array.SetValue( 0, self._counts.getTypeCount( cellType ) ) + 
self.outTable.AddColumn( array ) + self.logger.info( f"The filter { self.logger.name } succeeded." ) + + return def GetCellTypeCountsObject( self: Self ) -> CellTypeCounts: """Get CellTypeCounts object. diff --git a/geos-processing/src/geos/processing/pre_processing/MeshQualityEnhanced.py b/geos-processing/src/geos/processing/pre_processing/MeshQualityEnhanced.py index 6c454c69..de5fb1d8 100644 --- a/geos-processing/src/geos/processing/pre_processing/MeshQualityEnhanced.py +++ b/geos-processing/src/geos/processing/pre_processing/MeshQualityEnhanced.py @@ -64,7 +64,13 @@ meshQualityEnhancedFilter.SetOtherMeshQualityMetrics(otherQualityMetrics) # Do calculations - meshQualityEnhancedFilter.applyFilter() + try: + meshQualityEnhancedFilter.applyFilter() + except ( ValueError, IndexError, TypeError, AttributeError ) as e: + meshQualityEnhancedFilter.logger.error( f"The filter { meshQualityEnhancedFilter.logger.name } failed due to: { e }" ) + except Exception as e: + mess: str = f"The filter { meshQualityEnhancedFilter.logger.name } failed due to: { e }" + meshQualityEnhancedFilter.logger.critical( mess, exc_info=True ) # Get output mesh quality report outputMesh: vtkUnstructuredGrid = meshQualityEnhancedFilter.getOutput() @@ -282,39 +288,28 @@ def getComputedMetricsFromCellType( self: Self, cellType: int ) -> Optional[ set metrics = metrics.intersection( computedMetrics ) return metrics if commonComputedMetricsExists else None - def applyFilter( self: Self ) -> bool: - """Apply MeshQualityEnhanced filter. - - Returns: - bool: True if the filter succeeded, False otherwise. - """ + def applyFilter( self: Self ) -> None: + """Apply MeshQualityEnhanced filter.""" self.logger.info( f"Apply filter { self.logger.name }." 
) - try: - self._outputMesh.ShallowCopy( self.inputMesh ) - # Compute cell type counts - self._computeCellTypeCounts() - # Compute metrics and associated attributes - self._evaluateMeshQualityAll() + self._outputMesh.ShallowCopy( self.inputMesh ) + # Compute cell type counts + self._computeCellTypeCounts() - # Compute stats summary - self._updateStatsSummary() + # Compute metrics and associated attributes + self._evaluateMeshQualityAll() - # Create field data - self._createFieldDataStatsSummary() + # Compute stats summary + self._updateStatsSummary() - self._outputMesh.Modified() + # Create field data + self._createFieldDataStatsSummary() - self.logger.info( f"The filter { self.logger.name } succeeded." ) - except ( ValueError, IndexError, TypeError, AttributeError ) as e: - self.logger.error( f"The filter { self.logger.name } failed.\n{ e }" ) - return False - except Exception as e: - mess: str = f"The filter { self.logger.name } failed.\n{ e }" - self.logger.critical( mess, exc_info=True ) - return False + self._outputMesh.Modified() + + self.logger.info( f"The filter { self.logger.name } succeeded." 
) - return True + return def getOutput( self: Self ) -> vtkUnstructuredGrid: """Get the mesh computed with the stats.""" @@ -326,8 +321,7 @@ def _computeCellTypeCounts( self: Self ) -> None: self._outputMesh, self.speHandler ) if self.speHandler and len( cellTypeCounterEnhancedFilter.logger.handlers ) == 0: cellTypeCounterEnhancedFilter.setLoggerHandler( self.handler ) - if not cellTypeCounterEnhancedFilter.applyFilter(): - raise + cellTypeCounterEnhancedFilter.applyFilter() counts: CellTypeCounts = cellTypeCounterEnhancedFilter.GetCellTypeCountsObject() if counts is None: diff --git a/geos-processing/tests/test_AttributeMapping.py b/geos-processing/tests/test_AttributeMapping.py index 5a421a27..9494a2cc 100644 --- a/geos-processing/tests/test_AttributeMapping.py +++ b/geos-processing/tests/test_AttributeMapping.py @@ -30,4 +30,49 @@ def test_AttributeMapping( fillAllPartialAttributes( meshFrom ) attributeMappingFilter: AttributeMapping = AttributeMapping( meshFrom, meshTo, attributeNames, onPoints ) - assert attributeMappingFilter.applyFilter() + attributeMappingFilter.applyFilter() + + +@pytest.mark.parametrize( + "meshFromName, meshToName, attributeNames, onPoints", + [ + ( "dataset", "emptydataset", { "Fault" }, False ), # Attribute not in the mesh from + ( "dataset", "dataset", { "GLOBAL_IDS_CELLS" }, False ), # Attribute on both meshes + ( "multiblock", "emptymultiblock", { "FAULT" }, False ), # Partial attribute in the mesh from + ] ) +def test_AttributeMappingRaisesAttributeError( + dataSetTest: Any, + meshFromName: str, + meshToName: str, + attributeNames: set[ str ], + onPoints: bool, +) -> None: + """Test the fails of the filter with attributes issues.""" + meshFrom: Union[ vtkDataSet, vtkMultiBlockDataSet ] = dataSetTest( meshFromName ) + meshTo: Union[ vtkDataSet, vtkMultiBlockDataSet ] = dataSetTest( meshToName ) + attributeMappingFilter: AttributeMapping = AttributeMapping( meshFrom, meshTo, attributeNames, onPoints ) + + with pytest.raises( 
AttributeError ): + attributeMappingFilter.applyFilter() + + +@pytest.mark.parametrize( + "meshFromName, meshToName, attributeNames, onPoints", + [ + ( "dataset", "emptydataset", {}, False ), # no attribute to map + ( "dataset", "emptyFracture", { "FAULT" }, False ), # meshes with same type but different cells dimension + ] ) +def test_AttributeMappingRaisesValueError( + dataSetTest: Any, + meshFromName: str, + meshToName: str, + attributeNames: set[ str ], + onPoints: bool, +) -> None: + """Test the fails of the filter with input value issue.""" + meshFrom: Union[ vtkDataSet, vtkMultiBlockDataSet ] = dataSetTest( meshFromName ) + meshTo: Union[ vtkDataSet, vtkMultiBlockDataSet ] = dataSetTest( meshToName ) + attributeMappingFilter: AttributeMapping = AttributeMapping( meshFrom, meshTo, attributeNames, onPoints ) + + with pytest.raises( ValueError ): + attributeMappingFilter.applyFilter() diff --git a/geos-processing/tests/test_CellTypeCounterEnhanced.py b/geos-processing/tests/test_CellTypeCounterEnhanced.py index 7783ce70..7883592c 100644 --- a/geos-processing/tests/test_CellTypeCounterEnhanced.py +++ b/geos-processing/tests/test_CellTypeCounterEnhanced.py @@ -60,7 +60,7 @@ def test_CellTypeCounterEnhanced_single( test_case: TestCase ) -> None: test_case (TestCase): Test case """ cellTypeCounterEnhancedFilter: CellTypeCounterEnhanced = CellTypeCounterEnhanced( test_case.mesh ) - assert cellTypeCounterEnhancedFilter.applyFilter() + cellTypeCounterEnhancedFilter.applyFilter() countsObs: CellTypeCounts = cellTypeCounterEnhancedFilter.GetCellTypeCountsObject() assert countsObs is not None, "CellTypeCounts is undefined" @@ -118,7 +118,7 @@ def test_CellTypeCounterEnhanced_multi( test_case: TestCase ) -> None: test_case (TestCase): Test case """ cellTypeCounterEnhancedFilter: CellTypeCounterEnhanced = CellTypeCounterEnhanced( test_case.mesh ) - assert cellTypeCounterEnhancedFilter.applyFilter() + cellTypeCounterEnhancedFilter.applyFilter() countsObs: CellTypeCounts = 
cellTypeCounterEnhancedFilter.GetCellTypeCountsObject() assert countsObs is not None, "CellTypeCounts is undefined" diff --git a/geos-processing/tests/test_CreateConstantAttributePerRegion.py b/geos-processing/tests/test_CreateConstantAttributePerRegion.py index 98896763..388fee0c 100644 --- a/geos-processing/tests/test_CreateConstantAttributePerRegion.py +++ b/geos-processing/tests/test_CreateConstantAttributePerRegion.py @@ -13,87 +13,149 @@ @pytest.mark.parametrize( - "meshType, newAttributeName, regionName, dictRegionValues, componentNames, componentNamesTest, valueNpType, succeed", + "meshType, newAttributeName, regionName, dictRegionValues, componentNames, componentNamesTest, valueNpType", [ # Test the name of the new attribute (new on the mesh, one present on the other piece). ## For vtkDataSet. - ( "dataset", "newAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32, True ), - ( "dataset", "CellAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32, True ), + ( "dataset", "newAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32 ), + ( "dataset", "CellAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32 ), ## For vtkMultiBlockDataSet. - ( "multiblock", "newAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32, True ), - ( "multiblock", "CellAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32, True ), - ( "multiblock", "GLOBAL_IDS_CELLS", "GLOBAL_IDS_POINTS", {}, (), (), np.float32, True ), + ( "multiblock", "newAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32 ), + ( "multiblock", "CellAttribute", "GLOBAL_IDS_POINTS", {}, (), (), np.float32 ), + ( "multiblock", "GLOBAL_IDS_CELLS", "GLOBAL_IDS_POINTS", {}, (), (), np.float32 ), # Test if the region attribute is on cells or on points. - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.float32, True ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.float32 ), # Test the component name. 
- ( "dataset", "newAttribute", "FAULT", {}, ( "X" ), (), np.float32, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), ( "Component0", "Component1" ), np.float32, True ), - ( "dataset", "newAttribute", "FAULT", {}, ( "X" ), ( "Component0", "Component1" ), np.float32, True ), - ( "dataset", "newAttribute", "FAULT", {}, ( "X", "Y" ), ( "X", "Y" ), np.float32, True ), - ( "dataset", "newAttribute", "FAULT", {}, ( "X", "Y", "Z" ), ( "X", "Y" ), np.float32, True ), + ( "dataset", "newAttribute", "FAULT", {}, ( "X" ), (), np.float32 ), + ( "dataset", "newAttribute", "FAULT", {}, (), ( "Component0", "Component1" ), np.float32 ), + ( "dataset", "newAttribute", "FAULT", {}, ( "X" ), ( "Component0", "Component1" ), np.float32 ), + ( "dataset", "newAttribute", "FAULT", {}, ( "X", "Y" ), ( "X", "Y" ), np.float32 ), + ( "dataset", "newAttribute", "FAULT", {}, ( "X", "Y", "Z" ), ( "X", "Y" ), np.float32 ), # Test the type of value. - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int8, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int16, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int32, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int64, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint8, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint16, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint32, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint64, True ), - ( "dataset", "newAttribute", "FAULT", {}, (), (), np.float64, True ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int8 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int16 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int32 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.int64 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint8 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint16 ), + ( "dataset", "newAttribute", "FAULT", {}, 
(), (), np.uint32 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.uint64 ), + ( "dataset", "newAttribute", "FAULT", {}, (), (), np.float64 ), # Test index/value. ( "dataset", "newAttribute", "FAULT", { 0: [ 0 ], - 100: [ 1 ] - }, (), (), np.float32, True ), + 100: [ 1 ], + }, (), (), np.float32 ), ( "dataset", "newAttribute", "FAULT", { 0: [ 0 ], 100: [ 1 ], - 101: [ 2 ] - }, (), (), np.float32, True ), + 101: [ 2 ], + }, (), (), np.float32 ), ( "dataset", "newAttribute", "FAULT", { 0: [ 0 ], 100: [ 1 ], 101: [ 2 ], - 2: [ 3 ] - }, (), (), np.float32, True ), + 2: [ 3 ], + }, (), (), np.float32 ), ( "dataset", "newAttribute", "FAULT", { 0: [ 0, 0 ], - 100: [ 1, 1 ] - }, (), ( "Component0", "Component1" ), np.float32, True ), + 100: [ 1, 1 ], + }, (), ( "Component0", "Component1" ), np.float32 ), ( "dataset", "newAttribute", "FAULT", { 0: [ 0, 0 ], 100: [ 1, 1 ], - 101: [ 2, 2 ] - }, (), ( "Component0", "Component1" ), np.float32, True ), + 101: [ 2, 2 ], + }, (), ( "Component0", "Component1" ), np.float32 ), ( "dataset", "newAttribute", "FAULT", { 0: [ 0, 0 ], 100: [ 1, 1 ], 101: [ 2, 2 ], - 2: [ 3, 3 ] - }, (), ( "Component0", "Component1" ), np.float32, True ), - # Test common error. - ## Number of components. + 2: [ 3, 3 ], + }, (), ( "Component0", "Component1" ), np.float32 ), + ] ) +def test_CreateConstantAttributePerRegion( + dataSetTest: Union[ vtkMultiBlockDataSet, vtkDataSet ], + meshType: str, + newAttributeName: str, + regionName: str, + dictRegionValues: dict[ Any, Any ], + componentNames: tuple[ str, ...], + componentNamesTest: tuple[ str, ...], + valueNpType: int, +) -> None: + """Test CreateConstantAttributePerRegion.""" + mesh: Union[ vtkMultiBlockDataSet, vtkDataSet ] = dataSetTest( meshType ) + nbComponents: int = len( componentNamesTest ) + if nbComponents == 0: # If the attribute has one component, the component has no name. 
+ nbComponents += 1 + + createConstantAttributePerRegionFilter: CreateConstantAttributePerRegion = CreateConstantAttributePerRegion( + mesh, + regionName, + dictRegionValues, + newAttributeName, + valueNpType=valueNpType, + nbComponents=nbComponents, + componentNames=componentNames, + ) + + createConstantAttributePerRegionFilter.applyFilter() + + +@pytest.mark.parametrize( + "meshType, newAttributeName, regionName, dictRegionValues, componentNames, componentNamesTest, valueNpType", + [ + ( "dataset", "newAttribute", "PERM", {}, (), (), np.float32 ), # Region attribute has too many components + ( "multiblock", "newAttribute", "FAULT", {}, (), (), np.float32 ), # Region attribute is partial. + ] ) +def test_CreateConstantAttributePerRegionRaisesAttributeError( + dataSetTest: Union[ vtkMultiBlockDataSet, vtkDataSet ], + meshType: str, + newAttributeName: str, + regionName: str, + dictRegionValues: dict[ Any, Any ], + componentNames: tuple[ str, ...], + componentNamesTest: tuple[ str, ...], + valueNpType: int, +) -> None: + """Test tes fails of CreateConstantAttributePerRegion with attributes issues.""" + mesh: Union[ vtkMultiBlockDataSet, vtkDataSet ] = dataSetTest( meshType ) + nbComponents: int = len( componentNamesTest ) + if nbComponents == 0: # If the attribute has one component, the component has no name. + nbComponents += 1 + + createConstantAttributePerRegionFilter: CreateConstantAttributePerRegion = CreateConstantAttributePerRegion( + mesh, + regionName, + dictRegionValues, + newAttributeName, + valueNpType=valueNpType, + nbComponents=nbComponents, + componentNames=componentNames, + ) + + with pytest.raises( AttributeError ): + createConstantAttributePerRegionFilter.applyFilter() + + +@pytest.mark.parametrize( + "meshType, newAttributeName, regionName, dictRegionValues, componentNames, componentNamesTest, valueNpType", + [ ( "dataset", "newAttribute", "FAULT", { 0: [ 0 ], - 100: [ 1, 1 ] - }, (), (), np.float32, False ), # Number of value inconsistent. 
+ 100: [ 1, 1 ], + }, (), (), np.float32 ), # Number of value inconsistent. ( "dataset", "newAttribute", "FAULT", { 0: [ 0, 0 ], - 100: [ 1, 1 ] - }, (), (), np.float32, False ), # More values than components. + 100: [ 1, 1 ], + }, (), (), np.float32 ), # More values than components. ( "dataset", "newAttribute", "FAULT", { 0: [ 0 ], - 100: [ 1 ] - }, ( "X", "Y" ), ( "X", "Y" ), np.float32, False ), # More components than value. - ## Attribute name. - ( "dataset", "PERM", "FAULT", {}, (), (), np.float32, False ), # The attribute name already exist. - ## Region attribute. - ( "dataset", "newAttribute", "PERM", {}, (), - (), np.float32, False ), # Region attribute has too many components. - ( "multiblock", "newAttribute", "FAULT", {}, (), (), np.float32, False ), # Region attribute is partial. + 100: [ 1 ], + }, ( "X", "Y" ), ( "X", "Y" ), np.float32 ), # More components than value. + ( "dataset", "PERM", "FAULT", {}, (), (), np.float32 ), # The attribute name already exist in the mesh. ] ) -def test_CreateConstantAttributePerRegion( +def test_CreateConstantAttributePerRegionRaisesValueError( dataSetTest: Union[ vtkMultiBlockDataSet, vtkDataSet ], meshType: str, newAttributeName: str, @@ -102,9 +164,8 @@ def test_CreateConstantAttributePerRegion( componentNames: tuple[ str, ...], componentNamesTest: tuple[ str, ...], valueNpType: int, - succeed: bool, ) -> None: - """Test CreateConstantAttributePerRegion.""" + """Test the fails of CreateConstantAttributePerRegion with inputs value issues.""" mesh: Union[ vtkMultiBlockDataSet, vtkDataSet ] = dataSetTest( meshType ) nbComponents: int = len( componentNamesTest ) if nbComponents == 0: # If the attribute has one component, the component has no name. 
@@ -120,4 +181,5 @@ def test_CreateConstantAttributePerRegion( componentNames=componentNames, ) - assert createConstantAttributePerRegionFilter.applyFilter() == succeed + with pytest.raises( ValueError ): + createConstantAttributePerRegionFilter.applyFilter() diff --git a/geos-processing/tests/test_FillPartialArrays.py b/geos-processing/tests/test_FillPartialArrays.py index e5720d0f..0c0f1f9a 100644 --- a/geos-processing/tests/test_FillPartialArrays.py +++ b/geos-processing/tests/test_FillPartialArrays.py @@ -50,4 +50,18 @@ def test_FillPartialArrays( multiBlockDataSet: vtkMultiBlockDataSet = dataSetTest( "multiblock" ) fillPartialArraysFilter: FillPartialArrays = FillPartialArrays( multiBlockDataSet, dictAttributesValues ) - assert fillPartialArraysFilter.applyFilter() + fillPartialArraysFilter.applyFilter() + + +def test_FillPartialArraysRaises( dataSetTest: vtkMultiBlockDataSet, ) -> None: + """Test the raise of FillPartialArray.""" + multiBlockDataSet: vtkMultiBlockDataSet = dataSetTest( "multiblock" ) + fillPartialArraysFilter: FillPartialArrays + + with pytest.raises( AttributeError ): + fillPartialArraysFilter = FillPartialArrays( multiBlockDataSet, { "poro": None } ) + fillPartialArraysFilter.applyFilter() + + with pytest.raises( ValueError ): + fillPartialArraysFilter = FillPartialArrays( multiBlockDataSet, { "PORO": [ 4, 4, 4 ] } ) + fillPartialArraysFilter.applyFilter() diff --git a/geos-processing/tests/test_GeosExtractBlock.py b/geos-processing/tests/test_GeosExtractBlock.py index bfdecf63..2f47b1b2 100644 --- a/geos-processing/tests/test_GeosExtractBlock.py +++ b/geos-processing/tests/test_GeosExtractBlock.py @@ -26,7 +26,7 @@ def test_GeosExtractBlock( multiBlockDataSet: vtkMultiBlockDataSet = dataSetTest( "meshGeosExtractBlockTmp" ) geosBlockExtractor: GeosBlockExtractor = GeosBlockExtractor( multiBlockDataSet, extractFault, extractWell ) - assert geosBlockExtractor.applyFilter() + geosBlockExtractor.applyFilter() extractedVolume: 
vtkMultiBlockDataSet = geosBlockExtractor.extractedGeosDomain.volume extractedFault: vtkMultiBlockDataSet = geosBlockExtractor.extractedGeosDomain.fault diff --git a/geos-processing/tests/test_MergeBlocksEnhanced.py b/geos-processing/tests/test_MergeBlocksEnhanced.py index ce56b5c3..677a99d9 100644 --- a/geos-processing/tests/test_MergeBlocksEnhanced.py +++ b/geos-processing/tests/test_MergeBlocksEnhanced.py @@ -7,6 +7,9 @@ from vtkmodules.vtkCommonDataModel import vtkMultiBlockDataSet from geos.processing.generic_processing_tools.MergeBlockEnhanced import MergeBlockEnhanced +from unittest import TestCase +from geos.utils.Errors import VTKError + import vtk from packaging.version import Version @@ -15,8 +18,15 @@ def test_MergeBlocksEnhancedFilter( dataSetTest: vtkMultiBlockDataSet, ) -> None """Test MergeBlockEnhanced vtk filter.""" multiBlockDataset: vtkMultiBlockDataSet = dataSetTest( "multiblockGeosOutput" ) mergeBlockEnhancedFilter: MergeBlockEnhanced = MergeBlockEnhanced( multiBlockDataset ) - assert mergeBlockEnhancedFilter.applyFilter() + mergeBlockEnhancedFilter.applyFilter() + + +class RaiseMergeBlocksEnhanced( TestCase ): + """Test failure on empty multiBlockDataSet.""" - if Version( vtk.__version__ ) < Version( "9.5" ): - failedMergeBlockEnhancedFilter: MergeBlockEnhanced = MergeBlockEnhanced( vtkMultiBlockDataSet() ) - assert not failedMergeBlockEnhancedFilter.applyFilter() + def test_TypeError( self ) -> None: + """Test raise of TypeError.""" + multiBlockDataset = vtkMultiBlockDataSet() # should fail on empty data + mergeBlockEnhancedFilter: MergeBlockEnhanced = MergeBlockEnhanced( multiBlockDataset ) + if Version( vtk.__version__ ) < Version( "9.5" ): + self.assertRaises( VTKError, mergeBlockEnhancedFilter.applyFilter ) diff --git a/geos-processing/tests/test_MeshQualityEnhanced.py b/geos-processing/tests/test_MeshQualityEnhanced.py index 2938c736..6b3cb678 100644 --- a/geos-processing/tests/test_MeshQualityEnhanced.py +++ 
b/geos-processing/tests/test_MeshQualityEnhanced.py @@ -142,7 +142,7 @@ def test_MeshQualityEnhanced( test_case: TestCase ) -> None: meshQualityEnhancedFilter.SetWedgeMetrics( test_case.qualityMetrics ) elif test_case.cellType == VTK_HEXAHEDRON: meshQualityEnhancedFilter.SetHexaMetrics( test_case.qualityMetrics ) - assert meshQualityEnhancedFilter.applyFilter() + meshQualityEnhancedFilter.applyFilter() # test method getComputedMetricsFromCellType for i, cellType in enumerate( getAllCellTypesExtended() ): diff --git a/geos-processing/tests/test_SplitMesh.py b/geos-processing/tests/test_SplitMesh.py index fb175b30..c97f24be 100644 --- a/geos-processing/tests/test_SplitMesh.py +++ b/geos-processing/tests/test_SplitMesh.py @@ -143,7 +143,7 @@ def test_single_cell_split( test_case: TestCase ) -> None: """ cellTypeName: str = vtkCellTypes.GetClassNameFromTypeId( test_case.cellType ) splitMeshFilter: SplitMesh = SplitMesh( test_case.mesh ) - assert splitMeshFilter.applyFilter() + splitMeshFilter.applyFilter() output: vtkUnstructuredGrid = splitMeshFilter.getOutput() assert output is not None, "Output mesh is undefined." 
pointsOut: vtkPoints = output.GetPoints() @@ -234,7 +234,7 @@ def test_multi_cells_mesh_split() -> None: # Apply the split filter splitMeshFilter = SplitMesh( input_mesh ) - assert splitMeshFilter.applyFilter() + splitMeshFilter.applyFilter() output: vtkUnstructuredGrid = splitMeshFilter.getOutput() assert output is not None, "Output mesh should be defined" @@ -296,7 +296,7 @@ def test_multi_polygon_mesh_split() -> None: # Apply the split filter splitMeshFilter = SplitMesh( input_mesh ) - assert splitMeshFilter.applyFilter() + splitMeshFilter.applyFilter() output: vtkUnstructuredGrid = splitMeshFilter.getOutput() assert output is not None, "Output mesh should be defined" diff --git a/geos-processing/tests/test_SurfaceGeomechanics.py b/geos-processing/tests/test_SurfaceGeomechanics.py index 7623e802..e42f3110 100644 --- a/geos-processing/tests/test_SurfaceGeomechanics.py +++ b/geos-processing/tests/test_SurfaceGeomechanics.py @@ -69,7 +69,7 @@ def test_SurfaceGeomechanics() -> None: sgFilter: SurfaceGeomechanics = SurfaceGeomechanics( testCase.mesh ) - assert sgFilter.applyFilter() + sgFilter.applyFilter() mesh: vtkPolyData = sgFilter.GetOutputMesh() assert mesh.GetCellData().HasArray( "SCU" ) @@ -81,4 +81,4 @@ def test_failingSurfaceGeomechanics() -> None: failingCase: TriangulatedSurfaceTestCase = TriangulatedSurfaceTestCase( pointsCoords, triangles, None ) sgFilter: SurfaceGeomechanics = SurfaceGeomechanics( failingCase.mesh ) with pytest.raises( AssertionError ): - assert sgFilter.applyFilter() + sgFilter.applyFilter() diff --git a/geos-pv/src/geos/pv/plugins/PVAttributeMapping.py b/geos-pv/src/geos/pv/plugins/PVAttributeMapping.py index cb5af13f..b6d239d8 100644 --- a/geos-pv/src/geos/pv/plugins/PVAttributeMapping.py +++ b/geos-pv/src/geos/pv/plugins/PVAttributeMapping.py @@ -186,7 +186,14 @@ def RequestData( if len( attributeMappingFilter.logger.handlers ) == 0: attributeMappingFilter.setLoggerHandler( VTKHandler() ) - attributeMappingFilter.applyFilter() - 
self.clearAttributeNames = True + try: + attributeMappingFilter.applyFilter() + self.clearAttributeNames = True + except ( ValueError, AttributeError ) as e: + attributeMappingFilter.logger.error( + f"The filter { attributeMappingFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { attributeMappingFilter.logger.name } failed due to:\n{ e }" + attributeMappingFilter.logger.critical( mess, exc_info=True ) return 1 diff --git a/geos-pv/src/geos/pv/plugins/PVCellTypeCounterEnhanced.py b/geos-pv/src/geos/pv/plugins/PVCellTypeCounterEnhanced.py index bbf6178c..1724f778 100644 --- a/geos-pv/src/geos/pv/plugins/PVCellTypeCounterEnhanced.py +++ b/geos-pv/src/geos/pv/plugins/PVCellTypeCounterEnhanced.py @@ -137,7 +137,9 @@ def RequestData( cellTypeCounterEnhancedFilter: CellTypeCounterEnhanced = CellTypeCounterEnhanced( inputMesh, True ) if len( cellTypeCounterEnhancedFilter.logger.handlers ) == 0: cellTypeCounterEnhancedFilter.setLoggerHandler( VTKHandler() ) - if cellTypeCounterEnhancedFilter.applyFilter(): + + try: + cellTypeCounterEnhancedFilter.applyFilter() outputTable.ShallowCopy( cellTypeCounterEnhancedFilter.getOutput() ) # print counts in Output Messages view @@ -152,4 +154,12 @@ def RequestData( cellTypeCounterEnhancedFilter.logger.info( f"File {self._filename} was successfully written." 
) except Exception as e: cellTypeCounterEnhancedFilter.logger.info( f"Error while exporting the file due to:\n{ e }" ) + + except TypeError as e: + cellTypeCounterEnhancedFilter.logger.error( + f"The filter { cellTypeCounterEnhancedFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { cellTypeCounterEnhancedFilter.logger.name } failed due to:\n{ e }" + cellTypeCounterEnhancedFilter.logger.critical( mess, exc_info=True ) + return 1 diff --git a/geos-pv/src/geos/pv/plugins/PVCreateConstantAttributePerRegion.py b/geos-pv/src/geos/pv/plugins/PVCreateConstantAttributePerRegion.py index 95a7af0c..ea44eaf3 100644 --- a/geos-pv/src/geos/pv/plugins/PVCreateConstantAttributePerRegion.py +++ b/geos-pv/src/geos/pv/plugins/PVCreateConstantAttributePerRegion.py @@ -289,7 +289,14 @@ def ApplyFilter( self, inputMesh: vtkDataSet, outputMesh: vtkDataSet ) -> None: if len( createConstantAttributePerRegionFilter.logger.handlers ) == 0: createConstantAttributePerRegionFilter.setLoggerHandler( VTKHandler() ) - createConstantAttributePerRegionFilter.applyFilter() + try: + createConstantAttributePerRegionFilter.applyFilter() + except ( ValueError, AttributeError ) as e: + createConstantAttributePerRegionFilter.logger.error( + f"The filter { createConstantAttributePerRegionFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { createConstantAttributePerRegionFilter.logger.name } failed due to:\n{ e }" + createConstantAttributePerRegionFilter.logger.critical( mess, exc_info=True ) self.clearDictRegion = True diff --git a/geos-pv/src/geos/pv/plugins/PVFillPartialArrays.py b/geos-pv/src/geos/pv/plugins/PVFillPartialArrays.py index ee7fa1b1..cec56556 100644 --- a/geos-pv/src/geos/pv/plugins/PVFillPartialArrays.py +++ b/geos-pv/src/geos/pv/plugins/PVFillPartialArrays.py @@ -108,7 +108,14 @@ def ApplyFilter( self, inputMesh: vtkMultiBlockDataSet, outputMesh: vtkMultiBloc if len( 
fillPartialArraysFilter.logger.handlers ) == 0: fillPartialArraysFilter.setLoggerHandler( VTKHandler() ) - fillPartialArraysFilter.applyFilter() + try: + fillPartialArraysFilter.applyFilter() + except ( ValueError, AttributeError ) as e: + fillPartialArraysFilter.logger.error( + f"The filter { fillPartialArraysFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { fillPartialArraysFilter.logger.name } failed due to:\n{ e }" + fillPartialArraysFilter.logger.critical( mess, exc_info=True ) self.clearDictAttributesValues = True diff --git a/geos-pv/src/geos/pv/plugins/PVGeomechanicsCalculator.py b/geos-pv/src/geos/pv/plugins/PVGeomechanicsCalculator.py index d6b4fa4d..72995dd7 100644 --- a/geos-pv/src/geos/pv/plugins/PVGeomechanicsCalculator.py +++ b/geos-pv/src/geos/pv/plugins/PVGeomechanicsCalculator.py @@ -234,6 +234,7 @@ def ApplyFilter( """ geomechanicsCalculatorFilter: GeomechanicsCalculator outputMesh.ShallowCopy( inputMesh ) + mess: str if isinstance( outputMesh, vtkUnstructuredGrid ): geomechanicsCalculatorFilter = GeomechanicsCalculator( outputMesh, @@ -249,8 +250,16 @@ def ApplyFilter( geomechanicsCalculatorFilter.physicalConstants.rockCohesion = self.rockCohesion geomechanicsCalculatorFilter.physicalConstants.frictionAngle = self.frictionAngle - if geomechanicsCalculatorFilter.applyFilter(): + try: + geomechanicsCalculatorFilter.applyFilter() outputMesh.ShallowCopy( geomechanicsCalculatorFilter.getOutput() ) + except ( ValueError, AttributeError ) as e: + geomechanicsCalculatorFilter.logger.error( + f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess = f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to:\n{ e }" + geomechanicsCalculatorFilter.logger.critical( mess, exc_info=True ) + elif isinstance( outputMesh, vtkMultiBlockDataSet ): volumeBlockIndexes: list[ int ] = getBlockElementIndexesFlatten( outputMesh ) for blockIndex in 
volumeBlockIndexes: @@ -274,9 +283,16 @@ def ApplyFilter( geomechanicsCalculatorFilter.physicalConstants.rockCohesion = self.rockCohesion geomechanicsCalculatorFilter.physicalConstants.frictionAngle = self.frictionAngle - if geomechanicsCalculatorFilter.applyFilter(): + try: + geomechanicsCalculatorFilter.applyFilter() volumeBlock.ShallowCopy( geomechanicsCalculatorFilter.getOutput() ) volumeBlock.Modified() + except ( ValueError, AttributeError ) as e: + geomechanicsCalculatorFilter.logger.error( + f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess = f"The filter { geomechanicsCalculatorFilter.logger.name } failed due to:\n{ e }" + geomechanicsCalculatorFilter.logger.critical( mess, exc_info=True ) outputMesh.Modified() diff --git a/geos-pv/src/geos/pv/plugins/PVGeomechanicsWorkflow.py b/geos-pv/src/geos/pv/plugins/PVGeomechanicsWorkflow.py index 3f674fa6..77029c3b 100644 --- a/geos-pv/src/geos/pv/plugins/PVGeomechanicsWorkflow.py +++ b/geos-pv/src/geos/pv/plugins/PVGeomechanicsWorkflow.py @@ -15,6 +15,7 @@ update_paths() +from geos.utils.Errors import VTKError from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, DEFAULT_GRAIN_BULK_MODULUS, DEFAULT_ROCK_COHESION, WATER_DENSITY ) @@ -334,16 +335,12 @@ def RequestData( self.logger.info( f"The plugin { self.logger.name } succeeded." 
) - except AssertionError as e: - mess: str = "Geomechanics workflow failed due to:" - self.logger.error( mess ) - self.logger.error( str( e ) ) - return 0 + except ( ValueError, VTKError, AttributeError, AssertionError ) as e: + self.logger.error( f"The plugin { self.logger.name } failed due to:\n{ e }" ) except Exception as e: - mess1: str = "Geomechanics workflow failed due to:" - self.logger.critical( mess1 ) - self.logger.critical( e, exc_info=True ) - return 0 + mess: str = f"The filter { self.logger.name } failed due to:\n{ e }" + self.logger.critical( mess, exc_info=True ) + return 1 def applyPVGeosBlockExtractAndMerge( self: Self ) -> None: diff --git a/geos-pv/src/geos/pv/plugins/PVGeosBlockExtractAndMerge.py b/geos-pv/src/geos/pv/plugins/PVGeosBlockExtractAndMerge.py index a2de08ea..1587f9e2 100644 --- a/geos-pv/src/geos/pv/plugins/PVGeosBlockExtractAndMerge.py +++ b/geos-pv/src/geos/pv/plugins/PVGeosBlockExtractAndMerge.py @@ -21,6 +21,7 @@ from geos.mesh.utils.arrayModifiers import ( copyAttribute, createCellCenterAttribute ) from geos.mesh.utils.multiblockHelpers import getBlockNames +from geos.utils.Errors import VTKError from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, GeosDomainNameEnum, getAttributeToTransferFromInitialTime ) @@ -253,25 +254,35 @@ def RequestData( Returns: int: 1 if calculation successfully ended, 0 otherwise. """ - try: - inputMesh: vtkMultiBlockDataSet = vtkMultiBlockDataSet.GetData( inInfoVec[ 0 ] ) - executive = self.GetExecutive() + inputMesh: vtkMultiBlockDataSet = vtkMultiBlockDataSet.GetData( inInfoVec[ 0 ] ) + executive = self.GetExecutive() + mess: str - # First time step, compute the initial properties (useful for geomechanics analyses) - if self.requestDataStep == 0: - self.logger.info( "Apply the plugin for the first time step to get the initial properties." 
) + # First time step, compute the initial properties (useful for geomechanics analyses) + if self.requestDataStep == 0: + self.logger.info( + f"Apply the plugin { self.logger.name } for the first time step to get the initial properties." ) + try: doExtractAndMerge( inputMesh, self.outputCellsT0, vtkMultiBlockDataSet(), vtkMultiBlockDataSet(), self.extractFault, self.extractWell ) request.Set( executive.CONTINUE_EXECUTING(), 1 ) - - # Current time step, extract, merge, rename and transfer properties - if self.requestDataStep == self.currentTimeStepIndex: - self.logger.info( f"Apply the filter for the current time step: { self.currentTimeStepIndex }." ) - outputCells: vtkMultiBlockDataSet = self.GetOutputData( outInfoVec, 0 ) - outputFaults: vtkMultiBlockDataSet = self.GetOutputData( - outInfoVec, 1 ) if self.extractFault else vtkMultiBlockDataSet() - outputWells: vtkMultiBlockDataSet = self.GetOutputData( - outInfoVec, 2 ) if self.extractWell else vtkMultiBlockDataSet() + except ( ValueError, VTKError ) as e: + self.logger.error( f"The plugin { self.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess = f"The plugin { self.logger.name } failed due to:\n{ e }" + self.logger.critical( mess, exc_info=True ) + + # Current time step, extract, merge, rename and transfer properties + if self.requestDataStep == self.currentTimeStepIndex: + self.logger.info( + f"Apply the plugin { self.logger.name } for the current time step: { self.currentTimeStepIndex }." 
) + outputCells: vtkMultiBlockDataSet = self.GetOutputData( outInfoVec, 0 ) + outputFaults: vtkMultiBlockDataSet = self.GetOutputData( + outInfoVec, 1 ) if self.extractFault else vtkMultiBlockDataSet() + outputWells: vtkMultiBlockDataSet = self.GetOutputData( outInfoVec, + 2 ) if self.extractWell else vtkMultiBlockDataSet() + + try: doExtractAndMerge( inputMesh, outputCells, outputFaults, outputWells, self.extractFault, self.extractWell ) @@ -293,12 +304,10 @@ def RequestData( self.requestDataStep = -2 self.logger.info( f"The plugin { self.logger.name } succeeded." ) - except AssertionError as e: - self.logger.error( f"The plugin failed.\n{e}" ) - return 0 - except Exception as e: - mess1: str = "Block extraction and merge failed due to:" - self.logger.critical( mess1 ) - self.logger.critical( e, exc_info=True ) - return 0 + except ( ValueError, VTKError ) as e: + self.logger.error( f"The plugin { self.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess = f"The plugin { self.logger.name } failed due to:\n{ e }" + self.logger.critical( mess, exc_info=True ) + return 1 diff --git a/geos-pv/src/geos/pv/plugins/PVMergeBlocksEnhanced.py b/geos-pv/src/geos/pv/plugins/PVMergeBlocksEnhanced.py index ea96de82..b4faec3d 100644 --- a/geos-pv/src/geos/pv/plugins/PVMergeBlocksEnhanced.py +++ b/geos-pv/src/geos/pv/plugins/PVMergeBlocksEnhanced.py @@ -24,6 +24,7 @@ update_paths() from geos.processing.generic_processing_tools.MergeBlockEnhanced import MergeBlockEnhanced +from geos.utils.Errors import VTKError __doc__ = """ Merge Blocks Keeping Partial Attributes is a Paraview plugin filter that allows to merge blocks from a multiblock dataset while keeping partial attributes. 
@@ -116,8 +117,15 @@ def RequestData( if len( mergeBlockEnhancedFilter.logger.handlers ) == 0: mergeBlockEnhancedFilter.setLoggerHandler( VTKHandler() ) - if mergeBlockEnhancedFilter.applyFilter(): + try: + mergeBlockEnhancedFilter.applyFilter() outputMesh.ShallowCopy( mergeBlockEnhancedFilter.getOutput() ) outputMesh.Modified() + except VTKError as e: + mergeBlockEnhancedFilter.logger.error( + f"The filter { mergeBlockEnhancedFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { mergeBlockEnhancedFilter.logger.name } failed due to:\n{ e }" + mergeBlockEnhancedFilter.logger.critical( mess, exc_info=True ) return 1 diff --git a/geos-pv/src/geos/pv/plugins/PVMeshQualityEnhanced.py b/geos-pv/src/geos/pv/plugins/PVMeshQualityEnhanced.py index 4debf0a4..06149776 100644 --- a/geos-pv/src/geos/pv/plugins/PVMeshQualityEnhanced.py +++ b/geos-pv/src/geos/pv/plugins/PVMeshQualityEnhanced.py @@ -239,8 +239,8 @@ def ApplyFilter( self, inputMesh: vtkUnstructuredGrid, outputMesh: vtkUnstructur wedgeMetrics=wedgeMetrics, hexaMetrics=hexaMetrics ) meshQualityEnhancedFilter.SetOtherMeshQualityMetrics( otherMetrics ) - if meshQualityEnhancedFilter.applyFilter(): - + try: + meshQualityEnhancedFilter.applyFilter() outputMesh.ShallowCopy( meshQualityEnhancedFilter.getOutput() ) # save to file if asked @@ -249,6 +249,13 @@ def ApplyFilter( self, inputMesh: vtkUnstructuredGrid, outputMesh: vtkUnstructur logger: logging.Logger = meshQualityEnhancedFilter.logger self.saveFile( stats, logger ) self._blockIndex += 1 + except ( ValueError, IndexError, TypeError, AttributeError ) as e: + meshQualityEnhancedFilter.logger.error( + f"The filter { meshQualityEnhancedFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { meshQualityEnhancedFilter.logger.name } failed due to:\n{ e }" + meshQualityEnhancedFilter.logger.critical( mess, exc_info=True ) + return def saveFile( diff --git 
a/geos-pv/src/geos/pv/plugins/PVSplitMesh.py b/geos-pv/src/geos/pv/plugins/PVSplitMesh.py index 6b149538..b677bf6d 100644 --- a/geos-pv/src/geos/pv/plugins/PVSplitMesh.py +++ b/geos-pv/src/geos/pv/plugins/PVSplitMesh.py @@ -54,7 +54,14 @@ def ApplyFilter( self: Self, inputMesh: vtkPointSet, outputMesh: vtkPointSet ) - splitMeshFilter: SplitMesh = SplitMesh( inputMesh, True ) if len( splitMeshFilter.logger.handlers ) == 0: splitMeshFilter.setLoggerHandler( VTKHandler() ) - if splitMeshFilter.applyFilter(): + + try: + splitMeshFilter.applyFilter() outputMesh.ShallowCopy( splitMeshFilter.getOutput() ) + except ( TypeError, AttributeError ) as e: + splitMeshFilter.logger.error( f"The filter {splitMeshFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { splitMeshFilter.logger.name } failed due to:\n{ e }" + splitMeshFilter.logger.critical( mess, exc_info=True ) return diff --git a/geos-pv/src/geos/pv/plugins/PVSurfaceGeomechanics.py b/geos-pv/src/geos/pv/plugins/PVSurfaceGeomechanics.py index 0fc204a7..99d2570c 100644 --- a/geos-pv/src/geos/pv/plugins/PVSurfaceGeomechanics.py +++ b/geos-pv/src/geos/pv/plugins/PVSurfaceGeomechanics.py @@ -20,6 +20,7 @@ update_paths() +from geos.utils.Errors import VTKError from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, DEFAULT_ROCK_COHESION ) from geos.processing.post_processing.SurfaceGeomechanics import SurfaceGeomechanics from geos.mesh.utils.multiblockHelpers import ( getBlockElementIndexesFlatten, getBlockFromFlatIndex ) @@ -126,8 +127,9 @@ def ApplyFilter( self: Self, inputMesh: vtkMultiBlockDataSet, outputMesh: vtkMul sgFilter.SetRockCohesion( self._getRockCohesion() ) sgFilter.SetFrictionAngle( self._getFrictionAngle() ) - if sgFilter.applyFilter(): + try: + sgFilter.applyFilter() outputSurface: vtkPolyData = sgFilter.GetOutputMesh() # add attributes to output surface mesh @@ -136,6 +138,11 @@ def ApplyFilter( self: Self, inputMesh: vtkMultiBlockDataSet, 
outputMesh: vtkMul surfaceBlock.GetCellData().AddArray( attr ) surfaceBlock.GetCellData().Modified() surfaceBlock.Modified() + except ( ValueError, VTKError, AttributeError, AssertionError ) as e: + sgFilter.logger.error( f"The filter { sgFilter.logger.name } failed due to:\n{ e }" ) + except Exception as e: + mess: str = f"The filter { sgFilter.logger.name } failed due to:\n{ e }" + sgFilter.logger.critical( mess, exc_info=True ) outputMesh.Modified() return diff --git a/geos-pv/src/geos/pv/utils/workflowFunctions.py b/geos-pv/src/geos/pv/utils/workflowFunctions.py index b4fd87aa..1e2a07aa 100644 --- a/geos-pv/src/geos/pv/utils/workflowFunctions.py +++ b/geos-pv/src/geos/pv/utils/workflowFunctions.py @@ -36,8 +36,9 @@ def doExtractAndMerge( speHandler=True ) if not blockExtractor.logger.hasHandlers(): blockExtractor.setLoggerHandler( VTKHandler() ) - if not blockExtractor.applyFilter(): - raise + + blockExtractor.applyFilter() + # recover output objects from GeosBlockExtractor filter and merge internal blocks volumeBlockExtracted: vtkMultiBlockDataSet = blockExtractor.extractedGeosDomain.volume outputCells.ShallowCopy( mergeBlocksFilter( volumeBlockExtracted, False, "Volume" ) ) @@ -77,8 +78,7 @@ def mergeBlocksFilter( mergeBlockFilter: GeosBlockMerge = GeosBlockMerge( mesh, convertSurfaces, True, loggerName ) if not mergeBlockFilter.logger.hasHandlers(): mergeBlockFilter.setLoggerHandler( VTKHandler() ) - if not mergeBlockFilter.applyFilter(): - raise + mergeBlockFilter.applyFilter() mergedBlocks: vtkMultiBlockDataSet = vtkMultiBlockDataSet() mergedBlocks.ShallowCopy( mergeBlockFilter.getOutput() ) return mergedBlocks