diff --git a/src/sempy_labs/_model_bpa_bulk.py b/src/sempy_labs/_model_bpa_bulk.py
index 70a8a5dd..49377639 100644
--- a/src/sempy_labs/_model_bpa_bulk.py
+++ b/src/sempy_labs/_model_bpa_bulk.py
@@ -28,6 +28,7 @@ def run_model_bpa_bulk(
     workspace: Optional[str | UUID | List[str | UUID]] = None,
     skip_models: Optional[str | List[str]] = ["ModelBPA", "Fabric Capacity Metrics"],
     skip_models_in_workspace: Optional[dict] = None,
+    return_df: bool = False,
 ):
     """
     Runs the semantic model Best Practice Analyzer across all semantic models in a workspace (or all accessible workspaces).
@@ -54,6 +55,8 @@
             "Workspace A": ["Dataset1", "Dataset2"],
             "Workspace B": ["Dataset5", "Dataset 8"],
         }
+    return_df : bool, default=False
+        If True, returns the BPA results for the scanned semantic models as a pandas DataFrame.
     """

     if not lakehouse_attached():
@@ -66,6 +69,9 @@
         skip_models.extend(["ModelBPA", "Fabric Capacity Metrics"])

+    # Accumulates the BPA results across all scanned workspaces so they can be returned to the caller.
+    bpa_df = pd.DataFrame()
+
     now = datetime.datetime.now()
     output_table = "modelbparesults"
     lakeT = get_lakehouse_tables()
@@ -175,6 +181,9 @@
                 for key, value in icons.bpa_schema.items()
             }

+            # Append this workspace's results to the combined DataFrame.
+            bpa_df = pd.concat([bpa_df, df], ignore_index=True)
+
             save_as_delta_table(
                 dataframe=df,
                 delta_table_name=output_table,
@@ -185,6 +194,10 @@
             print(
                 f"{icons.green_dot} Saved BPA results to the '{output_table}' delta table."
             )

     print(f"{icons.green_dot} Bulk BPA scan complete.")
+
+    # Return the combined results only after every workspace has been scanned and saved.
+    if return_df:
+        return bpa_df
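
For reference, a minimal usage sketch of the new flag, assuming `run_model_bpa_bulk` is exported at the package root (as in current sempy-labs releases), a lakehouse is attached to the notebook, and the bulk results carry a "Severity" column as in the single-model BPA output; the workspace name is a placeholder:

import sempy_labs as labs

# "Sales Workspace" is a placeholder; a single name or a list of names/UUIDs is accepted.
bpa_results = labs.run_model_bpa_bulk(
    workspace=["Sales Workspace"],
    return_df=True,
)

# The combined DataFrame can be filtered like any other; the "Severity"
# column is assumed to match the single-model BPA output schema.
errors = bpa_results[bpa_results["Severity"] == "Error"]
print(errors.head())

Without return_df=True the function behaves as before, saving the results to the 'modelbparesults' delta table and returning None.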