14 changes: 14 additions & 0 deletions src/sempy_labs/_model_bpa_bulk.py
@@ -28,6 +28,7 @@ def run_model_bpa_bulk(
workspace: Optional[str | UUID | List[str | UUID]] = None,
skip_models: Optional[str | List[str]] = ["ModelBPA", "Fabric Capacity Metrics"],
skip_models_in_workspace: Optional[dict] = None,
return_df: bool = False,
):
"""
Runs the semantic model Best Practice Analyzer across all semantic models in a workspace (or all accessible workspaces).
@@ -54,6 +55,7 @@
"Workspace A": ["Dataset1", "Dataset2"],
"Workspace B": ["Dataset5", "Dataset 8"],
}
return_df : bool, default=False
    If True, returns the accumulated BPA results for the specified workspaces as a pandas DataFrame.
"""

if not lakehouse_attached():
@@ -66,6 +68,9 @@

skip_models.extend(["ModelBPA", "Fabric Capacity Metrics"])

# DataFrame used to accumulate the per-model BPA results
results_df = pd.DataFrame()

now = datetime.datetime.now()
output_table = "modelbparesults"
lakeT = get_lakehouse_tables()
@@ -175,6 +180,9 @@
for key, value in icons.bpa_schema.items()
}

# Append this model's BPA results to the accumulated DataFrame
results_df = pd.concat([results_df, df], ignore_index=True)

save_as_delta_table(
dataframe=df,
delta_table_name=output_table,
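
One note on the accumulation above: calling `pd.concat` on a growing DataFrame inside the loop re-copies all previously accumulated rows on every iteration. A common alternative, sketched below with placeholder data and illustrative column names (not taken from this PR), is to collect the per-model frames in a list and concatenate once after the loop:

```python
import pandas as pd

# Hypothetical per-model result frames standing in for the `df` built inside
# the scan loop; column names here are illustrative only.
per_model_results = [
    pd.DataFrame({"Rule Name": ["Avoid bi-directional relationships"], "Severity": ["Warning"]}),
    pd.DataFrame({"Rule Name": ["Remove unused columns"], "Severity": ["Info"]}),
]

# Collect frames in a list and concatenate once at the end; this avoids the
# quadratic copying incurred by concatenating onto a growing frame per loop pass.
frames = []
for df in per_model_results:
    frames.append(df)

all_results = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()
print(all_results)
```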
@@ -185,6 +193,9 @@
print(
f"{icons.green_dot} Saved BPA results to the '{output_table}' delta table."
)
print(f"{icons.green_dot} Bulk BPA scan complete.")

# If `return_df` is True, return the accumulated BPA results as a DataFrame.
if return_df:
    return results_df
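
For context, a minimal usage sketch of the new flag (assuming `run_model_bpa_bulk` is exported from the package root, and run from a Fabric notebook with a default lakehouse attached, which the function checks via `lakehouse_attached()`; workspace names are placeholders):

```python
from sempy_labs import run_model_bpa_bulk

# Run the bulk BPA scan across two workspaces and, with return_df=True,
# capture the accumulated results as a DataFrame in addition to the
# writes to the 'modelbparesults' delta table.
bpa_df = run_model_bpa_bulk(
    workspace=["Workspace A", "Workspace B"],
    return_df=True,
)

# Inspect the first few rows of the combined results
print(bpa_df.head())
```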

@@ -227,6 +238,9 @@ def create_model_bpa_semantic_model(
lakehouse=lakehouse, workspace=lakehouse_workspace_id
)

# Generate the shared expression based on the lakehouse and lakehouse workspace
expr = generate_shared_expression(
item_name=lakehouse_name,