Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
88 changes: 79 additions & 9 deletions backend/apps/ifc_validation/tasks/check_programs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from typing import List

from apps.ifc_validation_models.settings import TASK_TIMEOUT_LIMIT
from apps.ifc_validation_models.models import ValidationTask
from apps.ifc_validation_models.models import Model, ValidationTask
from core.settings import ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB

from .logger import logger
from .context import TaskContext
Expand Down Expand Up @@ -44,10 +45,51 @@ def is_schema_error(line):
return True

def check_schema(context:TaskContext):
proc = run_subprocess(
task = context.task,
command = [sys.executable, "-m", "ifcopenshell.validate", "--json", "--rules", "--fields", context.file_path ]
)
if context.rdb_file_path_if_exists == context.file_path:
# No conversion to RocksDB has been made
proc = run_subprocess(
task = context.task,
command = [sys.executable, "-m", "ifcopenshell.validate", "--json", "--rules", "--fields", context.file_path ]
)
else:
# We have a RocksDB file, which is functionally almost the same
# except that certain errors are only present in SPF which have
# been captured in a separate log file, which needs to be blended
# in into the stream of other messages.
proc = run_subprocess(
task = context.task,
command=[
sys.executable,
"-c",
f"""
import json
import ifcopenshell
from ifcopenshell.validate import *

logger = json_logger()

spf_filename = {json.dumps(context.file_path)}
file = ifcopenshell.open({json.dumps(context.rdb_file_path_if_exists)})
log_filename = {json.dumps(context.log_file_path_if_exists)}
if log_filename:
log_content = open(log_filename).read()
if log_content:
# certain errors are only present when interacting with SPF, these
# are captured during the conversion to RocksDB and now emitted.
log_internal_cpp_errors(None, spf_filename, logger, log_content=log_content)

validate(file, logger, True)

def conv(x):
if isinstance(x, ifcopenshell.entity_instance):
return x.get_info(scalar_only=True)
else:
return str(x)

for x in logger.statements:
print(json.dumps(x, default=conv))
"""])

output = list(filter(is_schema_error, proc.stdout.split("\n")))
success = proc.returncode >= 0
valid = len(output) == 0
Expand Down Expand Up @@ -108,7 +150,7 @@ def check_prerequisites(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
"--file-name", context.file_path,
"--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "CRITICAL",
"--purepythonparser"
Expand All @@ -124,7 +166,7 @@ def check_normative_ia(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
"--file-name", context.file_path,
"--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "IMPLEMENTER_AGREEMENT"
]
Expand All @@ -139,7 +181,7 @@ def check_normative_ip(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
"--file-name", context.file_path,
"--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "INFORMAL_PROPOSITION"
]
Expand All @@ -154,7 +196,7 @@ def check_industry_practices(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
"--file-name", context.file_path,
"--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "INDUSTRY_PRACTICE"
]
Expand All @@ -173,6 +215,34 @@ def check_proc_success_or_fail(proc, task):
raise RuntimeError(error_message)
return proc.stdout

def check_rocksdb_conversion(context: TaskContext):
    """Convert oversized IFC files to RocksDB for cheaper downstream processing.

    Files larger than ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB are converted in a
    subprocess; the JSON-formatted conversion log is written next to the
    original file so that SPF-only errors captured during conversion can be
    replayed later (see check_schema). Smaller files are left untouched.

    Sets context.result to Model.Status.VALID, INVALID or NOT_APPLICABLE
    and returns the (mutated) context.
    """
    if os.path.getsize(context.file_path) > ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB * 1024 * 1024:
        # NOTE(review): /tmp is worker-local; this assumes all subsequent
        # tasks for this request run on the same host -- confirm against
        # the deployment topology.
        rdb_file_path = '/tmp/' + os.path.basename(context.file_path) + '.rdb'
        log_file_path = context.file_path + '.log'
        try:
            run_subprocess(
                task = context.task,
                command=[
                    sys.executable,
                    "-c",
                    f"""
import ifcopenshell
ifcopenshell.ifcopenshell_wrapper.set_log_format_json()
ifcopenshell.convert_path_to_rocksdb(
    {json.dumps(context.file_path)},
    {json.dumps(rdb_file_path)})
with open({json.dumps(log_file_path)}, 'w') as f:
    f.write(ifcopenshell.get_log())
"""
                ]
            )
            context.result = Model.Status.VALID if os.path.exists(rdb_file_path) else Model.Status.INVALID
        except Exception:
            # A failed conversion must not abort the pipeline: record the
            # failure and let downstream tasks fall back to the SPF file.
            # (Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.)
            logger.exception("RocksDB conversion failed for %s", context.file_path)
            context.result = Model.Status.INVALID
    else:
        # BUGFIX: previously this assignment ran unconditionally after the
        # try/except, overwriting the VALID/INVALID result just computed.
        # Below the size threshold, conversion simply does not apply.
        context.result = Model.Status.NOT_APPLICABLE
    return context


def run_subprocess(
task: ValidationTask,
command: List[str],
Expand Down
33 changes: 15 additions & 18 deletions backend/apps/ifc_validation/tasks/configs.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,30 +44,27 @@ def _load_function(module, prefix, type):
)

# define task info
header_syntax = make_task(type=ValidationTask.Type.HEADER_SYNTAX, increment=5, field='status_header_syntax', stage="serial")
header = make_task(type=ValidationTask.Type.HEADER, increment=10, field='status_header', stage="serial")
syntax = make_task(type=ValidationTask.Type.SYNTAX, increment=5, field='status_syntax', stage="serial")
prerequisites = make_task(type=ValidationTask.Type.PREREQUISITES, increment=10, field='status_prereq', stage="serial")
schema = make_task(type=ValidationTask.Type.SCHEMA, increment=10, field='status_schema')
digital_signatures = make_task(type=ValidationTask.Type.DIGITAL_SIGNATURES, increment=5, field='status_signatures')
bsdd = make_task(type=ValidationTask.Type.BSDD, increment=0, field='status_bsdd')
normative_ia = make_task(type=ValidationTask.Type.NORMATIVE_IA, increment=20, field='status_ia')
normative_ip = make_task(type=ValidationTask.Type.NORMATIVE_IP, increment=20, field='status_ip')
industry_practices = make_task(type=ValidationTask.Type.INDUSTRY_PRACTICES, increment=10, field='status_industry_practices')
instance_completion = make_task(type=ValidationTask.Type.INSTANCE_COMPLETION, increment=5, field=None, stage="final")
ALL_TASKS = [
rocksdb_conv := make_task(type=ValidationTask.Type.ROCKSDB_CONVERSION, increment=5, field='status_rocksdb_conversion', stage="serial"),
header_syntax := make_task(type=ValidationTask.Type.HEADER_SYNTAX, increment=5, field='status_header_syntax', stage="serial"),
header := make_task(type=ValidationTask.Type.HEADER, increment=5, field='status_header', stage="serial"),
syntax := make_task(type=ValidationTask.Type.SYNTAX, increment=5, field='status_syntax', stage="serial"),
prerequisites := make_task(type=ValidationTask.Type.PREREQUISITES, increment=10, field='status_prereq', stage="serial"),
schema := make_task(type=ValidationTask.Type.SCHEMA, increment=10, field='status_schema'),
digital_signatures := make_task(type=ValidationTask.Type.DIGITAL_SIGNATURES, increment=5, field='status_signatures'),
bsdd := make_task(type=ValidationTask.Type.BSDD, increment=0, field='status_bsdd'),
normative_ia := make_task(type=ValidationTask.Type.NORMATIVE_IA, increment=20, field='status_ia'),
normative_ip := make_task(type=ValidationTask.Type.NORMATIVE_IP, increment=20, field='status_ip'),
industry_practices := make_task(type=ValidationTask.Type.INDUSTRY_PRACTICES, increment=10, field='status_industry_practices'),
instance_completion := make_task(type=ValidationTask.Type.INSTANCE_COMPLETION, increment=5, field=None, stage="final"),
]

# block tasks on error
post_tasks = [digital_signatures, schema, normative_ia, normative_ip, industry_practices, instance_completion]
post_tasks = [t for t in ALL_TASKS if t.execution_stage != "serial"]
header_syntax.blocks = [header, syntax, prerequisites] + post_tasks
syntax.blocks = post_tasks.copy()
prerequisites.blocks = post_tasks.copy()

# register
ALL_TASKS = [
header_syntax, header, syntax, prerequisites,
schema, digital_signatures, bsdd,
normative_ia, normative_ip, industry_practices, instance_completion,
]
class TaskRegistry:
def __init__(self, config_map: dict[str, TaskConfig]):
self._configs = config_map
Expand Down
19 changes: 18 additions & 1 deletion backend/apps/ifc_validation/tasks/context.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import os
from typing import Any, Optional
from dataclasses import dataclass
from apps.ifc_validation_models.models import ValidationRequest, ValidationTask
Expand All @@ -8,4 +9,20 @@ class TaskContext:
request: ValidationRequest # the current request
task: ValidationTask #the current task
file_path: str # for IFC files
result: Optional[Any] = None # result from execution layer
result: Optional[Any] = None # result from execution layer

@property
def rdb_file_path_if_exists(self):
    """Path to the RocksDB conversion of this file, if one was produced.

    The conversion task writes to /tmp/<basename>.rdb; when no conversion
    exists, fall back to the original SPF path so callers can use this
    property unconditionally in place of ``file_path``.
    """
    # (Removed a commented-out alternative path scheme that was left here.)
    rdb_path = "/tmp/" + os.path.basename(self.file_path) + ".rdb"
    return rdb_path if os.path.exists(rdb_path) else self.file_path

@property
def log_file_path_if_exists(self):
    """Path to the conversion log sitting next to the IFC file, or None."""
    log_path = self.file_path + ".log"
    return log_path if os.path.exists(log_path) else None
3 changes: 2 additions & 1 deletion backend/apps/ifc_validation/tasks/processing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,5 @@
from .schema import process_schema
from .header import process_header
from .digital_signatures import process_digital_signatures
from .bsdd import process_bsdd
from .bsdd import process_bsdd
from .rocksdb import process_rocksdb_conversion
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

def process_instance_completion(context:TaskContext):
# the current task doesn't have any execution layer and links instance ids to outcomes
ifc_file = ifcopenshell.open(context.file_path)
ifc_file = ifcopenshell.open(context.rdb_file_path_if_exists)
with transaction.atomic():
model_id = context.request.model.id
model_instances = ModelInstance.objects.filter(model_id=model_id, ifc_type__in=[None, ''])
Expand Down
13 changes: 13 additions & 0 deletions backend/apps/ifc_validation/tasks/processing/rocksdb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@

import json

from apps.ifc_validation_models.models import Model
from .. import TaskContext, with_model


def process_rocksdb_conversion(context: TaskContext):
    """Persist the RocksDB conversion outcome on the model and report it.

    Stores ``context.result`` (a Model.Status value produced by the
    execution layer) on the model's ``status_rocksdb_conversion`` field
    and returns a human-readable summary string for the task record.
    """
    status = context.result
    with with_model(context.request.id) as model:
        model.status_rocksdb_conversion = status
        model.save(update_fields=['status_rocksdb_conversion'])
    return f"agg_status = {Model.Status(status).label}\nmessages = {status}"
3 changes: 3 additions & 0 deletions backend/apps/ifc_validation/tasks/task_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,7 @@ def ifc_file_validation_task(self, id, file_name, *args, **kwargs):
workflow_completed = on_workflow_completed.s(id=id, file_name=file_name)

serial_tasks = chain(
rocksdb_conv_subtask.s(id=id, file_name=file_name),
header_syntax_validation_subtask.s(id=id, file_name=file_name),
header_validation_subtask.s(id=id, file_name=file_name),
syntax_validation_subtask.s(id=id, file_name=file_name),
Expand Down Expand Up @@ -223,3 +224,5 @@ def ifc_file_validation_task(self, id, file_name, *args, **kwargs):
bsdd_validation_subtask = task_factory(ValidationTask.Type.BSDD)

industry_practices_subtask = task_factory(ValidationTask.Type.INDUSTRY_PRACTICES)

rocksdb_conv_subtask = task_factory(ValidationTask.Type.ROCKSDB_CONVERSION)
3 changes: 2 additions & 1 deletion backend/core/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,8 @@

# Uploaded files
MAX_FILES_PER_UPLOAD = 100
MAX_FILE_SIZE_IN_MB = int(os.environ.get("MAX_FILE_SIZE_IN_MB", 256)) # default to 256 MB
MAX_FILE_SIZE_IN_MB = int(os.environ.get("MAX_FILE_SIZE_IN_MB", 512)) # default to 512 MB
ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB = int(os.environ.get("ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB", 256))
MEDIA_URL = '/files/'
MEDIA_ROOT = os.environ.get('MEDIA_ROOT', '/files_storage')
try:
Expand Down
31 changes: 30 additions & 1 deletion frontend/src/Dz.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,33 @@ import Snackbar from '@mui/material/Snackbar';
import Alert from '@mui/material/Alert';
import { PageContext } from './Page';
import { getCookieValue } from './Cookies';
import { AlertTitle } from "@mui/material";
import WarningAmberIcon from "@mui/icons-material/WarningAmber";

function FileSizeWarning() {
return (
<Alert
severity="warning"
icon={<WarningAmberIcon fontSize="inherit" />}
sx={{ borderRadius: 2, mb: 2 }}
>
<AlertTitle>Notice</AlertTitle>
Files of over <strong>128 MB</strong> trigger an experimental processing pipeline.
Please report any issues to <a href="mailto:[email protected]">[email protected]</a>.
</Alert>
);
}

function Dz() {

const MAX_FILE_SIZE_IN_MB = 256;
const MAX_FILE_SIZE_IN_MB = 512;
const EXPERIMENTAL_FILE_SIZE_IN_MB = 256;
const TOAST_DURATION = 5000; // ms

const context = useContext(PageContext);

const [showExperimentalWarning, setShowExperimentalWarning] = React.useState(false);

const [showErrorToast, setShowErrorToast] = React.useState({
open: false,
fileName: '',
Expand Down Expand Up @@ -44,6 +63,12 @@ function Dz() {
headers: { 'x-csrf-token': getCookieValue('csrftoken') }
});

function showWarningWhenFileExceedsExperimentalTreshold() {
setShowExperimentalWarning(
dz.files.some(f => f.size > EXPERIMENTAL_FILE_SIZE_IN_MB * 1024 * 1024)
);
}

dz.on("success", function (file, response) {
if (window.location.href.split("/").at(-1) !== "dashboard"){
window.location = response.url;
Expand Down Expand Up @@ -76,6 +101,9 @@ function Dz() {
pb.style.width = `${w}px`;
});

dz.on("addedfiles", showWarningWhenFileExceedsExperimentalTreshold);
dz.on("removedfile", showWarningWhenFileExceedsExperimentalTreshold);

var submitButton = document.querySelector("#submit");
submitButton.addEventListener("click", function () {
const pb = document.querySelector(".dropzone .progress-bar");
Expand All @@ -89,6 +117,7 @@ function Dz() {
return (
<div>
<div className="submit-area" id="ifc_tab">
{showExperimentalWarning && <FileSizeWarning />}
<form action={context.sandboxId?`${FETCH_PATH}/api/sandbox/${context.sandboxId}`:`${FETCH_PATH}/api/`} className="dropzone" id="ifc_dropzone">
<div className="progress-bar"></div>
<div className="dz-message" data-dz-message><span><i className="material-icons">file_upload</i> Click or drop files here to upload for validation</span></div>
Expand Down