diff --git a/backend/apps/ifc_validation/tasks/check_programs.py b/backend/apps/ifc_validation/tasks/check_programs.py
index 57cdf15..551460a 100644
--- a/backend/apps/ifc_validation/tasks/check_programs.py
+++ b/backend/apps/ifc_validation/tasks/check_programs.py
@@ -5,7 +5,8 @@
from typing import List
from apps.ifc_validation_models.settings import TASK_TIMEOUT_LIMIT
-from apps.ifc_validation_models.models import ValidationTask
+from apps.ifc_validation_models.models import Model, ValidationTask
+from core.settings import ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB
from .logger import logger
from .context import TaskContext
@@ -44,10 +45,51 @@ def is_schema_error(line):
return True
def check_schema(context:TaskContext):
- proc = run_subprocess(
- task = context.task,
- command = [sys.executable, "-m", "ifcopenshell.validate", "--json", "--rules", "--fields", context.file_path ]
- )
+ if context.rdb_file_path_if_exists == context.file_path:
+ # No conversion to RocksDB has been made
+ proc = run_subprocess(
+ task = context.task,
+ command = [sys.executable, "-m", "ifcopenshell.validate", "--json", "--rules", "--fields", context.file_path ]
+ )
+ else:
+ # We have a RocksDB file, which is functionally almost the same
+ # except that certain errors are only present in SPF which have
+ # been captured in a separate log file, which needs to be blended
+ # in into the stream of other messages.
+ proc = run_subprocess(
+ task = context.task,
+ command=[
+ sys.executable,
+ "-c",
+ f"""
+import json
+import ifcopenshell
+from ifcopenshell.validate import *
+
+logger = json_logger()
+
+spf_filename = {json.dumps(context.file_path)}
+file = ifcopenshell.open({json.dumps(context.rdb_file_path_if_exists)})
+log_filename = {repr(context.log_file_path_if_exists)}
+if log_filename:
+ log_content = open(log_filename).read()
+ if log_content:
+ # certain errors are only present when interacting with SPF, these
+ # are captured during the conversion to RocksDB and now emitted.
+ log_internal_cpp_errors(None, spf_filename, logger, log_content=log_content)
+
+validate(file, logger, True)
+
+def conv(x):
+ if isinstance(x, ifcopenshell.entity_instance):
+ return x.get_info(scalar_only=True)
+ else:
+ return str(x)
+
+for x in logger.statements:
+ print(json.dumps(x, default=conv))
+"""])
+
output = list(filter(is_schema_error, proc.stdout.split("\n")))
success = proc.returncode >= 0
valid = len(output) == 0
@@ -108,7 +150,7 @@ def check_prerequisites(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
- "--file-name", context.file_path,
+ "--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "CRITICAL",
"--purepythonparser"
@@ -124,7 +166,7 @@ def check_normative_ia(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
- "--file-name", context.file_path,
+ "--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "IMPLEMENTER_AGREEMENT"
]
@@ -139,7 +181,7 @@ def check_normative_ip(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
- "--file-name", context.file_path,
+ "--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "INFORMAL_PROPOSITION"
]
@@ -154,7 +196,7 @@ def check_industry_practices(context:TaskContext):
command = [
sys.executable,
os.path.join(checks_dir, "check_gherkin.py"),
- "--file-name", context.file_path,
+ "--file-name", context.rdb_file_path_if_exists,
"--task-id", str(context.task.id),
"--rule-type", "INDUSTRY_PRACTICE"
]
@@ -173,6 +215,34 @@ def check_proc_success_or_fail(proc, task):
raise RuntimeError(error_message)
return proc.stdout
+def check_rocksdb_conversion(context: TaskContext):
+ if os.path.getsize(context.file_path) > ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB * 1024 * 1024:
+ rdb_file_path = '/tmp/' + os.path.basename(context.file_path) + '.rdb'
+ log_file_path = context.file_path + '.log'
+ try:
+ run_subprocess(
+ task = context.task,
+ command=[
+ sys.executable,
+ "-c",
+ f"""
+import ifcopenshell
+ifcopenshell.ifcopenshell_wrapper.set_log_format_json()
+ifcopenshell.convert_path_to_rocksdb(
+ {json.dumps(context.file_path)},
+ {json.dumps(rdb_file_path)})
+with open({json.dumps(log_file_path)}, 'w') as f:
+ f.write(ifcopenshell.get_log())
+"""
+ ]
+ )
+ context.result = Model.Status.VALID if os.path.exists(rdb_file_path) else Model.Status.INVALID
+        except Exception:
+            context.result = Model.Status.INVALID
+    else: context.result = Model.Status.NOT_APPLICABLE  # file below size threshold: no conversion attempted
+ return context
+
+
def run_subprocess(
task: ValidationTask,
command: List[str],
diff --git a/backend/apps/ifc_validation/tasks/configs.py b/backend/apps/ifc_validation/tasks/configs.py
index 029b992..19662e0 100644
--- a/backend/apps/ifc_validation/tasks/configs.py
+++ b/backend/apps/ifc_validation/tasks/configs.py
@@ -44,30 +44,27 @@ def _load_function(module, prefix, type):
)
# define task info
-header_syntax = make_task(type=ValidationTask.Type.HEADER_SYNTAX, increment=5, field='status_header_syntax', stage="serial")
-header = make_task(type=ValidationTask.Type.HEADER, increment=10, field='status_header', stage="serial")
-syntax = make_task(type=ValidationTask.Type.SYNTAX, increment=5, field='status_syntax', stage="serial")
-prerequisites = make_task(type=ValidationTask.Type.PREREQUISITES, increment=10, field='status_prereq', stage="serial")
-schema = make_task(type=ValidationTask.Type.SCHEMA, increment=10, field='status_schema')
-digital_signatures = make_task(type=ValidationTask.Type.DIGITAL_SIGNATURES, increment=5, field='status_signatures')
-bsdd = make_task(type=ValidationTask.Type.BSDD, increment=0, field='status_bsdd')
-normative_ia = make_task(type=ValidationTask.Type.NORMATIVE_IA, increment=20, field='status_ia')
-normative_ip = make_task(type=ValidationTask.Type.NORMATIVE_IP, increment=20, field='status_ip')
-industry_practices = make_task(type=ValidationTask.Type.INDUSTRY_PRACTICES, increment=10, field='status_industry_practices')
-instance_completion = make_task(type=ValidationTask.Type.INSTANCE_COMPLETION, increment=5, field=None, stage="final")
+ALL_TASKS = [
+ rocksdb_conv := make_task(type=ValidationTask.Type.ROCKSDB_CONVERSION, increment=5, field='status_rocksdb_conversion', stage="serial"),
+ header_syntax := make_task(type=ValidationTask.Type.HEADER_SYNTAX, increment=5, field='status_header_syntax', stage="serial"),
+ header := make_task(type=ValidationTask.Type.HEADER, increment=5, field='status_header', stage="serial"),
+ syntax := make_task(type=ValidationTask.Type.SYNTAX, increment=5, field='status_syntax', stage="serial"),
+ prerequisites := make_task(type=ValidationTask.Type.PREREQUISITES, increment=10, field='status_prereq', stage="serial"),
+ schema := make_task(type=ValidationTask.Type.SCHEMA, increment=10, field='status_schema'),
+ digital_signatures := make_task(type=ValidationTask.Type.DIGITAL_SIGNATURES, increment=5, field='status_signatures'),
+ bsdd := make_task(type=ValidationTask.Type.BSDD, increment=0, field='status_bsdd'),
+ normative_ia := make_task(type=ValidationTask.Type.NORMATIVE_IA, increment=20, field='status_ia'),
+ normative_ip := make_task(type=ValidationTask.Type.NORMATIVE_IP, increment=20, field='status_ip'),
+ industry_practices := make_task(type=ValidationTask.Type.INDUSTRY_PRACTICES, increment=10, field='status_industry_practices'),
+ instance_completion := make_task(type=ValidationTask.Type.INSTANCE_COMPLETION, increment=5, field=None, stage="final"),
+]
# block tasks on error
-post_tasks = [digital_signatures, schema, normative_ia, normative_ip, industry_practices, instance_completion]
+post_tasks = [t for t in ALL_TASKS if t.execution_stage != "serial"]  # NOTE(review): unlike the old explicit list, this now includes bsdd — confirm intended
header_syntax.blocks = [header, syntax, prerequisites] + post_tasks
syntax.blocks = post_tasks.copy()
prerequisites.blocks = post_tasks.copy()
-# register
-ALL_TASKS = [
- header_syntax, header, syntax, prerequisites,
- schema, digital_signatures, bsdd,
- normative_ia, normative_ip, industry_practices, instance_completion,
-]
class TaskRegistry:
def __init__(self, config_map: dict[str, TaskConfig]):
self._configs = config_map
diff --git a/backend/apps/ifc_validation/tasks/context.py b/backend/apps/ifc_validation/tasks/context.py
index ae918bb..94d278b 100644
--- a/backend/apps/ifc_validation/tasks/context.py
+++ b/backend/apps/ifc_validation/tasks/context.py
@@ -1,3 +1,4 @@
+import os
from typing import Any, Optional
from dataclasses import dataclass
from apps.ifc_validation_models.models import ValidationRequest, ValidationTask
@@ -8,4 +9,20 @@ class TaskContext:
request: ValidationRequest # the current request
task: ValidationTask #the current task
file_path: str # for IFC files
- result: Optional[Any] = None # result from execution layer
+ result: Optional[Any] = None # result from execution layer
+
+ @property
+ def rdb_file_path_if_exists(self):
+        # The .rdb file is written to /tmp by check_rocksdb_conversion; keep this path in sync with it
+ fn = "/tmp/" + os.path.basename(self.file_path) + ".rdb"
+ if os.path.exists(fn):
+ return fn
+ else:
+ return self.file_path
+
+ @property
+ def log_file_path_if_exists(self):
+ if os.path.exists(self.file_path + ".log"):
+ return self.file_path + ".log"
+ else:
+ return None
diff --git a/backend/apps/ifc_validation/tasks/processing/__init__.py b/backend/apps/ifc_validation/tasks/processing/__init__.py
index 40fea8e..a3df5e1 100644
--- a/backend/apps/ifc_validation/tasks/processing/__init__.py
+++ b/backend/apps/ifc_validation/tasks/processing/__init__.py
@@ -10,4 +10,5 @@
from .schema import process_schema
from .header import process_header
from .digital_signatures import process_digital_signatures
-from .bsdd import process_bsdd
\ No newline at end of file
+from .bsdd import process_bsdd
+from .rocksdb import process_rocksdb_conversion
\ No newline at end of file
diff --git a/backend/apps/ifc_validation/tasks/processing/instance_completion.py b/backend/apps/ifc_validation/tasks/processing/instance_completion.py
index 1e26e6c..bd847b3 100644
--- a/backend/apps/ifc_validation/tasks/processing/instance_completion.py
+++ b/backend/apps/ifc_validation/tasks/processing/instance_completion.py
@@ -7,7 +7,7 @@
def process_instance_completion(context:TaskContext):
# the current task doesn't have any execution layer and links instance ids to outcomes
- ifc_file = ifcopenshell.open(context.file_path)
+ ifc_file = ifcopenshell.open(context.rdb_file_path_if_exists)
with transaction.atomic():
model_id = context.request.model.id
model_instances = ModelInstance.objects.filter(model_id=model_id, ifc_type__in=[None, ''])
diff --git a/backend/apps/ifc_validation/tasks/processing/rocksdb.py b/backend/apps/ifc_validation/tasks/processing/rocksdb.py
new file mode 100644
index 0000000..cd344c6
--- /dev/null
+++ b/backend/apps/ifc_validation/tasks/processing/rocksdb.py
@@ -0,0 +1,11 @@
+
+from apps.ifc_validation_models.models import Model
+from .. import TaskContext, with_model
+
+
+def process_rocksdb_conversion(context:TaskContext):
+
+ with with_model(context.request.id) as model:
+ model.status_rocksdb_conversion = context.result
+ model.save(update_fields=['status_rocksdb_conversion'])
+ return f"agg_status = {Model.Status(context.result).label}\nmessages = {context.result}"
diff --git a/backend/apps/ifc_validation/tasks/task_runner.py b/backend/apps/ifc_validation/tasks/task_runner.py
index f21d7df..f994848 100644
--- a/backend/apps/ifc_validation/tasks/task_runner.py
+++ b/backend/apps/ifc_validation/tasks/task_runner.py
@@ -172,6 +172,7 @@ def ifc_file_validation_task(self, id, file_name, *args, **kwargs):
workflow_completed = on_workflow_completed.s(id=id, file_name=file_name)
serial_tasks = chain(
+ rocksdb_conv_subtask.s(id=id, file_name=file_name),
header_syntax_validation_subtask.s(id=id, file_name=file_name),
header_validation_subtask.s(id=id, file_name=file_name),
syntax_validation_subtask.s(id=id, file_name=file_name),
@@ -223,3 +224,5 @@ def ifc_file_validation_task(self, id, file_name, *args, **kwargs):
bsdd_validation_subtask = task_factory(ValidationTask.Type.BSDD)
industry_practices_subtask = task_factory(ValidationTask.Type.INDUSTRY_PRACTICES)
+
+rocksdb_conv_subtask = task_factory(ValidationTask.Type.ROCKSDB_CONVERSION)
diff --git a/backend/core/settings.py b/backend/core/settings.py
index 6bbd250..ff6a424 100644
--- a/backend/core/settings.py
+++ b/backend/core/settings.py
@@ -280,7 +280,8 @@
# Uploaded files
MAX_FILES_PER_UPLOAD = 100
-MAX_FILE_SIZE_IN_MB = int(os.environ.get("MAX_FILE_SIZE_IN_MB", 256)) # default to 256 MB
+MAX_FILE_SIZE_IN_MB = int(os.environ.get("MAX_FILE_SIZE_IN_MB", 512)) # default to 512 MB
+ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB = int(os.environ.get("ROCKSDB_FILE_SIZE_THRESHOLD_IN_MB", 256))
MEDIA_URL = '/files/'
MEDIA_ROOT = os.environ.get('MEDIA_ROOT', '/files_storage')
try:
diff --git a/frontend/src/Dz.js b/frontend/src/Dz.js
index c7b9ccb..1d9b7ae 100644
--- a/frontend/src/Dz.js
+++ b/frontend/src/Dz.js
@@ -6,14 +6,33 @@ import Snackbar from '@mui/material/Snackbar';
import Alert from '@mui/material/Alert';
import { PageContext } from './Page';
import { getCookieValue } from './Cookies';
+import { AlertTitle } from "@mui/material";
+import WarningAmberIcon from "@mui/icons-material/WarningAmber";
+
+function FileSizeWarning() {
+ return (
+