From 2cc776511724531f31fc5dcb4ca4c6c8e47bd202 Mon Sep 17 00:00:00 2001
From: Joshua Lochner <26504141+xenova@users.noreply.github.com>
Date: Sun, 27 Apr 2025 11:05:42 -0400
Subject: [PATCH] Attempt to slim model if onnx check model fails

---
 scripts/quantize.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/scripts/quantize.py b/scripts/quantize.py
index 3f73f0916..3341a88a3 100644
--- a/scripts/quantize.py
+++ b/scripts/quantize.py
@@ -220,6 +220,12 @@ def quantize_fp16(
         disable_shape_infer=disable_shape_infer,
         op_block_list=blocked_ops,
     )
+    try:
+        # Check the model
+        onnx.checker.check_model(model_fp16, full_check=True)
+    except Exception as e:
+        import onnxslim
+        model_fp16 = onnxslim.slim(model_fp16)
     check_and_save_model(model_fp16, save_path)
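
For reference, the fallback this patch introduces can be read as the standalone sketch below: validate the converted fp16 model with the ONNX checker, and only if validation raises, pass the model through onnxslim before saving. This assumes the onnx and onnxslim packages are installed; the helper name check_or_slim is hypothetical and not part of the patch.

    import onnx

    def check_or_slim(model: onnx.ModelProto) -> onnx.ModelProto:
        # Validate the model; full_check=True also runs strict shape inference.
        try:
            onnx.checker.check_model(model, full_check=True)
            return model
        except Exception:
            # Validation failed: attempt to repair/simplify the graph with onnxslim.
            # onnxslim.slim() accepts a ModelProto and returns a ModelProto.
            import onnxslim
            return onnxslim.slim(model)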