From d1c7dfd9ea87b1bd08301a8cf65ed79baee9bab9 Mon Sep 17 00:00:00 2001 From: kobros-tech Date: Fri, 13 Feb 2026 19:15:57 +0300 Subject: [PATCH 1/4] [16.0][ADD] json_export_engine Universal JSON schema builder, REST API, webhooks and scheduled exports --- json_export_engine/README.rst | 178 ++++++ json_export_engine/__init__.py | 3 + json_export_engine/__manifest__.py | 40 ++ json_export_engine/controllers/__init__.py | 1 + json_export_engine/controllers/main.py | 189 +++++++ json_export_engine/models/__init__.py | 6 + .../models/json_export_endpoint.py | 108 ++++ json_export_engine/models/json_export_log.py | 40 ++ .../models/json_export_schedule.py | 237 ++++++++ .../models/json_export_schema.py | 421 ++++++++++++++ .../models/json_export_webhook.py | 180 ++++++ .../models/json_export_webhook_header.py | 19 + json_export_engine/readme/CONFIGURE.md | 44 ++ json_export_engine/readme/CONTRIBUTORS.md | 1 + json_export_engine/readme/DESCRIPTION.md | 30 + .../security/ir.model.access.csv | 10 + .../security/json_export_engine_security.xml | 24 + .../static/description/index.html | 512 ++++++++++++++++++ .../static/src/json_export_widget.esm.js | 161 ++++++ .../static/src/json_export_widget.xml | 60 ++ json_export_engine/tests/__init__.py | 10 + json_export_engine/tests/common.py | 65 +++ json_export_engine/tests/test_controller.py | 217 ++++++++ .../tests/test_json_export_endpoint.py | 122 +++++ .../tests/test_json_export_schedule.py | 168 ++++++ .../tests/test_json_export_schema.py | 235 ++++++++ .../tests/test_json_export_webhook.py | 172 ++++++ json_export_engine/tests/test_tools.py | 154 ++++++ json_export_engine/tools/__init__.py | 2 + json_export_engine/tools/resolver.py | 61 +++ json_export_engine/tools/serializer.py | 43 ++ .../views/json_export_endpoint_views.xml | 83 +++ .../views/json_export_log_views.xml | 124 +++++ .../views/json_export_schedule_views.xml | 98 ++++ .../views/json_export_schema_views.xml | 202 +++++++ .../views/json_export_webhook_views.xml 
| 95 ++++ json_export_engine/views/menu.xml | 67 +++ requirements.txt | 1 + .../odoo/addons/json_export_engine | 1 + setup/json_export_engine/setup.py | 6 + 40 files changed, 4190 insertions(+) create mode 100644 json_export_engine/README.rst create mode 100644 json_export_engine/__init__.py create mode 100644 json_export_engine/__manifest__.py create mode 100644 json_export_engine/controllers/__init__.py create mode 100644 json_export_engine/controllers/main.py create mode 100644 json_export_engine/models/__init__.py create mode 100644 json_export_engine/models/json_export_endpoint.py create mode 100644 json_export_engine/models/json_export_log.py create mode 100644 json_export_engine/models/json_export_schedule.py create mode 100644 json_export_engine/models/json_export_schema.py create mode 100644 json_export_engine/models/json_export_webhook.py create mode 100644 json_export_engine/models/json_export_webhook_header.py create mode 100644 json_export_engine/readme/CONFIGURE.md create mode 100644 json_export_engine/readme/CONTRIBUTORS.md create mode 100644 json_export_engine/readme/DESCRIPTION.md create mode 100644 json_export_engine/security/ir.model.access.csv create mode 100644 json_export_engine/security/json_export_engine_security.xml create mode 100644 json_export_engine/static/description/index.html create mode 100644 json_export_engine/static/src/json_export_widget.esm.js create mode 100644 json_export_engine/static/src/json_export_widget.xml create mode 100644 json_export_engine/tests/__init__.py create mode 100644 json_export_engine/tests/common.py create mode 100644 json_export_engine/tests/test_controller.py create mode 100644 json_export_engine/tests/test_json_export_endpoint.py create mode 100644 json_export_engine/tests/test_json_export_schedule.py create mode 100644 json_export_engine/tests/test_json_export_schema.py create mode 100644 json_export_engine/tests/test_json_export_webhook.py create mode 100644 json_export_engine/tests/test_tools.py 
create mode 100644 json_export_engine/tools/__init__.py create mode 100644 json_export_engine/tools/resolver.py create mode 100644 json_export_engine/tools/serializer.py create mode 100644 json_export_engine/views/json_export_endpoint_views.xml create mode 100644 json_export_engine/views/json_export_log_views.xml create mode 100644 json_export_engine/views/json_export_schedule_views.xml create mode 100644 json_export_engine/views/json_export_schema_views.xml create mode 100644 json_export_engine/views/json_export_webhook_views.xml create mode 100644 json_export_engine/views/menu.xml create mode 120000 setup/json_export_engine/odoo/addons/json_export_engine create mode 100644 setup/json_export_engine/setup.py diff --git a/json_export_engine/README.rst b/json_export_engine/README.rst new file mode 100644 index 00000000000..f9fe50d75df --- /dev/null +++ b/json_export_engine/README.rst @@ -0,0 +1,178 @@ +================== +JSON Export Engine +================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:702b7b1fb50020a0fe3b2b7364b734dd03d97049df863f61f2eee19b4be88af0 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/licence-AGPL--3-blue.png + :target: http://www.gnu.org/licenses/agpl-3.0-standalone.html + :alt: License: AGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fserver--tools-lightgray.png?logo=github + :target: https://github.com/OCA/server-tools/tree/16.0/json_export_engine + :alt: OCA/server-tools +.. 
|badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/server-tools-16-0/server-tools-16-0-json_export_engine + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/server-tools&target_branch=16.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| + +Universal JSON / Schema Export Engine for Odoo. + +This module provides a complete framework for exporting data from any +Odoo model as structured JSON, with support for: + +- **Dynamic Schema Builder**: Use Odoo's built-in export field selector + to interactively choose which fields (including nested relational + fields) to include in your JSON output. No code required. + +- **JSON Schema Generation**: Auto-generates a JSON Schema (draft-07) + from the selected fields and model definition, including field types, + nullable markers, selection enums, and nested relational structures. + Available both in the UI and via a dedicated REST endpoint. + +- **REST API Endpoints**: Generate REST-like API endpoints for any + schema with configurable authentication (none, API key, or + session-based), pagination, and CORS support. Each endpoint exposes + both a data URL and a schema URL. + +- **Webhooks**: Push data to external systems automatically when records + are created, updated, or deleted. Supports HMAC-SHA256 signing, custom + headers, and retry with exponential backoff. + +- **Scheduled Exports**: Export data on a schedule (minutes, hours, + days, weeks) as JSON or JSON Lines files, delivered as Odoo + attachments or HTTP POST to an external URL. Supports incremental + exports (only changed records). + +- **Export Logs**: Full audit trail of all export operations (API calls, + webhooks, scheduled exports, manual exports) with timing and error + tracking. 
+ +Minimal dependencies: only requires ``base``, ``web``, and +``jsonifier``. + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + `More details on development status `_ + +**Table of contents** + +.. contents:: + :local: + +Configuration +============= + +**Creating an Export Schema:** + +1. Navigate to **JSON Export Engine > Configuration > Export Schemas**. +2. Create a new schema: + + - Select the target model (e.g., ``res.partner``, + ``product.product``) + - Click **Select Fields to Export** to interactively choose fields + (including nested relational fields) + - Optionally set a domain filter to limit which records are exported + - Configure options: record limit, whether to include the record ID, + and preview count + +3. Click **Refresh Preview** to see sample JSON output. +4. Check the **JSON Schema** tab to see the auto-generated JSON Schema + (draft-07) describing the structure of the exported data. + +**REST Endpoints:** + +1. In the schema form, go to the **Endpoints** tab. + +2. Add an endpoint with a route path (e.g., ``partners``). + +3. Choose authentication type and generate an API key if needed. + +4. Two URLs are generated for each endpoint: + + - **Data URL**: ``https://your-odoo.com/api/json_export/partners`` + returns paginated JSON data + - **Schema URL**: + ``https://your-odoo.com/api/json_export/partners/schema`` returns + the JSON Schema (draft-07) for the endpoint + +5. Pagination parameters: ``?page=1&limit=50`` (capped by the endpoint's + max limit setting). + +**Webhooks:** + +1. In the schema form, go to the **Webhooks** tab. +2. Add a webhook with a destination URL. +3. Select which events trigger the webhook (create, write, delete). +4. Optionally set a secret key for HMAC-SHA256 payload signing. +5. Add custom headers if the receiving system requires them. + +**Scheduled Exports:** + +1. 
In the schema form, go to the **Schedules** tab. +2. Add a schedule with the desired interval. +3. Choose the output format (JSON or JSON Lines) and destination (Odoo + attachment or HTTP POST). +4. Enable **Incremental** to only export records changed since the last + run. + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* kobros-tech + +Contributors +------------ + +- Mohamed Alkobrosli @ `kobros-tech `__ + +Maintainers +----------- + +This module is maintained by the OCA. + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +.. |maintainer-kobros-tech| image:: https://github.com/kobros-tech.png?size=40px + :target: https://github.com/kobros-tech + :alt: kobros-tech + +Current `maintainer `__: + +|maintainer-kobros-tech| + +This module is part of the `OCA/server-tools `_ project on GitHub. + +You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute. diff --git a/json_export_engine/__init__.py b/json_export_engine/__init__.py new file mode 100644 index 00000000000..43f0089790a --- /dev/null +++ b/json_export_engine/__init__.py @@ -0,0 +1,3 @@ +from . import controllers +from . import models +from . 
import tools diff --git a/json_export_engine/__manifest__.py b/json_export_engine/__manifest__.py new file mode 100644 index 00000000000..260f0015b64 --- /dev/null +++ b/json_export_engine/__manifest__.py @@ -0,0 +1,40 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). +# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). + +{ + "name": "JSON Export Engine", + "summary": "Universal JSON schema builder, REST API, webhooks and scheduled exports", + "version": "16.0.1.0.0", + "category": "Tools", + "website": "https://github.com/OCA/server-tools", + "author": "kobros-tech, Odoo Community Association (OCA)", + "maintainers": ["kobros-tech"], + "license": "AGPL-3", + "development_status": "Alpha", + "external_dependencies": { + "python": ["requests"], + }, + "depends": [ + "base", + "web", + "jsonifier", + ], + "data": [ + "security/json_export_engine_security.xml", + "security/ir.model.access.csv", + "views/json_export_schema_views.xml", + "views/json_export_endpoint_views.xml", + "views/json_export_webhook_views.xml", + "views/json_export_schedule_views.xml", + "views/json_export_log_views.xml", + "views/menu.xml", + ], + "assets": { + "web.assets_backend": [ + "json_export_engine/static/src/json_export_widget.xml", + "json_export_engine/static/src/json_export_widget.esm.js", + ], + }, + "installable": True, +} diff --git a/json_export_engine/controllers/__init__.py b/json_export_engine/controllers/__init__.py new file mode 100644 index 00000000000..12a7e529b67 --- /dev/null +++ b/json_export_engine/controllers/__init__.py @@ -0,0 +1 @@ +from . import main diff --git a/json_export_engine/controllers/main.py b/json_export_engine/controllers/main.py new file mode 100644 index 00000000000..b1d64367c0a --- /dev/null +++ b/json_export_engine/controllers/main.py @@ -0,0 +1,189 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). 
+# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). + +import json +import math +import time + +from odoo import http +from odoo.http import Response, request + + +class JsonExportController(http.Controller): + @http.route( + "/api/json_export/", + type="http", + auth="public", + methods=["GET", "OPTIONS"], + csrf=False, + ) + def export_data(self, path, **kwargs): + # Handle CORS preflight + endpoint = self._find_endpoint(path) + if not endpoint: + return self._error_response(404, "Endpoint not found") + + if request.httprequest.method == "OPTIONS": + return self._cors_preflight(endpoint) + + # Authenticate + auth_error = self._check_auth(endpoint) + if auth_error: + return auth_error + + schema = endpoint.schema_id + start_time = time.time() + + try: + # Parse pagination + page = max(int(kwargs.get("page", 1)), 1) + limit = int(kwargs.get("limit", endpoint.default_limit)) + limit = min(max(limit, 1), endpoint.max_limit) + offset = (page - 1) * limit + + # Count total records + domain = schema._get_domain() + model = request.env[schema.model_name].sudo() + total = model.search_count(domain) + + # Fetch and serialize + records = schema.sudo()._get_records(limit=limit, offset=offset) + data = schema.sudo()._serialize_records(records) + + duration = int((time.time() - start_time) * 1000) + + response_data = { + "success": True, + "data": data, + "pagination": { + "page": page, + "limit": limit, + "total": total, + "pages": math.ceil(total / limit) if limit else 0, + }, + "meta": { + "schema": schema.name, + "model": schema.model_name, + "duration_ms": duration, + }, + } + + # Log + schema.sudo()._create_log( + "api", + "success", + len(data), + duration, + request_info=json.dumps( + { + "endpoint": endpoint.name, + "path": path, + "page": page, + "limit": limit, + } + ), + ) + + return self._json_response(response_data, endpoint) + + except Exception as e: + duration = int((time.time() - start_time) * 1000) + 
schema.sudo()._create_log("api", "error", 0, duration, error_message=str(e)) + return self._error_response(500, "Internal server error") + + @http.route( + "/api/json_export//schema", + type="http", + auth="public", + methods=["GET", "OPTIONS"], + csrf=False, + ) + def export_schema(self, path, **kwargs): + """Serve the JSON Schema for an endpoint.""" + endpoint = self._find_endpoint(path) + if not endpoint: + return self._error_response(404, "Endpoint not found") + + if request.httprequest.method == "OPTIONS": + return self._cors_preflight(endpoint) + + auth_error = self._check_auth(endpoint) + if auth_error: + return auth_error + + schema = endpoint.schema_id + try: + json_schema = schema.sudo()._generate_json_schema() + return self._json_response(json_schema, endpoint) + except Exception: + return self._error_response(500, "Failed to generate schema") + + def _find_endpoint(self, path): + """Lookup active endpoint by route path.""" + path = path.strip("/") + return ( + request.env["json.export.endpoint"] + .sudo() + .search( + [ + ("active", "=", True), + ("route_path", "=", path), + ("schema_id.active", "=", True), + ], + limit=1, + ) + ) + + def _check_auth(self, endpoint): + """Validate authentication. Returns error response or None.""" + if endpoint.auth_type == "none": + return None + + if endpoint.auth_type == "api_key": + api_key = request.httprequest.headers.get( + "X-API-Key" + ) or request.params.get("api_key") + if not api_key or api_key != endpoint.api_key: + return self._error_response(401, "Invalid or missing API key") + return None + + if endpoint.auth_type == "user": + if request.env.user._is_public(): + return self._error_response( + 401, "Authentication required. Please log in." 
+ ) + return None + + return self._error_response(403, "Unknown authentication type") + + def _json_response(self, data, endpoint=None): + """Build a JSON HTTP response with optional CORS headers.""" + body = json.dumps(data, ensure_ascii=False) + headers = {"Content-Type": "application/json"} + if endpoint and endpoint.cors_origin: + headers.update(self._cors_headers(endpoint)) + return Response(body, status=200, headers=headers) + + def _error_response(self, code, message): + """Build a JSON error response.""" + body = json.dumps( + {"success": False, "error": {"code": code, "message": message}}, + ensure_ascii=False, + ) + return Response(body, status=code, headers={"Content-Type": "application/json"}) + + def _cors_preflight(self, endpoint): + """Handle CORS OPTIONS preflight request.""" + headers = self._cors_headers(endpoint) + headers["Access-Control-Max-Age"] = "86400" + return Response("", status=204, headers=headers) + + def _cors_headers(self, endpoint): + """Build CORS headers dict.""" + origin = endpoint.cors_origin or "" + return { + "Access-Control-Allow-Origin": origin, + "Access-Control-Allow-Methods": "GET, OPTIONS", + "Access-Control-Allow-Headers": "Content-Type, X-API-Key", + } diff --git a/json_export_engine/models/__init__.py b/json_export_engine/models/__init__.py new file mode 100644 index 00000000000..ba0061de956 --- /dev/null +++ b/json_export_engine/models/__init__.py @@ -0,0 +1,6 @@ +from . import json_export_schema +from . import json_export_endpoint +from . import json_export_webhook +from . import json_export_webhook_header +from . import json_export_schedule +from . import json_export_log diff --git a/json_export_engine/models/json_export_endpoint.py b/json_export_engine/models/json_export_endpoint.py new file mode 100644 index 00000000000..04bdbe18f22 --- /dev/null +++ b/json_export_engine/models/json_export_endpoint.py @@ -0,0 +1,108 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). 
+# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). + +import re +import secrets + +from odoo import _, api, fields, models +from odoo.exceptions import ValidationError + + +class JsonExportEndpoint(models.Model): + _name = "json.export.endpoint" + _description = "JSON Export REST Endpoint" + _order = "name" + + name = fields.Char(required=True) + schema_id = fields.Many2one( + "json.export.schema", + string="Export Schema", + required=True, + ondelete="cascade", + ) + active = fields.Boolean(default=True) + route_path = fields.Char( + required=True, + help="URL path segment, e.g. 'products' will map to /api/json_export/products", + ) + full_url = fields.Char( + compute="_compute_full_url", + string="Data URL", + ) + schema_url = fields.Char( + compute="_compute_full_url", + string="Schema URL", + ) + auth_type = fields.Selection( + [ + ("none", "No Authentication"), + ("api_key", "API Key"), + ("user", "Session (Logged-in User)"), + ], + default="api_key", + required=True, + ) + api_key = fields.Char(groups="json_export_engine.group_manager") + default_limit = fields.Integer( + default=50, + help="Default number of records per page.", + ) + max_limit = fields.Integer( + default=500, + help="Maximum number of records per page.", + ) + cors_origin = fields.Char( + string="CORS Origin", + help="Allowed CORS origin, e.g. 
* or https://example.com", + ) + + @api.depends("route_path") + def _compute_full_url(self): + base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url") + for rec in self: + if rec.route_path: + path = rec.route_path.strip("/") + rec.full_url = "%s/api/json_export/%s" % (base_url, path) + rec.schema_url = "%s/api/json_export/%s/schema" % (base_url, path) + else: + rec.full_url = "" + rec.schema_url = "" + + @api.constrains("route_path") + def _check_route_path(self): + for rec in self: + if not rec.route_path: + continue + path = rec.route_path.strip("/") + if not re.match(r"^[a-zA-Z0-9_/\-]+$", path): + raise ValidationError( + _( + "Route path may only contain letters, numbers, " + "hyphens, underscores, and slashes." + ) + ) + # Check uniqueness among active endpoints + duplicate = self.search( + [ + ("id", "!=", rec.id), + ("active", "=", True), + ("route_path", "=", path), + ], + limit=1, + ) + if duplicate: + raise ValidationError( + _( + "Route path '%(path)s' is already in use" + " by endpoint '%(endpoint)s'.", + path=path, + endpoint=duplicate.name, + ) + ) + + def action_generate_api_key(self): + """Generate a new random API key.""" + for rec in self: + rec.api_key = secrets.token_hex(32) + return True diff --git a/json_export_engine/models/json_export_log.py b/json_export_engine/models/json_export_log.py new file mode 100644 index 00000000000..89cc9ddb7e2 --- /dev/null +++ b/json_export_engine/models/json_export_log.py @@ -0,0 +1,40 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). +# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
+ +from odoo import fields, models + + +class JsonExportLog(models.Model): + _name = "json.export.log" + _description = "JSON Export Log" + _order = "create_date desc" + + schema_id = fields.Many2one( + "json.export.schema", + string="Export Schema", + required=True, + ondelete="cascade", + index=True, + ) + log_type = fields.Selection( + [ + ("api", "API Call"), + ("webhook", "Webhook"), + ("schedule", "Scheduled Export"), + ("preview", "Preview"), + ("manual", "Manual Export"), + ], + required=True, + index=True, + ) + status = fields.Selection( + [("success", "Success"), ("error", "Error")], + required=True, + ) + records_count = fields.Integer(string="Records") + duration_ms = fields.Integer(string="Duration (ms)") + error_message = fields.Text() + request_info = fields.Text( + help="JSON with additional context about the request.", + ) diff --git a/json_export_engine/models/json_export_schedule.py b/json_export_engine/models/json_export_schedule.py new file mode 100644 index 00000000000..75b6dbdf41a --- /dev/null +++ b/json_export_engine/models/json_export_schedule.py @@ -0,0 +1,237 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). +# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
+ +import base64 +import json +import logging +import time + +import requests + +from odoo import _, api, fields, models +from odoo.exceptions import UserError + +_logger = logging.getLogger(__name__) + + +class JsonExportSchedule(models.Model): + _name = "json.export.schedule" + _description = "JSON Export Schedule" + _order = "name" + + name = fields.Char(required=True) + schema_id = fields.Many2one( + "json.export.schema", + string="Export Schema", + required=True, + ondelete="cascade", + ) + active = fields.Boolean(default=True) + interval_number = fields.Integer(default=1, required=True) + interval_type = fields.Selection( + [ + ("minutes", "Minutes"), + ("hours", "Hours"), + ("days", "Days"), + ("weeks", "Weeks"), + ], + default="hours", + required=True, + ) + cron_id = fields.Many2one( + "ir.cron", + string="Scheduled Action", + readonly=True, + ondelete="set null", + ) + destination_type = fields.Selection( + [ + ("attachment", "File Attachment"), + ("http_post", "HTTP POST"), + ], + default="attachment", + required=True, + ) + destination_url = fields.Char( + string="Destination URL", + help="URL to POST the export data to (when destination type is HTTP POST).", + ) + file_format = fields.Selection( + [ + ("json", "JSON Array"), + ("jsonl", "JSON Lines"), + ], + default="json", + required=True, + ) + incremental = fields.Boolean( + default=True, + help="Only export records modified since the last successful run.", + ) + last_run_date = fields.Datetime(readonly=True) + last_run_status = fields.Selection( + [("success", "Success"), ("error", "Error")], + readonly=True, + ) + last_run_count = fields.Integer(readonly=True) + last_run_error = fields.Text(readonly=True) + + @api.model_create_multi + def create(self, vals_list): + records = super().create(vals_list) + for rec in records: + if rec.active: + rec._create_or_update_cron() + return records + + def write(self, vals): + res = super().write(vals) + cron_fields = { + "active", + "interval_number", + 
"interval_type", + "name", + "schema_id", + } + if cron_fields & set(vals.keys()): + for rec in self: + rec._create_or_update_cron() + return res + + def unlink(self): + crons = self.mapped("cron_id") + res = super().unlink() + crons.sudo().unlink() + return res + + def _create_or_update_cron(self): + """Create or update the ir.cron record for this schedule.""" + self.ensure_one() + cron_vals = { + "name": "JSON Export: %s" % self.name, + "model_id": self.env["ir.model"] + .sudo() + .search([("model", "=", self._name)], limit=1) + .id, + "state": "code", + "code": "model._cron_run_export(%d)" % self.id, + "interval_number": self.interval_number, + "interval_type": self.interval_type, + "numbercall": -1, + "active": self.active, + "doall": False, + } + if self.cron_id: + self.cron_id.sudo().write(cron_vals) + else: + cron = self.env["ir.cron"].sudo().create(cron_vals) + self.cron_id = cron + + @api.model + def _cron_run_export(self, schedule_id): + """Entry point called by ir.cron.""" + schedule = self.browse(schedule_id) + if schedule.exists() and schedule.active: + schedule._run_scheduled_export() + + def _run_scheduled_export(self): + """Execute the scheduled export.""" + self.ensure_one() + start_time = time.time() + schema = self.schema_id + try: + # Build extra domain for incremental export + extra_domain = [] + if self.incremental and self.last_run_date: + extra_domain = [("write_date", ">", self.last_run_date)] + + records = schema._get_records( + no_limit=True, + extra_domain=extra_domain, + ) + data = schema._serialize_records(records) + + # Format output + if self.file_format == "jsonl": + content = "\n".join( + json.dumps(item, ensure_ascii=False) for item in data + ) + mimetype = "application/x-ndjson" + ext = "jsonl" + else: + content = json.dumps(data, indent=2, ensure_ascii=False) + mimetype = "application/json" + ext = "json" + + # Deliver + if self.destination_type == "attachment": + filename = "scheduled_%s_%s.%s" % ( + 
schema.model_name.replace(".", "_"), + fields.Datetime.now().strftime("%Y%m%d_%H%M%S"), + ext, + ) + self.env["ir.attachment"].create( + { + "name": filename, + "type": "binary", + "datas": base64.b64encode(content.encode("utf-8")), + "mimetype": mimetype, + "res_model": self._name, + "res_id": self.id, + } + ) + elif self.destination_type == "http_post": + if not self.destination_url: + raise UserError( + _("Destination URL is required for HTTP POST delivery.") + ) + resp = requests.post( + self.destination_url, + data=content, + headers={"Content-Type": mimetype}, + timeout=120, + ) + resp.raise_for_status() + + duration = int((time.time() - start_time) * 1000) + self.write( + { + "last_run_date": fields.Datetime.now(), + "last_run_status": "success", + "last_run_count": len(data), + "last_run_error": False, + } + ) + schema._create_log( + "schedule", + "success", + len(data), + duration, + request_info=json.dumps( + { + "schedule": self.name, + "destination": self.destination_type, + "incremental": self.incremental, + "format": self.file_format, + } + ), + ) + except Exception as e: + duration = int((time.time() - start_time) * 1000) + self.write( + { + "last_run_date": fields.Datetime.now(), + "last_run_status": "error", + "last_run_count": 0, + "last_run_error": str(e), + } + ) + schema._create_log("schedule", "error", 0, duration, error_message=str(e)) + _logger.exception("Scheduled export '%s' failed", self.name) + + def action_run_now(self): + """Manually trigger the scheduled export.""" + self.ensure_one() + self._run_scheduled_export() + return True diff --git a/json_export_engine/models/json_export_schema.py b/json_export_engine/models/json_export_schema.py new file mode 100644 index 00000000000..766da7bc854 --- /dev/null +++ b/json_export_engine/models/json_export_schema.py @@ -0,0 +1,421 @@ +# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com). +# @author Mohamed Alkobrosli +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
import base64
import json
import logging
import time

from odoo import _, api, fields, models
from odoo.exceptions import UserError
from odoo.tools.safe_eval import safe_eval

from ..tools.resolver import IrExportsResolver
from ..tools.serializer import JsonExportSerializer

_logger = logging.getLogger(__name__)


class JsonExportSchema(models.Model):
    """Configuration hub for a JSON export.

    A schema binds a target model to an ``ir.exports`` field selection,
    an optional record filter and output options.  Endpoints, webhooks,
    schedules and log entries all reference a schema.
    """

    _name = "json.export.schema"
    _description = "JSON Export Schema"
    _order = "name"

    # Odoo field type -> JSON Schema (draft-07) fragment.
    FIELD_TYPE_MAP = {
        "char": {"type": "string"},
        "text": {"type": "string"},
        "html": {"type": "string"},
        "integer": {"type": "integer"},
        "float": {"type": "number"},
        "monetary": {"type": "number"},
        "boolean": {"type": "boolean"},
        "date": {"type": "string", "format": "date"},
        "datetime": {"type": "string", "format": "date-time"},
        "binary": {"type": "string", "contentEncoding": "base64"},
        "selection": {"type": "string"},
        "reference": {"type": "string"},
    }

    name = fields.Char(required=True)
    active = fields.Boolean(default=True)
    model_id = fields.Many2one(
        "ir.model",
        string="Model",
        required=True,
        ondelete="cascade",
        domain=[("transient", "=", False)],
    )
    model_name = fields.Char(
        related="model_id.model", store=True, readonly=True, index=True
    )
    exporter_id = fields.Many2one("ir.exports", string="Field Selector")
    domain = fields.Char(string="Record Filter", default="[]")
    description = fields.Text()
    record_limit = fields.Integer(
        default=100,
        help="Default maximum number of records returned per request.",
    )
    include_record_id = fields.Boolean(
        default=True,
        help="Always include the record ID in exported data.",
    )
    preview_count = fields.Integer(
        default=5, help="Number of records to show in the preview."
    )
    preview_data = fields.Text(compute="_compute_preview_data", string="Preview")
    json_schema = fields.Text(compute="_compute_json_schema", string="JSON Schema")
    endpoint_ids = fields.One2many("json.export.endpoint", "schema_id")
    webhook_ids = fields.One2many("json.export.webhook", "schema_id")
    schedule_ids = fields.One2many("json.export.schedule", "schema_id")
    log_ids = fields.One2many("json.export.log", "schema_id")
    log_count = fields.Integer(compute="_compute_log_count", string="Logs")

    @api.depends("log_ids")
    def _compute_log_count(self):
        for rec in self:
            rec.log_count = len(rec.log_ids)

    @api.depends(
        "model_id", "exporter_id", "domain", "preview_count", "include_record_id"
    )
    def _compute_preview_data(self):
        """Render a small JSON sample of the export.

        Serialization errors are shown inline in the preview instead of
        raising, so a half-configured schema form stays usable.
        """
        for rec in self:
            if not rec.model_id or not rec.exporter_id:
                rec.preview_data = ""
                continue
            try:
                records = rec._get_records(limit=rec.preview_count or 5)
                data = rec._serialize_records(records)
                rec.preview_data = json.dumps(data, indent=2, ensure_ascii=False)
            except Exception as e:  # noqa: BLE001 - error is surfaced in the UI
                rec.preview_data = json.dumps(
                    {"error": str(e)}, indent=2, ensure_ascii=False
                )

    @api.depends("model_id", "exporter_id", "include_record_id")
    def _compute_json_schema(self):
        """Render the generated JSON Schema, or an inline error message."""
        for rec in self:
            if not rec.model_id or not rec.exporter_id:
                rec.json_schema = ""
                continue
            try:
                schema = rec._generate_json_schema()
                rec.json_schema = json.dumps(schema, indent=2, ensure_ascii=False)
            except Exception as e:  # noqa: BLE001 - error is surfaced in the UI
                rec.json_schema = json.dumps(
                    {"error": str(e)}, indent=2, ensure_ascii=False
                )

    def _generate_json_schema(self):
        """Generate a JSON Schema (draft-07) from the resolved parser and model fields.

        :return: dict ready to be dumped as a draft-07 schema document
        """
        self.ensure_one()
        parser = self._get_parser()
        model = self.env[self.model_name]
        properties, required = self._parser_to_schema_properties(parser, model)
        return {
            "$schema": "http://json-schema.org/draft-07/schema#",
            "title": self.name,
            "description": "Auto-generated schema for %s (%s)"
            % (self.name, self.model_name),
            "type": "object",
            "properties": properties,
            "required": required,
            "additionalProperties": False,
        }

    def _parser_to_schema_properties(self, parser, model):
        """Convert a jsonify parser list into JSON Schema properties.

        :param parser: list like ``["id", "name", ("categ_id", ["id", "name"])]``
        :param model: Odoo model (or comodel) used for field introspection
        :return: tuple ``(properties_dict, required_list)``
        """
        properties = {}
        required = []
        for item in parser:
            if isinstance(item, str):
                field_name = item
                if field_name == "id":
                    # Special-case "id" *before* the _fields lookup: every
                    # Odoo model has an "id" field, so checking _fields first
                    # would shadow this branch and drop both the dedicated
                    # description and the "required" marker.
                    properties["id"] = {
                        "type": "integer",
                        "description": "Record ID",
                    }
                    required.append("id")
                elif field_name in model._fields:
                    field_obj = model._fields[field_name]
                    properties[field_name] = self._field_to_schema(field_obj)
                    if field_obj.required:
                        required.append(field_name)
            elif isinstance(item, tuple) and len(item) == 2:
                # Relational field: ("field_name", ["sub_field1", "sub_field2"])
                field_name, sub_fields = item
                if field_name in model._fields:
                    field_obj = model._fields[field_name]
                    properties[field_name] = self._relational_field_to_schema(
                        field_obj, sub_fields
                    )
                    if field_obj.required:
                        required.append(field_name)
        return properties, required

    def _field_to_schema(self, field_obj):
        """Convert a single Odoo field to a JSON Schema property dict."""
        schema = {}
        field_type = field_obj.type

        if field_type in self.FIELD_TYPE_MAP:
            # update() copies the mapped fragment so the class-level
            # FIELD_TYPE_MAP entries are never mutated below.
            schema.update(self.FIELD_TYPE_MAP[field_type])
        elif field_type in ("many2one",):
            schema = {"type": "integer", "description": "Related record ID"}
        elif field_type in ("one2many", "many2many"):
            schema = {
                "type": "array",
                "items": {"type": "integer"},
                "description": "List of related record IDs",
            }
        else:
            schema = {"type": "string"}

        # Field metadata (label / help text).
        if field_obj.string:
            schema["title"] = field_obj.string
        if field_obj.help:
            schema["description"] = field_obj.help

        # Selection choices become an enum; callables receive the model.
        if field_type == "selection" and field_obj.selection:
            try:
                choices = field_obj.selection
                if callable(choices):
                    choices = choices(self.env[field_obj.model_name])
                schema["enum"] = [key for key, _label in choices]
            except Exception:
                _logger.debug(
                    "Could not resolve selection choices for field %s",
                    field_obj.name,
                    exc_info=True,
                )

        # Non-required fields may serialize to null.
        if not field_obj.required:
            schema = {"anyOf": [schema, {"type": "null"}]}

        return schema

    def _relational_field_to_schema(self, field_obj, sub_fields):
        """Convert a relational field with sub-fields to a JSON Schema property."""
        comodel_name = field_obj.comodel_name
        if comodel_name not in self.env:
            return {"type": "string"}

        comodel = self.env[comodel_name]
        sub_properties, sub_required = self._parser_to_schema_properties(
            sub_fields, comodel
        )

        item_schema = {
            "type": "object",
            "properties": sub_properties,
            "additionalProperties": False,
        }
        if sub_required:
            item_schema["required"] = sub_required

        if field_obj.type == "many2one":
            # Many2one serializes to a single object (or null).
            schema = {
                "anyOf": [item_schema, {"type": "null"}],
                "title": field_obj.string or field_obj.name,
            }
        else:
            # One2many / Many2many serialize to an array of objects.
            schema = {
                "type": "array",
                "items": item_schema,
                "title": field_obj.string or field_obj.name,
            }
        return schema

    def _get_parser(self):
        """Resolve the ir.exports field selection into a jsonify-compatible parser.

        :raises UserError: if no exporter is configured yet
        """
        self.ensure_one()
        if not self.exporter_id:
            raise UserError(_("Please select a field selector (exporter) first."))
        raw_parser = self.exporter_id.get_json_parser()
        resolved = IrExportsResolver(raw_parser).resolved_parser
        if self.include_record_id and "id" not in resolved:
            resolved.insert(0, "id")
        return resolved

    def _get_serializer(self):
        """Return a JsonExportSerializer configured with this schema's parser."""
        self.ensure_one()
        parser = self._get_parser()
        return JsonExportSerializer(parser)

    def _get_domain(self):
        """Parse the domain filter string; fall back to [] on invalid input."""
        self.ensure_one()
        try:
            return safe_eval(self.domain or "[]")
        except Exception:
            # Best-effort: an invalid filter must not break previews or
            # exports, but leave a trace for debugging.
            _logger.debug(
                "Invalid domain %r on schema %s", self.domain, self.id, exc_info=True
            )
            return []

    def _get_records(self, limit=None, offset=0, extra_domain=None, no_limit=False):
        """Search for records matching the schema's domain.

        :param limit: max records to return; None means use the schema default
        :param offset: search offset for pagination
        :param extra_domain: additional domain leaves appended to the filter
        :param no_limit: if True, ignore limit and return all matching records
        """
        self.ensure_one()
        domain = self._get_domain()
        if extra_domain:
            # Build a fresh list instead of extending in place so callers'
            # extra_domain and the parsed domain are never mutated.
            domain = domain + list(extra_domain)
        model = self.env[self.model_name]
        search_limit = None if no_limit else (limit or self.record_limit or 100)
        return model.search(domain, limit=search_limit, offset=offset)

    def _serialize_records(self, records):
        """Serialize a recordset using this schema's configuration."""
        self.ensure_one()
        serializer = self._get_serializer()
        return serializer.serialize_many(records)

    def action_preview(self):
        """Refresh the preview data."""
        self.ensure_one()
        self._compute_preview_data()
        return True

    def action_export_json(self):
        """Manual export: serialize all matching records into an attachment.

        Returns an act_url action that downloads the generated file; every
        run (success or failure) is recorded in the export log.
        """
        self.ensure_one()
        start_time = time.time()
        try:
            records = self._get_records()
            data = self._serialize_records(records)
            content = json.dumps(data, indent=2, ensure_ascii=False)
            filename = "export_%s_%s.json" % (
                self.model_name.replace(".", "_"),
                fields.Datetime.now().strftime("%Y%m%d_%H%M%S"),
            )
            attachment = self.env["ir.attachment"].create(
                {
                    "name": filename,
                    "type": "binary",
                    "datas": base64.b64encode(content.encode("utf-8")),
                    "mimetype": "application/json",
                    "res_model": self._name,
                    "res_id": self.id,
                }
            )
            duration = int((time.time() - start_time) * 1000)
            self._create_log("manual", "success", len(records), duration)
            return {
                "type": "ir.actions.act_url",
                "url": "/web/content/%s?download=true" % attachment.id,
                "target": "new",
            }
        except Exception as e:
            duration = int((time.time() - start_time) * 1000)
            self._create_log("manual", "error", 0, duration, error_message=str(e))
            raise UserError(_("Export failed: %s") % str(e)) from e

    def action_view_logs(self):
        """Open the log entries linked to this schema."""
        self.ensure_one()
        return {
            "type": "ir.actions.act_window",
            "name": _("Export Logs"),
            "res_model": "json.export.log",
            "view_mode": "tree,form",
            "domain": [("schema_id", "=", self.id)],
            "context": {"default_schema_id": self.id},
        }

    def _create_log(
        self,
        log_type,
        status,
        records_count=0,
        duration_ms=0,
        error_message=None,
        request_info=None,
    ):
        """Create a json.export.log entry (sudo: logs are manager-restricted)."""
        self.ensure_one()
        return (
            self.env["json.export.log"]
            .sudo()
            .create(
                {
                    "schema_id": self.id,
                    "log_type": log_type,
                    "status": status,
                    "records_count": records_count,
                    "duration_ms": duration_ms,
                    "error_message": error_message,
                    "request_info": request_info,
                }
            )
        )

    def _register_hook(self):
        """Register webhook triggers on target models at registry load."""
        res = super()._register_hook()
        schemas = self.sudo().search(
            [
                ("active", "=", True),
                ("webhook_ids.active", "=", True),
            ]
        )
        for schema in schemas:
            schema._patch_model_for_webhooks()
        return res

    def _patch_model_for_webhooks(self):
        """Dynamically patch the target model to fire webhooks on CUD operations.

        The patch is applied once per model class (guarded by a class
        attribute); actual webhook routing is delegated to
        ``json.export.webhook._fire_for_model`` so a single patch serves
        all schemas targeting the same model.  The context key
        ``json_export_skip_webhook`` suppresses firing (e.g. to avoid
        recursion from webhook-triggered writes).
        """
        self.ensure_one()
        model_name = self.model_name
        if not model_name or model_name not in self.env:
            return

        model_cls = type(self.env[model_name])
        patch_attr = "_json_export_webhook_patched"

        if getattr(model_cls, patch_attr, False):
            return

        original_create = model_cls.create
        original_write = model_cls.write
        original_unlink = model_cls.unlink

        @api.model_create_multi
        def patched_create(self_model, vals_list):
            records = original_create(self_model, vals_list)
            if not self_model.env.context.get("json_export_skip_webhook"):
                self_model.env["json.export.webhook"].sudo()._fire_for_model(
                    self_model._name, "create", records
                )
            return records

        def patched_write(self_model, vals):
            res = original_write(self_model, vals)
            if not self_model.env.context.get("json_export_skip_webhook"):
                self_model.env["json.export.webhook"].sudo()._fire_for_model(
                    self_model._name, "write", self_model
                )
            return res

        def patched_unlink(self_model):
            # Fire *before* deletion so record data is still readable.
            if not self_model.env.context.get("json_export_skip_webhook"):
                self_model.env["json.export.webhook"].sudo()._fire_for_model(
                    self_model._name, "unlink", self_model
                )
            return original_unlink(self_model)

        model_cls.create = patched_create
        model_cls.write = patched_write
        model_cls.unlink = patched_unlink
        setattr(model_cls, patch_attr, True)
import hashlib
import hmac
import json
import logging
import time

import requests

from odoo import api, fields, models

_logger = logging.getLogger(__name__)


class JsonExportWebhook(models.Model):
    """Outbound webhook attached to an export schema.

    Fires an HTTP POST with serialized record data whenever records of
    the schema's target model are created, written or deleted (the model
    patching lives in ``json.export.schema._patch_model_for_webhooks``).
    """

    _name = "json.export.webhook"
    _description = "JSON Export Webhook"
    _order = "name"

    name = fields.Char(required=True)
    schema_id = fields.Many2one(
        "json.export.schema",
        string="Export Schema",
        required=True,
        ondelete="cascade",
    )
    active = fields.Boolean(default=True)
    url = fields.Char(required=True)
    # Event toggles: which CUD operations trigger this webhook.
    on_create = fields.Boolean(default=True)
    on_write = fields.Boolean(default=True)
    on_unlink = fields.Boolean(string="On Delete", default=True)
    secret_key = fields.Char(
        help="HMAC-SHA256 signing key. If set, a X-Webhook-Signature "
        "header will be sent with each request.",
        groups="json_export_engine.group_manager",
    )
    header_ids = fields.One2many(
        "json.export.webhook.header",
        "webhook_id",
        string="Custom Headers",
    )
    max_retries = fields.Integer(default=3)
    # "error" is sticky: a failed delivery pauses the webhook until a
    # manager resets it via action_reset_state.
    state = fields.Selection(
        [
            ("active", "Active"),
            ("error", "Error"),
            ("paused", "Paused"),
        ],
        default="active",
        readonly=True,
    )
    last_call_date = fields.Datetime(readonly=True)
    last_call_status = fields.Char(readonly=True)

    def action_reset_state(self):
        """Reset webhook state to active after an error."""
        self.write({"state": "active"})
        return True

    @api.model
    def _fire_for_model(self, model_name, event_type, records):
        """Find matching webhooks and fire them for the given model and event.

        Failures are logged but never propagated: a broken webhook must
        not abort the user's create/write/unlink.
        """
        event_field = "on_%s" % event_type
        webhooks = self.search(
            [
                ("active", "=", True),
                ("state", "=", "active"),
                ("schema_id.active", "=", True),
                ("schema_id.model_name", "=", model_name),
                (event_field, "=", True),
            ]
        )
        for webhook in webhooks:
            try:
                webhook._trigger_webhook(event_type, records)
            except Exception:
                _logger.exception(
                    "Webhook '%s' failed for %s on %s",
                    webhook.name,
                    event_type,
                    model_name,
                )

    def _trigger_webhook(self, event_type, records):
        """Serialize records, send the payload and record the outcome."""
        self.ensure_one()
        start_time = time.time()
        schema = self.schema_id
        try:
            if event_type == "unlink":
                # For deletions, send IDs only (records will be gone).
                data = [{"id": r.id} for r in records]
            else:
                data = schema._serialize_records(records)

            payload = {
                "event": event_type,
                "model": schema.model_name,
                "schema": schema.name,
                "timestamp": fields.Datetime.now().isoformat(),
                "records": data,
            }
            self._send_payload(payload)
            duration = int((time.time() - start_time) * 1000)
            self.sudo().write(
                {
                    "last_call_date": fields.Datetime.now(),
                    "last_call_status": "success",
                }
            )
            schema._create_log(
                "webhook",
                "success",
                len(records),
                duration,
                request_info=json.dumps(
                    {"webhook": self.name, "event": event_type, "url": self.url}
                ),
            )
        except Exception as e:
            duration = int((time.time() - start_time) * 1000)
            self.sudo().write(
                {
                    "last_call_date": fields.Datetime.now(),
                    "last_call_status": "error: %s" % str(e)[:200],
                    "state": "error",
                }
            )
            schema._create_log(
                "webhook",
                "error",
                len(records),
                duration,
                error_message=str(e),
                request_info=json.dumps(
                    {"webhook": self.name, "event": event_type, "url": self.url}
                ),
            )
            _logger.warning("Webhook '%s' failed: %s", self.name, e)

    def _send_payload(self, payload):
        """Send an HTTP POST with optional HMAC signing and custom headers.

        The body is encoded to UTF-8 *once* and those exact bytes are both
        signed and transmitted.  Passing a ``str`` to requests would let
        http.client re-encode it (latin-1 by default), so any non-ASCII
        payload (we dump with ensure_ascii=False) would either raise or
        put different bytes on the wire than were signed, breaking
        X-Webhook-Signature verification on the receiving end.
        """
        self.ensure_one()
        body = json.dumps(payload, ensure_ascii=False).encode("utf-8")
        headers = {"Content-Type": "application/json; charset=utf-8"}

        # Custom headers configured on the webhook.
        for header in self.header_ids:
            headers[header.key] = header.value

        # HMAC-SHA256 signature over the exact request bytes.
        if self.secret_key:
            signature = hmac.new(
                self.secret_key.encode("utf-8"),
                body,
                hashlib.sha256,
            ).hexdigest()
            headers["X-Webhook-Signature"] = signature

        # Send with retry and exponential backoff (1s, 2s, 4s, ...).
        attempts = max(self.max_retries, 1)
        last_error = None
        for attempt in range(attempts):
            try:
                resp = requests.post(
                    self.url,
                    data=body,
                    headers=headers,
                    timeout=30,
                )
                resp.raise_for_status()
                return
            except requests.RequestException as e:
                last_error = e
                if attempt < attempts - 1:
                    time.sleep(2**attempt)
        raise last_error


class JsonExportWebhookHeader(models.Model):
    """Extra HTTP header (name/value pair) sent with a webhook request."""

    _name = "json.export.webhook.header"
    _description = "JSON Export Webhook Custom Header"
    _order = "key"

    webhook_id = fields.Many2one(
        "json.export.webhook",
        required=True,
        ondelete="cascade",
    )
    key = fields.Char(string="Header Name", required=True)
    value = fields.Char(string="Header Value", required=True)
Check the **JSON Schema** tab to see the auto-generated JSON Schema (draft-07) + describing the structure of the exported data. + +**REST Endpoints:** + +1. In the schema form, go to the **Endpoints** tab. +2. Add an endpoint with a route path (e.g., ``partners``). +3. Choose authentication type and generate an API key if needed. +4. Two URLs are generated for each endpoint: + + - **Data URL**: ``https://your-odoo.com/api/json_export/partners`` + returns paginated JSON data + - **Schema URL**: ``https://your-odoo.com/api/json_export/partners/schema`` + returns the JSON Schema (draft-07) for the endpoint + +5. Pagination parameters: ``?page=1&limit=50`` + (capped by the endpoint's max limit setting). + +**Webhooks:** + +1. In the schema form, go to the **Webhooks** tab. +2. Add a webhook with a destination URL. +3. Select which events trigger the webhook (create, write, delete). +4. Optionally set a secret key for HMAC-SHA256 payload signing. +5. Add custom headers if the receiving system requires them. + +**Scheduled Exports:** + +1. In the schema form, go to the **Schedules** tab. +2. Add a schedule with the desired interval. +3. Choose the output format (JSON or JSON Lines) and destination + (Odoo attachment or HTTP POST). +4. Enable **Incremental** to only export records changed since the last run. diff --git a/json_export_engine/readme/CONTRIBUTORS.md b/json_export_engine/readme/CONTRIBUTORS.md new file mode 100644 index 00000000000..42b501aaecc --- /dev/null +++ b/json_export_engine/readme/CONTRIBUTORS.md @@ -0,0 +1 @@ +- Mohamed Alkobrosli @ [kobros-tech](https://kobros-tech.com) diff --git a/json_export_engine/readme/DESCRIPTION.md b/json_export_engine/readme/DESCRIPTION.md new file mode 100644 index 00000000000..9d00e671465 --- /dev/null +++ b/json_export_engine/readme/DESCRIPTION.md @@ -0,0 +1,30 @@ +Universal JSON / Schema Export Engine for Odoo. 
+ +This module provides a complete framework for exporting data from any Odoo model +as structured JSON, with support for: + +- **Dynamic Schema Builder**: Use Odoo's built-in export field selector to + interactively choose which fields (including nested relational fields) to + include in your JSON output. No code required. + +- **JSON Schema Generation**: Auto-generates a JSON Schema (draft-07) from the + selected fields and model definition, including field types, nullable markers, + selection enums, and nested relational structures. Available both in the UI + and via a dedicated REST endpoint. + +- **REST API Endpoints**: Generate REST-like API endpoints for any schema with + configurable authentication (none, API key, or session-based), pagination, + and CORS support. Each endpoint exposes both a data URL and a schema URL. + +- **Webhooks**: Push data to external systems automatically when records are + created, updated, or deleted. Supports HMAC-SHA256 signing, custom headers, + and retry with exponential backoff. + +- **Scheduled Exports**: Export data on a schedule (minutes, hours, days, weeks) + as JSON or JSON Lines files, delivered as Odoo attachments or HTTP POST to + an external URL. Supports incremental exports (only changed records). + +- **Export Logs**: Full audit trail of all export operations (API calls, webhooks, + scheduled exports, manual exports) with timing and error tracking. + +Minimal dependencies: only requires ``base``, ``web``, and ``jsonifier``. 
diff --git a/json_export_engine/security/ir.model.access.csv b/json_export_engine/security/ir.model.access.csv new file mode 100644 index 00000000000..686a299b1c6 --- /dev/null +++ b/json_export_engine/security/ir.model.access.csv @@ -0,0 +1,10 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_json_export_schema_manager,json.export.schema manager,model_json_export_schema,group_manager,1,1,1,1 +access_json_export_schema_user,json.export.schema user,model_json_export_schema,group_user,1,0,0,0 +access_json_export_endpoint_manager,json.export.endpoint manager,model_json_export_endpoint,group_manager,1,1,1,1 +access_json_export_endpoint_user,json.export.endpoint user,model_json_export_endpoint,group_user,1,0,0,0 +access_json_export_webhook_manager,json.export.webhook manager,model_json_export_webhook,group_manager,1,1,1,1 +access_json_export_webhook_header_manager,json.export.webhook.header manager,model_json_export_webhook_header,group_manager,1,1,1,1 +access_json_export_schedule_manager,json.export.schedule manager,model_json_export_schedule,group_manager,1,1,1,1 +access_json_export_log_manager,json.export.log manager,model_json_export_log,group_manager,1,1,1,1 +access_json_export_log_user,json.export.log user,model_json_export_log,group_user,1,0,0,0 diff --git a/json_export_engine/security/json_export_engine_security.xml b/json_export_engine/security/json_export_engine_security.xml new file mode 100644 index 00000000000..e696da171d8 --- /dev/null +++ b/json_export_engine/security/json_export_engine_security.xml @@ -0,0 +1,24 @@ + + + + + JSON Export Engine + 100 + + + + User + + + + + Manager + + + + + + diff --git a/json_export_engine/static/description/index.html b/json_export_engine/static/description/index.html new file mode 100644 index 00000000000..5c16c5b44fd --- /dev/null +++ b/json_export_engine/static/description/index.html @@ -0,0 +1,512 @@ + + + + + +JSON Export Engine + + + +
+

JSON Export Engine

+ + +

Alpha License: AGPL-3 OCA/server-tools Translate me on Weblate Try me on Runboat

+

Universal JSON / Schema Export Engine for Odoo.

+

This module provides a complete framework for exporting data from any +Odoo model as structured JSON, with support for:

+
    +
  • Dynamic Schema Builder: Use Odoo’s built-in export field selector +to interactively choose which fields (including nested relational +fields) to include in your JSON output. No code required.
  • +
  • JSON Schema Generation: Auto-generates a JSON Schema (draft-07) +from the selected fields and model definition, including field types, +nullable markers, selection enums, and nested relational structures. +Available both in the UI and via a dedicated REST endpoint.
  • +
  • REST API Endpoints: Generate REST-like API endpoints for any +schema with configurable authentication (none, API key, or +session-based), pagination, and CORS support. Each endpoint exposes +both a data URL and a schema URL.
  • +
  • Webhooks: Push data to external systems automatically when records +are created, updated, or deleted. Supports HMAC-SHA256 signing, custom +headers, and retry with exponential backoff.
  • +
  • Scheduled Exports: Export data on a schedule (minutes, hours, +days, weeks) as JSON or JSON Lines files, delivered as Odoo +attachments or HTTP POST to an external URL. Supports incremental +exports (only changed records).
  • +
  • Export Logs: Full audit trail of all export operations (API calls, +webhooks, scheduled exports, manual exports) with timing and error +tracking.
  • +
+

Minimal dependencies: only requires base, web, and +jsonifier.

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purpose, do not use in production. +More details on development status

+
+

Table of contents

+ +
+

Configuration

+

Creating an Export Schema:

+
    +
  1. Navigate to JSON Export Engine > Configuration > Export Schemas.
  2. +
  3. Create a new schema:
      +
    • Select the target model (e.g., res.partner, +product.product)
    • +
    • Click Select Fields to Export to interactively choose fields +(including nested relational fields)
    • +
    • Optionally set a domain filter to limit which records are exported
    • +
    • Configure options: record limit, whether to include the record ID, +and preview count
    • +
    +
  4. +
  5. Click Refresh Preview to see sample JSON output.
  6. +
  7. Check the JSON Schema tab to see the auto-generated JSON Schema +(draft-07) describing the structure of the exported data.
  8. +
+

REST Endpoints:

+
    +
  1. In the schema form, go to the Endpoints tab.
  2. +
  3. Add an endpoint with a route path (e.g., partners).
  4. +
  5. Choose authentication type and generate an API key if needed.
  6. +
  7. Two URLs are generated for each endpoint:
      +
    • Data URL: https://your-odoo.com/api/json_export/partners +returns paginated JSON data
    • +
    • Schema URL: +https://your-odoo.com/api/json_export/partners/schema returns +the JSON Schema (draft-07) for the endpoint
    • +
    +
  8. +
  9. Pagination parameters: ?page=1&limit=50 (capped by the endpoint’s +max limit setting).
  10. +
+

Webhooks:

+
    +
  1. In the schema form, go to the Webhooks tab.
  2. +
  3. Add a webhook with a destination URL.
  4. +
  5. Select which events trigger the webhook (create, write, delete).
  6. +
  7. Optionally set a secret key for HMAC-SHA256 payload signing.
  8. +
  9. Add custom headers if the receiving system requires them.
  10. +
+

Scheduled Exports:

+
    +
  1. In the schema form, go to the Schedules tab.
  2. +
  3. Add a schedule with the desired interval.
  4. +
  5. Choose the output format (JSON or JSON Lines) and destination (Odoo +attachment or HTTP POST).
  6. +
  7. Enable Incremental to only export records changed since the last +run.
  8. +
+
+
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • kobros-tech
  • +
+
+
+

Contributors

+ +
+
+

Maintainers

+

This module is maintained by the OCA.

+ +Odoo Community Association + +

OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use.

+

Current maintainer:

+

kobros-tech

+

This module is part of the OCA/server-tools project on GitHub.

+

You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

+
+
+
/** @odoo-module **/

/* Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
   @author Mohamed Alkobrosli
   License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). */

import {registry} from "@web/core/registry";
import {Many2OneField} from "@web/views/fields/many2one/many2one_field";
import {ExportDataDialog} from "@web/views/view_dialogs/export_data_dialog";
import {useService} from "@web/core/utils/hooks";

const {onWillDestroy} = owl;

/**
 * Field-selection dialog reused from the standard export dialog.
 *
 * The dialog is opened from a json.export.schema form, where the form's
 * model is the schema itself; the export field list must instead be
 * loaded for the schema's *target* model.  setup() temporarily swaps the
 * resModel on the form root and restores it when the dialog is destroyed.
 */
class JsonExportDialog extends ExportDataDialog {
    setup() {
        super.setup();
        Object.assign(this.state, {
            showApplyTemplateButton: false,
        });
        this.title = this.env._t("Select Fields for JSON Export");
        // Swap the model from props to load the correct export fields
        // for the schema's target model, not the schema model itself.
        this.swapResModel = this.props.root.resModel;
        this.props.root.resModel = this.props.context.resModel;
        if (this.props.context.exporter_id && this.props.context.exporter_id[0]) {
            this.state.templateId = this.props.context.exporter_id[0];
        } else {
            this.state.templateId = "new_template";
        }
        // Restore original model when dialog is destroyed.
        onWillDestroy(() => {
            this.props.root.resModel = this.swapResModel;
        });
    }

    async onChangeExportList(ev) {
        // Await the template load before inspecting the state: without
        // the await the comparison below could run against a stale
        // templateId while the async load is still in flight.
        await this.loadExportList(ev.target.value);
        const currentId =
            this.props.context.exporter_id && this.props.context.exporter_id[0];
        // Show "Apply" only when the user picked a saved template that
        // differs from the one currently linked to the schema.
        this.state.showApplyTemplateButton =
            this.state.templateId !== currentId &&
            this.state.templateId !== "new_template";
    }

    onClickApplyTemplateButton() {
        // Resolve [id, name] for the selected template and hand it back
        // to the schema form through the context callback.
        const templatePairs = this.templates.map(({id, name}) => [id, name]);
        const selected = templatePairs.find(
            (pair) => pair[0] === this.state.templateId
        );
        if (selected) {
            this.props.context.overlap(selected);
        }
        this.props.close();
    }

    /**
     * Persist the current field selection back onto the selected
     * ir.exports template: keep lines whose field is still selected,
     * create lines for newly selected fields, unlink removed ones.
     */
    async onUpdateExportTemplate() {
        const oldRec = await this.orm.read(
            "ir.exports",
            [this.state.templateId],
            ["name", "export_fields"]
        );
        let oldLines = [];
        if (
            oldRec.length &&
            oldRec[0].export_fields &&
            oldRec[0].export_fields.length
        ) {
            oldLines = await this.orm.read("ir.exports.line", oldRec[0].export_fields, [
                "name",
            ]);
        }
        const newFieldNames = this.state.exportList.map((field) => field.id);
        const oldFieldMap = Object.fromEntries(
            oldLines.map((line) => [line.name, line.id])
        );
        const fieldCommands = [];
        // Keep existing lines (command 4) or create new ones (command 0).
        for (const field of this.state.exportList) {
            if (oldFieldMap[field.id]) {
                fieldCommands.push([4, oldFieldMap[field.id]]);
            } else {
                fieldCommands.push([0, 0, {name: field.id}]);
            }
        }
        // Unlink (command 3) lines whose field was deselected.
        for (const oldLine of oldLines) {
            if (!newFieldNames.includes(oldLine.name)) {
                fieldCommands.push([3, oldLine.id]);
            }
        }
        await this.orm.write("ir.exports", [this.state.templateId], {
            export_fields: fieldCommands,
        });
        this.state.isEditingTemplate = false;
    }
}
JsonExportDialog.template = "json_export_engine.JsonExportDialog";

/**
 * Many2one widget for the schema's exporter_id field: instead of a plain
 * dropdown it opens the field-selection dialog above, pre-targeted at the
 * schema's model (read from the sibling model_name field).
 */
class JsonExportFieldSelector extends Many2OneField {
    setup() {
        super.setup();
        this.rpc = useService("rpc");
        this.orm = useService("orm");
        this.dialogService = useService("dialog");
        // Callback the dialog uses to write the chosen [id, name] back
        // into this many2one.
        this.quickOverlap = (templ) => {
            if (templ && templ[0] && templ[1]) {
                return this.props.update(templ);
            }
        };
    }

    async downloadExport() {
        // No-op: the dialog is used for field selection, not actual export.
        return true;
    }

    async getExportedFields(model, import_compat, parentParams) {
        return await this.rpc("/web/export/get_fields", {
            ...parentParams,
            model,
            import_compat,
        });
    }

    openFieldSelector() {
        const modelName = this.props.record.data.model_name;
        if (!modelName) {
            // Model not selected yet - cannot open the field selector.
            return;
        }
        const dialogProps = {
            context: {
                ...this.props.record.context,
                resModel: modelName,
                exporter_id: this.props.value || false,
                overlap: (templ) => {
                    this.quickOverlap(templ);
                },
            },
            defaultExportList: [],
            download: this.downloadExport.bind(this),
            getExportedFields: this.getExportedFields.bind(this),
            root: this.props.record.model.root,
        };
        this.dialogService.add(JsonExportDialog, dialogProps);
    }
}

JsonExportFieldSelector.template = "json_export_engine.JsonExportFieldSelector";
JsonExportFieldSelector.supportedTypes = ["many2one"];
JsonExportFieldSelector.fieldDependencies = {
    model_name: {type: "char"},
};

registry.category("fields").add("json_export_field_selector", JsonExportFieldSelector);
100644 index 00000000000..339e1975393 --- /dev/null +++ b/json_export_engine/static/src/json_export_widget.xml @@ -0,0 +1,60 @@ + + + + +
+ +
+
+
+ + + false + + + false + + + false + + + + + + + + + + +
# ==== file: json_export_engine/tests/__init__.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from . import test_json_export_endpoint
from . import test_json_export_schedule
from . import test_json_export_schema
from . import test_json_export_webhook
from . import test_controller
from . import test_tools

# ==== file: json_export_engine/tests/common.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from odoo.tests.common import TransactionCase


class JsonExportTestCase(TransactionCase):
    """Shared fixtures for the json_export_engine test suite.

    Provides an ir.exports exporter on res.partner (three simple fields
    plus one relational sub-field), a json.export.schema bound to it,
    and two res.partner records (one with, one without a country).
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.env = cls.env(context=dict(cls.env.context, tracking_disable=True))

        # Create ir.exports + lines for res.partner
        cls.partner_exporter = cls.env["ir.exports"].create(
            {
                "name": "Test Partner Export",
                "resource": "res.partner",
            }
        )
        # Batch-create all export lines (three simple fields and one
        # relational "country_id/name" line) in a single ORM call instead
        # of one create() per line.
        cls.env["ir.exports.line"].create(
            [
                {"export_id": cls.partner_exporter.id, "name": field_name}
                for field_name in ["name", "email", "phone", "country_id/name"]
            ]
        )

        # Create schema
        cls.partner_model = cls.env.ref("base.model_res_partner")
        cls.schema = cls.env["json.export.schema"].create(
            {
                "name": "Test Partners",
                "model_id": cls.partner_model.id,
                "exporter_id": cls.partner_exporter.id,
                "domain": "[]",
                "record_limit": 10,
                "include_record_id": True,
                "preview_count": 3,
            }
        )

        # Create test partners
        country_us = cls.env.ref("base.us")
        cls.partner1 = cls.env["res.partner"].create(
            {
                "name": "Test Partner 1",
                "email": "test1@example.com",
                "phone": "+1234567890",
                "country_id": country_us.id,
            }
        )
        cls.partner2 = cls.env["res.partner"].create(
            {
                "name": "Test Partner 2",
                "email": "test2@example.com",
            }
        )

# ==== file: json_export_engine/tests/test_controller.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).


from odoo.tests.common import HttpCase, tagged


@tagged("-at_install", "post_install")
class TestJsonExportController(HttpCase):
    """HTTP-level tests for the /api/json_export/<path> controller:
    auth modes, pagination, CORS, response structure and logging."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.env = cls.env(context=dict(cls.env.context, tracking_disable=True))

        # Create ir.exports + lines for res.partner
        cls.partner_exporter = cls.env["ir.exports"].create(
            {
                "name": "Test Controller Export",
                "resource": "res.partner",
            }
        )
        # Batch-create the export lines in one call.
        cls.env["ir.exports.line"].create(
            [
                {"export_id": cls.partner_exporter.id, "name": field_name}
                for field_name in ["name", "email"]
            ]
        )

        # Create schema
        cls.partner_model = cls.env.ref("base.model_res_partner")
        cls.schema = cls.env["json.export.schema"].create(
            {
                "name": "Controller Test Schema",
                "model_id": cls.partner_model.id,
                "exporter_id": cls.partner_exporter.id,
                "domain": "[]",
                "record_limit": 100,
                "include_record_id": True,
            }
        )

        # Create test partner
        cls.env["res.partner"].create(
            {
                "name": "Controller Test Partner",
                "email": "controller@example.com",
            }
        )

        # Endpoint with no auth
        cls.endpoint_no_auth = cls.env["json.export.endpoint"].create(
            {
                "name": "No Auth Endpoint",
                "schema_id": cls.schema.id,
                "route_path": "ctrl-test-noauth",
                "auth_type": "none",
                "default_limit": 10,
                "max_limit": 50,
            }
        )

        # Endpoint with API key auth
        cls.endpoint_api_key = cls.env["json.export.endpoint"].create(
            {
                "name": "API Key Endpoint",
                "schema_id": cls.schema.id,
                "route_path": "ctrl-test-apikey",
                "auth_type": "api_key",
                "api_key": "test-api-key-12345",
                "default_limit": 10,
                "max_limit": 50,
            }
        )

        # Endpoint with CORS
        cls.endpoint_cors = cls.env["json.export.endpoint"].create(
            {
                "name": "CORS Endpoint",
                "schema_id": cls.schema.id,
                "route_path": "ctrl-test-cors",
                "auth_type": "none",
                "cors_origin": "*",
                "default_limit": 10,
                "max_limit": 50,
            }
        )

    def _get(self, path, headers=None):
        """Helper to perform a GET request."""
        url = "/api/json_export/%s" % path
        return self.url_open(url, headers=headers or {})

    # -- No auth tests --

    def test_export_data_no_auth(self):
        """GET with auth_type=none returns JSON data."""
        response = self._get("ctrl-test-noauth")
        self.assertEqual(response.status_code, 200)
        data = response.json()
        self.assertTrue(data["success"])
        self.assertIn("data", data)
        self.assertIsInstance(data["data"], list)

    # -- API key auth tests --

    def test_export_data_api_key_valid(self):
        """GET with correct X-API-Key header succeeds."""
        response = self._get(
            "ctrl-test-apikey",
            headers={"X-API-Key": "test-api-key-12345"},
        )
        self.assertEqual(response.status_code, 200)
        data = response.json()
        self.assertTrue(data["success"])

    def test_export_data_api_key_invalid(self):
        """GET with wrong key returns 401."""
        response = self._get(
            "ctrl-test-apikey",
            headers={"X-API-Key": "wrong-key"},
        )
        self.assertEqual(response.status_code, 401)
        data = response.json()
        self.assertFalse(data["success"])

    def test_export_data_api_key_missing(self):
        """GET without key returns 401."""
        response = self._get("ctrl-test-apikey")
        self.assertEqual(response.status_code, 401)

    def test_export_data_api_key_query_param(self):
        """API key via ?api_key= query param."""
        response = self.url_open(
            "/api/json_export/ctrl-test-apikey?api_key=test-api-key-12345"
        )
        self.assertEqual(response.status_code, 200)
        data = response.json()
        self.assertTrue(data["success"])

    # -- Pagination tests --

    def test_export_data_pagination(self):
        """Response has correct pagination metadata."""
        response = self._get("ctrl-test-noauth")
        data = response.json()
        self.assertIn("pagination", data)
        pagination = data["pagination"]
        self.assertIn("page", pagination)
        self.assertIn("limit", pagination)
        self.assertIn("total", pagination)
        self.assertIn("pages", pagination)
        self.assertEqual(pagination["page"], 1)

    def test_export_data_custom_limit(self):
        """?limit=2 is respected."""
        response = self.url_open("/api/json_export/ctrl-test-noauth?limit=2")
        data = response.json()
        self.assertLessEqual(len(data["data"]), 2)
        self.assertEqual(data["pagination"]["limit"], 2)

    # -- 404 test --

    def test_export_data_not_found(self):
        """Non-existent path returns 404."""
        response = self._get("nonexistent-path")
        self.assertEqual(response.status_code, 404)
        data = response.json()
        self.assertFalse(data["success"])

    # -- Schema endpoint tests --

    def test_export_schema_endpoint(self):
        """GET .../schema returns JSON Schema."""
        response = self._get("ctrl-test-noauth/schema")
        self.assertEqual(response.status_code, 200)
        data = response.json()
        self.assertIn("$schema", data)
        self.assertEqual(data["type"], "object")
        self.assertIn("properties", data)

    # -- CORS tests --

    def test_export_data_cors_headers(self):
        """CORS headers present when cors_origin is set."""
        response = self._get("ctrl-test-cors")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.headers.get("Access-Control-Allow-Origin"), "*")

    # -- Response structure test --

    def test_export_data_response_structure(self):
        """Response has success, data, pagination, meta keys."""
        response = self._get("ctrl-test-noauth")
        data = response.json()
        self.assertIn("success", data)
        self.assertIn("data", data)
        self.assertIn("pagination", data)
        self.assertIn("meta", data)
        meta = data["meta"]
        self.assertIn("schema", meta)
        self.assertIn("model", meta)
        self.assertIn("duration_ms", meta)

    # -- Log creation test --

    def test_export_data_creates_log(self):
        """API call creates a log entry."""
        log_count_before = self.env["json.export.log"].search_count(
            [("schema_id", "=", self.schema.id), ("log_type", "=", "api")]
        )
        self._get("ctrl-test-noauth")
        log_count_after = self.env["json.export.log"].search_count(
            [("schema_id", "=", self.schema.id), ("log_type", "=", "api")]
        )
        self.assertEqual(log_count_after, log_count_before + 1)
# ==== file: json_export_engine/tests/test_json_export_endpoint.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from odoo.exceptions import ValidationError

from .common import JsonExportTestCase


class TestJsonExportEndpoint(JsonExportTestCase):
    """Unit tests for json.export.endpoint: URL computation, route-path
    validation/uniqueness and API-key generation."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.endpoint = cls.env["json.export.endpoint"].create(
            {
                "name": "Test Endpoint",
                "schema_id": cls.schema.id,
                "route_path": "test-partners",
                "auth_type": "api_key",
                "default_limit": 50,
                "max_limit": 500,
            }
        )

    # -- URL computation tests --

    def test_compute_full_url(self):
        """Data URL is correctly computed."""
        base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url")
        expected = "%s/api/json_export/test-partners" % base_url
        self.assertEqual(self.endpoint.full_url, expected)

    def test_compute_schema_url(self):
        """Schema URL is correctly computed."""
        base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url")
        expected = "%s/api/json_export/test-partners/schema" % base_url
        self.assertEqual(self.endpoint.schema_url, expected)

    def test_compute_url_empty_path(self):
        """Both URLs empty when route_path is not set."""
        endpoint = self.env["json.export.endpoint"].new(
            {
                "name": "No Path",
                "schema_id": self.schema.id,
                "route_path": False,
            }
        )
        endpoint._compute_full_url()
        self.assertFalse(endpoint.full_url)
        self.assertFalse(endpoint.schema_url)

    # -- Route path validation tests --

    def test_route_path_valid(self):
        """Accepts valid route paths."""
        for path in ["products", "my-products", "v1/products", "under_score"]:
            endpoint = self.env["json.export.endpoint"].create(
                {
                    "name": "Valid %s" % path,
                    "schema_id": self.schema.id,
                    "route_path": path,
                    "auth_type": "none",
                }
            )
            self.assertTrue(endpoint.id)
            endpoint.unlink()

    def test_route_path_invalid_chars(self):
        """Raises ValidationError for special characters."""
        with self.assertRaises(ValidationError):
            self.env["json.export.endpoint"].create(
                {
                    "name": "Invalid Path",
                    "schema_id": self.schema.id,
                    "route_path": "products?query=1",
                    "auth_type": "none",
                }
            )

    def test_route_path_unique(self):
        """Raises ValidationError for duplicate active paths."""
        with self.assertRaises(ValidationError):
            self.env["json.export.endpoint"].create(
                {
                    "name": "Duplicate",
                    "schema_id": self.schema.id,
                    "route_path": "test-partners",
                    "auth_type": "none",
                }
            )

    def test_route_path_unique_allows_archived(self):
        """Archived endpoint with same path is allowed."""
        self.endpoint.active = False
        # Should not raise
        endpoint2 = self.env["json.export.endpoint"].create(
            {
                "name": "Reuse Path",
                "schema_id": self.schema.id,
                "route_path": "test-partners",
                "auth_type": "none",
            }
        )
        self.assertTrue(endpoint2.id)

    # -- API key tests --

    def test_generate_api_key(self):
        """Generates a 64-char hex string."""
        self.endpoint.action_generate_api_key()
        self.assertTrue(self.endpoint.api_key)
        self.assertEqual(len(self.endpoint.api_key), 64)
        # Verify it's valid hex
        int(self.endpoint.api_key, 16)

    def test_generate_api_key_unique(self):
        """Two generations produce different keys."""
        self.endpoint.action_generate_api_key()
        key1 = self.endpoint.api_key
        self.endpoint.action_generate_api_key()
        key2 = self.endpoint.api_key
        self.assertNotEqual(key1, key2)
# ==== file: json_export_engine/tests/test_json_export_schedule.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

import base64
import json
from unittest import mock

# Hoisted to module level (PEP 8) instead of importing inside the
# error-handling test; requests is a hard dependency of the tested code.
import requests

from .common import JsonExportTestCase


class TestJsonExportSchedule(JsonExportTestCase):
    """Tests for json.export.schedule: cron lifecycle, export execution
    in all destination types/formats, and status bookkeeping."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.schedule = cls.env["json.export.schedule"].create(
            {
                "name": "Test Schedule",
                "schema_id": cls.schema.id,
                "interval_number": 1,
                "interval_type": "hours",
                "destination_type": "attachment",
                "file_format": "json",
                "incremental": False,
            }
        )

    # -- Cron lifecycle tests --

    def test_create_schedule_creates_cron(self):
        """cron_id is populated after create."""
        self.assertTrue(self.schedule.cron_id)
        self.assertTrue(self.schedule.cron_id.active)

    def test_cron_interval_matches(self):
        """Cron interval matches schedule config."""
        cron = self.schedule.cron_id
        self.assertEqual(cron.interval_number, 1)
        self.assertEqual(cron.interval_type, "hours")

    def test_write_schedule_updates_cron(self):
        """Changing interval updates the cron."""
        self.schedule.write({"interval_number": 5, "interval_type": "days"})
        cron = self.schedule.cron_id
        self.assertEqual(cron.interval_number, 5)
        self.assertEqual(cron.interval_type, "days")

    def test_unlink_schedule_removes_cron(self):
        """Cron is deleted when schedule is deleted."""
        cron_id = self.schedule.cron_id.id
        self.schedule.unlink()
        self.assertFalse(self.env["ir.cron"].browse(cron_id).exists())

    def test_toggle_active_updates_cron(self):
        """Deactivating schedule deactivates the cron."""
        self.schedule.write({"active": False})
        self.assertFalse(self.schedule.cron_id.active)
        self.schedule.write({"active": True})
        self.assertTrue(self.schedule.cron_id.active)

    # -- Export execution tests --

    def test_run_export_attachment(self):
        """Creates ir.attachment with valid JSON content."""
        self.schedule._run_scheduled_export()
        attachments = self.env["ir.attachment"].search(
            [
                ("res_model", "=", "json.export.schedule"),
                ("res_id", "=", self.schedule.id),
            ]
        )
        self.assertTrue(attachments)
        content = base64.b64decode(attachments[0].datas).decode("utf-8")
        data = json.loads(content)
        self.assertIsInstance(data, list)

    def test_run_export_jsonl_format(self):
        """JSONL format produces one JSON object per line."""
        self.schedule.write({"file_format": "jsonl"})
        self.schedule._run_scheduled_export()
        attachments = self.env["ir.attachment"].search(
            [
                ("res_model", "=", "json.export.schedule"),
                ("res_id", "=", self.schedule.id),
                ("mimetype", "=", "application/x-ndjson"),
            ]
        )
        self.assertTrue(attachments)
        content = base64.b64decode(attachments[0].datas).decode("utf-8")
        lines = [line for line in content.strip().split("\n") if line]
        for line in lines:
            parsed = json.loads(line)
            self.assertIsInstance(parsed, dict)

    def test_run_export_incremental(self):
        """Incremental mode only exports records changed since last run."""
        # Set last_run_date to a point in the past
        past = "2000-01-01 00:00:00"
        self.schedule.write(
            {
                "incremental": True,
                "last_run_date": past,
            }
        )
        self.schedule._run_scheduled_export()
        # All records should be newer than 2000-01-01
        self.assertEqual(self.schedule.last_run_status, "success")
        self.assertGreater(self.schedule.last_run_count, 0)

    def test_run_export_updates_last_run(self):
        """Updates last_run_date, last_run_status, last_run_count."""
        self.schedule._run_scheduled_export()
        self.assertTrue(self.schedule.last_run_date)
        self.assertEqual(self.schedule.last_run_status, "success")
        self.assertGreaterEqual(self.schedule.last_run_count, 0)
        self.assertFalse(self.schedule.last_run_error)

    def test_run_export_http_post(self):
        """HTTP POST delivery sends data to destination_url."""
        self.schedule.write(
            {
                "destination_type": "http_post",
                "destination_url": "https://example.com/receive",
            }
        )
        mock_response = mock.MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status.return_value = None
        with mock.patch("requests.post", return_value=mock_response) as mock_post:
            self.schedule._run_scheduled_export()
        mock_post.assert_called_once()
        call_args = mock_post.call_args
        self.assertEqual(call_args[0][0], "https://example.com/receive")
        self.assertEqual(self.schedule.last_run_status, "success")

    def test_run_export_error_handling(self):
        """Sets last_run_status='error' on failure."""
        self.schedule.write(
            {
                "destination_type": "http_post",
                "destination_url": "https://example.com/fail",
            }
        )
        with mock.patch(
            "requests.post",
            side_effect=requests.ConnectionError("Connection refused"),
        ):
            self.schedule._run_scheduled_export()
        self.assertEqual(self.schedule.last_run_status, "error")
        self.assertTrue(self.schedule.last_run_error)

    def test_action_run_now(self):
        """Manual trigger works."""
        result = self.schedule.action_run_now()
        self.assertTrue(result)
        self.assertEqual(self.schedule.last_run_status, "success")

    def test_cron_run_export(self):
        """_cron_run_export entry point works for valid schedule."""
        self.env["json.export.schedule"]._cron_run_export(self.schedule.id)
        self.assertEqual(self.schedule.last_run_status, "success")

    def test_cron_run_export_invalid_id(self):
        """_cron_run_export silently skips non-existent schedule."""
        # Should not raise
        self.env["json.export.schedule"]._cron_run_export(99999)
# ==== file: json_export_engine/tests/test_json_export_schema.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

import base64
import json

from odoo.exceptions import UserError

from .common import JsonExportTestCase


class TestJsonExportSchema(JsonExportTestCase):
    """Tests for json.export.schema: parser/domain resolution, record
    retrieval, serialization, preview, JSON Schema generation, export
    action and logging."""

    # -- Parser tests --

    def test_get_parser(self):
        """Parser resolves from ir.exports and includes 'id' when configured."""
        parser = self.schema._get_parser()
        self.assertIsInstance(parser, list)
        self.assertIn("id", parser)
        self.assertIn("name", parser)
        self.assertIn("email", parser)
        self.assertIn("phone", parser)
        # Relational field should be a tuple
        relational = [item for item in parser if isinstance(item, tuple)]
        self.assertTrue(relational, "Should have at least one relational field")
        country_tuple = relational[0]
        self.assertEqual(country_tuple[0], "country_id")
        self.assertIn("name", country_tuple[1])

    def test_get_parser_without_record_id(self):
        """'id' excluded when include_record_id is False."""
        self.schema.include_record_id = False
        parser = self.schema._get_parser()
        self.assertNotIn("id", parser)

    def test_get_parser_no_exporter(self):
        """Raises UserError when no exporter is set."""
        schema_no_exp = self.env["json.export.schema"].create(
            {
                "name": "No Exporter",
                "model_id": self.partner_model.id,
            }
        )
        with self.assertRaises(UserError):
            schema_no_exp._get_parser()

    # -- Domain tests --

    def test_get_domain_valid(self):
        """Parses a valid domain string."""
        self.schema.domain = "[('active', '=', True)]"
        domain = self.schema._get_domain()
        self.assertEqual(domain, [("active", "=", True)])

    def test_get_domain_empty(self):
        """Returns empty list for default domain."""
        self.schema.domain = "[]"
        self.assertEqual(self.schema._get_domain(), [])

    def test_get_domain_invalid(self):
        """Returns empty list as fallback for invalid syntax."""
        self.schema.domain = "invalid python code"
        self.assertEqual(self.schema._get_domain(), [])

    # -- Record retrieval tests --

    def test_get_records(self):
        """Returns records matching domain with limit."""
        records = self.schema._get_records(limit=1)
        self.assertTrue(len(records) <= 1)

    def test_get_records_with_offset(self):
        """Pagination offset works."""
        all_records = self.schema._get_records(limit=100)
        if len(all_records) >= 2:
            offset_records = self.schema._get_records(limit=100, offset=1)
            self.assertEqual(len(offset_records), len(all_records) - 1)

    def test_get_records_no_limit(self):
        """no_limit=True returns all matching records."""
        records_limited = self.schema._get_records(limit=1)
        records_all = self.schema._get_records(no_limit=True)
        self.assertGreaterEqual(len(records_all), len(records_limited))

    def test_get_records_extra_domain(self):
        """Extra domain filter is applied."""
        records = self.schema._get_records(
            extra_domain=[("name", "=", "Test Partner 1")]
        )
        for rec in records:
            self.assertEqual(rec.name, "Test Partner 1")

    # -- Serialization tests --

    def test_serialize_records(self):
        """Returns list of dicts with expected keys."""
        records = self.schema._get_records(limit=2)
        data = self.schema._serialize_records(records)
        self.assertIsInstance(data, list)
        if data:
            first = data[0]
            self.assertIn("id", first)
            self.assertIn("name", first)

    # -- Preview tests --

    def test_compute_preview_data(self):
        """Preview data computed as valid JSON string."""
        self.schema._compute_preview_data()
        self.assertTrue(self.schema.preview_data)
        parsed = json.loads(self.schema.preview_data)
        self.assertIsInstance(parsed, list)

    def test_compute_preview_data_no_exporter(self):
        """Empty string when no exporter is set."""
        schema_no_exp = self.env["json.export.schema"].create(
            {
                "name": "No Exporter",
                "model_id": self.partner_model.id,
            }
        )
        self.assertFalse(schema_no_exp.preview_data)

    # -- JSON Schema generation tests --

    def test_generate_json_schema(self):
        """Schema has correct draft-07 structure."""
        json_schema = self.schema._generate_json_schema()
        self.assertEqual(
            json_schema["$schema"], "http://json-schema.org/draft-07/schema#"
        )
        self.assertEqual(json_schema["title"], "Test Partners")
        self.assertEqual(json_schema["type"], "object")
        self.assertIn("properties", json_schema)
        self.assertIn("required", json_schema)
        self.assertFalse(json_schema["additionalProperties"])

    def test_json_schema_field_types(self):
        """FIELD_TYPE_MAP entries produce correct JSON schema types."""
        schema = self.schema._generate_json_schema()
        props = schema["properties"]
        # 'name' is Char → string (but may be wrapped in anyOf if nullable)
        name_prop = props.get("name", {})
        if "anyOf" in name_prop:
            types = [t.get("type") for t in name_prop["anyOf"]]
            self.assertIn("string", types)
        else:
            self.assertEqual(name_prop.get("type"), "string")

    def test_json_schema_relational_many2one(self):
        """Many2one with sub-fields → anyOf[object, null]."""
        schema = self.schema._generate_json_schema()
        props = schema["properties"]
        country_prop = props.get("country_id", {})
        self.assertIn("anyOf", country_prop)
        types = [t.get("type") for t in country_prop["anyOf"]]
        self.assertIn("object", types)
        self.assertIn("null", types)

    def test_json_schema_nullable(self):
        """Non-required fields are wrapped in anyOf with null."""
        model = self.env["res.partner"]
        field_obj = model._fields["email"]
        result = self.schema._field_to_schema(field_obj)
        if not field_obj.required:
            self.assertIn("anyOf", result)
            types = [t.get("type") for t in result["anyOf"]]
            self.assertIn("null", types)

    def test_json_schema_selection_enum(self):
        """Selection fields include enum values."""
        model = self.env["res.partner"]
        field_obj = model._fields["type"]
        result = self.schema._field_to_schema(field_obj)
        # Selection field should have enum, possibly wrapped in anyOf
        if "anyOf" in result:
            inner = result["anyOf"][0]
            self.assertIn("enum", inner)
        else:
            self.assertIn("enum", result)

    def test_compute_json_schema(self):
        """Computed json_schema field is valid JSON."""
        self.assertTrue(self.schema.json_schema)
        parsed = json.loads(self.schema.json_schema)
        self.assertIn("$schema", parsed)

    # -- Export action tests --

    def test_action_export_json(self):
        """Creates ir.attachment with base64 JSON content."""
        result = self.schema.action_export_json()
        self.assertEqual(result["type"], "ir.actions.act_url")
        self.assertIn("/web/content/", result["url"])

        # Verify attachment exists and content is valid JSON
        attachments = self.env["ir.attachment"].search(
            [("res_model", "=", "json.export.schema"), ("res_id", "=", self.schema.id)]
        )
        self.assertTrue(attachments)
        content = base64.b64decode(attachments[0].datas).decode("utf-8")
        data = json.loads(content)
        self.assertIsInstance(data, list)

    # -- Log tests --

    def test_create_log(self):
        """Creates log entry with correct fields."""
        log = self.schema._create_log(
            "manual",
            "success",
            records_count=5,
            duration_ms=100,
            request_info='{"test": true}',
        )
        self.assertEqual(log.schema_id, self.schema)
        self.assertEqual(log.log_type, "manual")
        self.assertEqual(log.status, "success")
        self.assertEqual(log.records_count, 5)
        self.assertEqual(log.duration_ms, 100)

    def test_action_view_logs(self):
        """Returns correct action dict."""
        result = self.schema.action_view_logs()
        self.assertEqual(result["type"], "ir.actions.act_window")
        self.assertEqual(result["res_model"], "json.export.log")
        self.assertIn(("schema_id", "=", self.schema.id), result["domain"])

    def test_compute_log_count(self):
        """Log count matches actual log records."""
        self.schema._create_log("manual", "success", 1, 10)
        self.schema._create_log("api", "error", 0, 5)
        # invalidate_cache() is deprecated in Odoo 16; use the recordset-level
        # invalidation so the computed log_count is re-read from the database.
        self.schema.invalidate_recordset(["log_count"])
        self.assertEqual(self.schema.log_count, 2)
# ==== file: json_export_engine/tests/test_json_export_webhook.py ====
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

import hashlib
import hmac
import json
from unittest import mock

import requests

from .common import JsonExportTestCase


class TestJsonExportWebhook(JsonExportTestCase):
    """Tests for json.export.webhook: event dispatch, payload shape,
    HMAC signing, custom headers, retries and state transitions."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.webhook = cls.env["json.export.webhook"].create(
            {
                "name": "Test Webhook",
                "schema_id": cls.schema.id,
                "url": "https://webhook.example.com/hook",
                "on_create": True,
                "on_write": True,
                "on_unlink": True,
                "secret_key": "test-secret-key",
                "max_retries": 2,
            }
        )
        cls.env["json.export.webhook.header"].create(
            {
                "webhook_id": cls.webhook.id,
                "key": "X-Custom-Header",
                "value": "custom-value",
            }
        )

    def _mock_post_success(self):
        """Return a mock for requests.post that succeeds."""
        mock_response = mock.MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status.return_value = None
        return mock.patch("requests.post", return_value=mock_response)

    def _mock_post_failure(self):
        """Return a mock for requests.post that always fails."""
        return mock.patch(
            "requests.post",
            side_effect=requests.ConnectionError("Connection refused"),
        )

    # -- _fire_for_model tests --

    def test_fire_for_model_create(self):
        """Fires webhook on create event."""
        with self._mock_post_success() as mock_post:
            self.env["json.export.webhook"]._fire_for_model(
                "res.partner", "create", self.partner1
            )
        mock_post.assert_called_once()

    def test_fire_for_model_write(self):
        """Fires webhook on write event."""
        with self._mock_post_success() as mock_post:
            self.env["json.export.webhook"]._fire_for_model(
                "res.partner", "write", self.partner1
            )
        mock_post.assert_called_once()

    def test_fire_for_model_unlink(self):
        """Fires webhook on unlink event."""
        with self._mock_post_success() as mock_post:
            self.env["json.export.webhook"]._fire_for_model(
                "res.partner", "unlink", self.partner1
            )
        mock_post.assert_called_once()

    def test_fire_for_model_disabled_event(self):
        """Does NOT fire when event flag is False."""
        self.webhook.on_create = False
        with self._mock_post_success() as mock_post:
            self.env["json.export.webhook"]._fire_for_model(
                "res.partner", "create", self.partner1
            )
        mock_post.assert_not_called()
        self.webhook.on_create = True

    def test_fire_for_model_inactive_webhook(self):
        """Does NOT fire for inactive webhooks."""
        self.webhook.active = False
        with self._mock_post_success() as mock_post:
            self.env["json.export.webhook"]._fire_for_model(
                "res.partner", "create", self.partner1
            )
        mock_post.assert_not_called()
        self.webhook.active = True

    # -- _trigger_webhook tests --

    def test_trigger_webhook_payload(self):
        """Payload contains required fields."""
        with self._mock_post_success() as mock_post:
            self.webhook._trigger_webhook("create", self.partner1)
        call_args = mock_post.call_args
        body = json.loads(call_args.kwargs.get("data", call_args[1].get("data")))
        self.assertEqual(body["event"], "create")
        self.assertEqual(body["model"], "res.partner")
        self.assertEqual(body["schema"], "Test Partners")
        self.assertIn("timestamp", body)
        self.assertIn("records", body)
        self.assertIsInstance(body["records"], list)

    def test_trigger_webhook_unlink_payload(self):
        """Unlink events send only record IDs."""
        with self._mock_post_success() as mock_post:
            self.webhook._trigger_webhook("unlink", self.partner1)
        call_args = mock_post.call_args
        body = json.loads(call_args.kwargs.get("data", call_args[1].get("data")))
        self.assertEqual(body["event"], "unlink")
        records = body["records"]
        self.assertEqual(len(records), 1)
        self.assertIn("id", records[0])
        self.assertEqual(records[0]["id"], self.partner1.id)

    # -- _send_payload tests --

    def test_send_payload_hmac(self):
        """X-Webhook-Signature header contains HMAC-SHA256."""
        with self._mock_post_success() as mock_post:
            payload = {"test": True}
            self.webhook._send_payload(payload)
        call_args = mock_post.call_args
        headers = call_args.kwargs.get("headers", call_args[1].get("headers"))
        self.assertIn("X-Webhook-Signature", headers)
        # Verify the signature matches
        body = json.dumps(payload, ensure_ascii=False)
        expected_sig = hmac.new(
            b"test-secret-key",
            body.encode("utf-8"),
            hashlib.sha256,
        ).hexdigest()
        self.assertEqual(headers["X-Webhook-Signature"], expected_sig)

    def test_send_payload_custom_headers(self):
        """Custom headers from header_ids are included."""
        with self._mock_post_success() as mock_post:
            self.webhook._send_payload({"test": True})
        call_args = mock_post.call_args
        headers = call_args.kwargs.get("headers", call_args[1].get("headers"))
        self.assertEqual(headers.get("X-Custom-Header"), "custom-value")

    def test_send_payload_retry_on_failure(self):
        """Retries up to max_retries on HTTP error."""
        with self._mock_post_failure() as mock_post, mock.patch("time.sleep"):
            with self.assertRaises(requests.RequestException):
                self.webhook._send_payload({"test": True})
        self.assertEqual(mock_post.call_count, self.webhook.max_retries)

    def test_send_payload_state_on_error(self):
        """State changes to 'error' after all retries fail."""
        with self._mock_post_failure(), mock.patch("time.sleep"):
            self.webhook._trigger_webhook("create", self.partner1)
        self.assertEqual(self.webhook.state, "error")

    # -- action_reset_state test --

    def test_action_reset_state(self):
        """Resets state back to 'active'."""
        self.webhook.sudo().write({"state": "error"})
        self.webhook.action_reset_state()
        self.assertEqual(self.webhook.state, "active")
# Copyright 2026 KOBROS-TECH LTD (https://kobros-tech.com).
# @author Mohamed Alkobrosli
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
"""Tests for the json_export_engine tools: parser resolver and serializer."""

import base64
import unittest

from odoo.addons.json_export_engine.tools.resolver import IrExportsResolver
from odoo.addons.json_export_engine.tools.serializer import JsonExportSerializer

from .common import JsonExportTestCase


class TestIrExportsResolver(unittest.TestCase):
    """Pure unit tests for IrExportsResolver (no database needed)."""

    def test_resolve_simple_fields(self):
        """Simple dict fields resolve to plain field-name strings."""
        parser = {"fields": [{"name": "name"}, {"name": "email"}]}
        result = IrExportsResolver(parser).resolved_parser
        self.assertEqual(result, ["name", "email"])

    def test_resolve_relational_fields(self):
        """Nested tuple fields resolve to (name, [sub_fields])."""
        parser = {
            "fields": [
                ({"name": "categ_id"}, [{"name": "name"}, {"name": "id"}]),
            ]
        }
        result = IrExportsResolver(parser).resolved_parser
        self.assertEqual(len(result), 1)
        self.assertIsInstance(result[0], tuple)
        self.assertEqual(result[0][0], "categ_id")
        self.assertEqual(result[0][1], ["name", "id"])

    def test_resolve_mixed_fields(self):
        """Mix of simple and relational fields keeps the original order."""
        parser = {
            "fields": [
                {"name": "name"},
                ({"name": "country_id"}, [{"name": "name"}, {"name": "code"}]),
                {"name": "phone"},
            ]
        }
        result = IrExportsResolver(parser).resolved_parser
        self.assertEqual(len(result), 3)
        self.assertEqual(result[0], "name")
        self.assertIsInstance(result[1], tuple)
        self.assertEqual(result[1][0], "country_id")
        self.assertEqual(result[2], "phone")

    def test_resolve_deep_nesting(self):
        """Multi-level nested relations resolve recursively."""
        parser = {
            "fields": [
                (
                    {"name": "partner_id"},
                    [
                        {"name": "name"},
                        ({"name": "country_id"}, [{"name": "name"}]),
                    ],
                ),
            ]
        }
        result = IrExportsResolver(parser).resolved_parser
        self.assertEqual(len(result), 1)
        partner_tuple = result[0]
        self.assertEqual(partner_tuple[0], "partner_id")
        sub_fields = partner_tuple[1]
        self.assertEqual(sub_fields[0], "name")
        self.assertIsInstance(sub_fields[1], tuple)
        self.assertEqual(sub_fields[1][0], "country_id")

    def test_resolve_empty(self):
        """Empty input returns empty list."""
        self.assertEqual(IrExportsResolver({}).resolved_parser, [])
        self.assertEqual(IrExportsResolver({"fields": []}).resolved_parser, [])

    def test_resolve_no_fields_key(self):
        """Missing 'fields' key returns empty list."""
        self.assertEqual(IrExportsResolver({"other": "data"}).resolved_parser, [])

    def test_resolve_broken_tuple(self):
        """Broken tuple structure is filtered out."""
        parser = {
            "fields": [
                {"name": "name"},
                ("not_a_dict", [{"name": "name"}]),
            ]
        }
        result = IrExportsResolver(parser).resolved_parser
        # The broken tuple resolves to an empty list and is filtered out,
        # leaving only the valid entries.
        self.assertEqual(result, ["name"])


class TestJsonExportSerializer(JsonExportTestCase):
    """Integration tests for JsonExportSerializer (needs database for jsonify)."""

    def test_serialize_single_record(self):
        """Serializes a single record into a dict."""
        parser = self.schema._get_parser()
        serializer = JsonExportSerializer(parser)
        result = serializer.serialize(self.partner1)
        self.assertIsInstance(result, dict)
        self.assertIn("name", result)
        self.assertEqual(result["name"], "Test Partner 1")

    def test_serialize_many_records(self):
        """Serializes multiple records into a list of dicts."""
        parser = self.schema._get_parser()
        serializer = JsonExportSerializer(parser)
        records = self.partner1 | self.partner2
        result = serializer.serialize_many(records)
        self.assertIsInstance(result, list)
        self.assertEqual(len(result), 2)
        names = [item["name"] for item in result]
        self.assertIn("Test Partner 1", names)
        self.assertIn("Test Partner 2", names)

    def test_process_binary_values(self):
        """bytes values are converted to base64 strings."""
        serializer = JsonExportSerializer([])
        data = {"field": b"hello world"}
        result = serializer._process_values(data)
        expected = base64.b64encode(b"hello world").decode("utf-8")
        self.assertEqual(result["field"], expected)

    def test_process_nested_dicts(self):
        """Nested dicts are recursively processed."""
        serializer = JsonExportSerializer([])
        data = {
            "outer": {
                "inner_bytes": b"nested",
                "inner_str": "keep",
            }
        }
        result = serializer._process_values(data)
        expected = base64.b64encode(b"nested").decode("utf-8")
        self.assertEqual(result["outer"]["inner_bytes"], expected)
        self.assertEqual(result["outer"]["inner_str"], "keep")

    def test_process_nested_lists(self):
        """Lists of dicts are recursively processed."""
        serializer = JsonExportSerializer([])
        data = {
            "items": [
                {"val": b"bytes1"},
                {"val": "string"},
            ]
        }
        result = serializer._process_values(data)
        expected = base64.b64encode(b"bytes1").decode("utf-8")
        self.assertEqual(result["items"][0]["val"], expected)
        self.assertEqual(result["items"][1]["val"], "string")
class IrExportsResolver:
    """Convert branch data from an ``ir.exports`` record into the parser
    structure that the jsonifier module can consume.

    Expected input (one entry per exported field; relational fields are
    2-tuples of ``(parent_dict, [child entries])``)::

        {
            'fields': [
                {'name': 'name'},
                (
                    {'name': 'categ_id'},
                    [{'name': 'name'}, {'name': 'sale_ok'}, {'name': 'purchase_ok'}]
                )
            ]
        }

    Resolved output::

        ["name", ("categ_id", ["name", "sale_ok", "purchase_ok"])]

    Entries that are already plain strings pass through unchanged.
    Broken branches (tuples that do not match the expected shape) resolve
    to an empty list and are filtered out, at any nesting depth, so a
    single malformed entry cannot corrupt sibling branches or the root.
    """

    def __init__(self, parser):
        """Resolve *parser* eagerly; the result is ``self.resolved_parser``."""
        fields = []
        if parser.get("fields") and isinstance(parser["fields"], list):
            fields = parser["fields"]
        resolved = [self.convert(field) for field in fields]
        # Drop the empty-list placeholders produced by broken branches.
        self.resolved_parser = [item for item in resolved if item]

    def get_dict_key(self, field):
        """Return the field name from a ``{'name': ...}`` dict, or the
        value unchanged when it is already a plain name."""
        if isinstance(field, dict) and "name" in field:
            return field["name"]
        return field

    def resolve_tuple_field(self, field):
        """Resolve a relational branch ``(parent_dict, [children])``.

        Returns ``(name, [resolved children])`` on success, or an empty
        list when the branch is malformed so callers can filter it out
        without affecting other branches.
        """
        if isinstance(field, tuple) and len(field) == 2:
            parent, children = field
            if isinstance(parent, dict):
                resolved_children = [
                    self.get_dict_key(child)
                    if isinstance(child, dict)
                    else self.resolve_tuple_field(child)
                    for child in children
                ]
                # Filter broken nested branches too, consistently with the
                # top-level filtering done in __init__ (previously a broken
                # nested tuple left a stray [] inside the child list).
                return (
                    self.get_dict_key(parent),
                    [child for child in resolved_children if child],
                )
        # Safeguard: a broken branch becomes an empty list to protect the
        # other branches and the root.
        return []

    def convert(self, field):
        """Resolve one top-level entry (dict, plain string, or tuple)."""
        if isinstance(field, dict):
            return self.get_dict_key(field)
        if isinstance(field, str):
            # Already-resolved field names pass through unchanged
            # (previously they were mistakenly dropped).
            return field
        return self.resolve_tuple_field(field)
import base64

try:
    # fs_image is an optional dependency; when installed, filesystem image
    # values are exported as URLs instead of inline binary data.
    from odoo.addons.fs_image.fields import FSImageValue
except ImportError:
    FSImageValue = None


class JsonExportSerializer:
    """Generic JSON serializer using jsonifier's ``record.jsonify()``.

    :param parser: resolved parser list (e.g. produced by
        ``IrExportsResolver``), such as ``["name", ("categ_id", ["name"])]``.
    """

    def __init__(self, parser):
        self.parser = parser

    def serialize(self, record):
        """Serialize a single record into a dict."""
        data = record.jsonify(self.parser, one=True)
        return self._process_values(data)

    def serialize_many(self, records):
        """Serialize a recordset into a list of dicts."""
        result = records.jsonify(self.parser)
        return [self._process_values(item) for item in result]

    def _process_values(self, data):
        """Post-process a serialized dict in place and return it.

        Binary values become base64 strings, FS image values become URLs,
        and nested dicts/lists are processed recursively.
        """
        for key, value in data.items():
            data[key] = self._process_value(value)
        return data

    def _process_value(self, value):
        """Convert one serialized value to its JSON-safe equivalent."""
        if isinstance(value, bytes):
            return base64.b64encode(value).decode("utf-8")
        if FSImageValue and isinstance(value, FSImageValue):
            # Prefer the public URL path, falling back to alternate URLs.
            return value.url_path or value.url or value.internal_url
        if isinstance(value, dict):
            return self._process_values(value)
        if isinstance(value, list):
            # Generalized: convert special values (e.g. bytes) that appear
            # directly inside lists too, not only inside nested dicts.
            return [self._process_value(item) for item in value]
        return value
+ + + + + + + + + + + + + + + + + +
+

+ +

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ +
+
+ + + + json.export.schema.tree + json.export.schema + + + + + + + + + + + + + json.export.schema.search + json.export.schema + + + + + + + + + + + + + + + + Export Schemas + json.export.schema + tree,form + + {'search_default_active': 1} + + +
diff --git a/json_export_engine/views/json_export_webhook_views.xml b/json_export_engine/views/json_export_webhook_views.xml new file mode 100644 index 00000000000..5ef46d96781 --- /dev/null +++ b/json_export_engine/views/json_export_webhook_views.xml @@ -0,0 +1,95 @@ + + + + + + json.export.webhook.form + json.export.webhook + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+ + + + json.export.webhook.tree + json.export.webhook + + + + + + + + + + + + + + + + + + Webhooks + json.export.webhook + tree,form + + +
diff --git a/json_export_engine/views/menu.xml b/json_export_engine/views/menu.xml new file mode 100644 index 00000000000..f73b836741b --- /dev/null +++ b/json_export_engine/views/menu.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/requirements.txt b/requirements.txt index 6dbfaa761a3..f4b6f411250 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,6 +14,7 @@ openupgradelib paramiko<4.0.0 pygount pysftp +requests sentry_sdk<=1.9.0 unidecode unittest-xml-reporting diff --git a/setup/json_export_engine/odoo/addons/json_export_engine b/setup/json_export_engine/odoo/addons/json_export_engine new file mode 120000 index 00000000000..2c97d1f2254 --- /dev/null +++ b/setup/json_export_engine/odoo/addons/json_export_engine @@ -0,0 +1 @@ +../../../../json_export_engine \ No newline at end of file diff --git a/setup/json_export_engine/setup.py b/setup/json_export_engine/setup.py new file mode 100644 index 00000000000..28c57bb6403 --- /dev/null +++ b/setup/json_export_engine/setup.py @@ -0,0 +1,6 @@ +import setuptools + +setuptools.setup( + setup_requires=['setuptools-odoo'], + odoo_addon=True, +) From 69b759be6cad579fc7e68b1e34a6e9babd3290cf Mon Sep 17 00:00:00 2001 From: kobros-tech Date: Tue, 17 Feb 2026 21:32:12 +0300 Subject: [PATCH 2/4] review and fix some issues --- json_export_engine/README.rst | 27 ++++- json_export_engine/controllers/main.py | 67 ++++++++--- .../models/json_export_endpoint.py | 47 +++++++- .../models/json_export_schema.py | 108 +++++++++++++++++- json_export_engine/readme/CONFIGURE.md | 27 ++++- .../static/description/index.html | 41 +++++-- .../static/src/json_export_widget.esm.js | 4 +- json_export_engine/tests/test_controller.py | 85 ++++++++++---- .../tests/test_json_export_endpoint.py | 42 ++++++- .../tests/test_json_export_schema.py | 11 +- .../views/json_export_endpoint_views.xml | 18 ++- .../views/json_export_schema_views.xml | 4 +- 12 files changed, 403 insertions(+), 78 deletions(-) diff --git 
a/json_export_engine/README.rst b/json_export_engine/README.rst index f9fe50d75df..0395a359021 100644 --- a/json_export_engine/README.rst +++ b/json_export_engine/README.rst @@ -99,18 +99,35 @@ Configuration 2. Add an endpoint with a route path (e.g., ``partners``). -3. Choose authentication type and generate an API key if needed. +3. Choose authentication type: + + - **API Key**: A key is auto-generated when you select this option. + Use the copy button to grab it. Pass via the ``X-API-Key`` HTTP + header. + - **Session (Logged-in User)**: Uses Odoo's session cookie. + - **No Authentication**: Open access (use with caution). 4. Two URLs are generated for each endpoint: - **Data URL**: ``https://your-odoo.com/api/json_export/partners`` - returns paginated JSON data + returns JSON data - **Schema URL**: ``https://your-odoo.com/api/json_export/partners/schema`` returns - the JSON Schema (draft-07) for the endpoint + the full API response schema (JSON Schema draft-07) + +5. **Pagination** is controlled from the endpoint settings: + + - Enable **Paginate** and set **Page Size** to split results into + pages. Navigate with ``?page=2`` or ``?page=last``. + - Disable **Paginate** to return all records in a single response. + - The response includes navigation links (``first``, ``last``, + ``next``, ``prev``) when pagination is enabled. + +6. Example call with API key: + + :: -5. Pagination parameters: ``?page=1&limit=50`` (capped by the endpoint's - max limit setting). + curl -H "X-API-Key: " https://your-odoo.com/api/json_export/partners **Webhooks:** diff --git a/json_export_engine/controllers/main.py b/json_export_engine/controllers/main.py index b1d64367c0a..0f6d08b2f31 100644 --- a/json_export_engine/controllers/main.py +++ b/json_export_engine/controllers/main.py @@ -2,6 +2,7 @@ # @author Mohamed Alkobrosli # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
+import hmac import json import math import time @@ -36,19 +37,47 @@ def export_data(self, path, **kwargs): start_time = time.time() try: - # Parse pagination - page = max(int(kwargs.get("page", 1)), 1) - limit = int(kwargs.get("limit", endpoint.default_limit)) - limit = min(max(limit, 1), endpoint.max_limit) - offset = (page - 1) * limit - # Count total records domain = schema._get_domain() model = request.env[schema.model_name].sudo() total = model.search_count(domain) - # Fetch and serialize - records = schema.sudo()._get_records(limit=limit, offset=offset) + base_path = "/api/json_export/%s" % path.strip("/") + + if endpoint.paginate: + page_size = max(endpoint.page_size, 1) + total_pages = math.ceil(total / page_size) if total else 1 + + # Support ?page=last + raw_page = kwargs.get("page", "1") + if str(raw_page).lower() == "last": + page = total_pages + else: + page = min(max(int(raw_page), 1), total_pages) + + offset = (page - 1) * page_size + records = schema.sudo()._get_records(limit=page_size, offset=offset) + + # Build navigation links + nav = { + "first": "%s?page=1" % base_path, + "last": "%s?page=%d" % (base_path, total_pages), + "next": ( + "%s?page=%d" % (base_path, page + 1) + if page < total_pages + else None + ), + "prev": ( + "%s?page=%d" % (base_path, page - 1) if page > 1 else None + ), + } + else: + page_size = total + page = 1 + total_pages = 1 + nav = {} + records = schema.sudo()._get_records(no_limit=True) + data = schema.sudo()._serialize_records(records) duration = int((time.time() - start_time) * 1000) @@ -58,9 +87,10 @@ def export_data(self, path, **kwargs): "data": data, "pagination": { "page": page, - "limit": limit, + "page_size": page_size, "total": total, - "pages": math.ceil(total / limit) if limit else 0, + "pages": total_pages, + **nav, }, "meta": { "schema": schema.name, @@ -80,7 +110,7 @@ def export_data(self, path, **kwargs): "endpoint": endpoint.name, "path": path, "page": page, - "limit": limit, + "page_size": page_size, } 
    @api.onchange("auth_type")
    def _onchange_auth_type(self):
        """Auto-generate an API key when switching to API Key auth.

        Fires only in the form UI; programmatic creation is covered by the
        ``create`` override below, and an endpoint saved with API-key auth
        but no key is rejected by the ``_check_api_key_required`` constraint.
        The generation timestamp is recorded for key-rotation tracking.
        """
        if self.auth_type == "api_key" and not self.api_key:
            self.api_key = secrets.token_hex(32)
            self.api_key_generated_at = fields.Datetime.now()

    @api.model_create_multi
    def create(self, vals_list):
        """Create endpoints, generating an API key where needed.

        Ensures records created with API-key auth get a key even when the
        onchange did not run (e.g. creation from code, RPC, or imports).
        Keys supplied explicitly in *vals* are left untouched.
        """
        for vals in vals_list:
            if vals.get("auth_type") == "api_key" and not vals.get("api_key"):
                vals["api_key"] = secrets.token_hex(32)
                vals["api_key_generated_at"] = fields.Datetime.now()
        return super().create(vals_list)
+ ) + ) + @api.depends("route_path") def _compute_full_url(self): base_url = self.env["ir.config_parameter"].sudo().get_param("web.base.url") @@ -103,6 +136,8 @@ def _check_route_path(self): def action_generate_api_key(self): """Generate a new random API key.""" + now = fields.Datetime.now() for rec in self: rec.api_key = secrets.token_hex(32) + rec.api_key_generated_at = now return True diff --git a/json_export_engine/models/json_export_schema.py b/json_export_engine/models/json_export_schema.py index 766da7bc854..edee6619256 100644 --- a/json_export_engine/models/json_export_schema.py +++ b/json_export_engine/models/json_export_schema.py @@ -101,13 +101,117 @@ def _compute_json_schema(self): rec.json_schema = "" continue try: - schema = rec._generate_json_schema() - rec.json_schema = json.dumps(schema, indent=2, ensure_ascii=False) + record_schema = rec._generate_json_schema() + api_schema = rec._wrap_api_response_schema(record_schema) + rec.json_schema = json.dumps(api_schema, indent=2, ensure_ascii=False) except Exception as e: rec.json_schema = json.dumps( {"error": str(e)}, indent=2, ensure_ascii=False ) + def _wrap_api_response_schema(self, record_schema): + """Wrap a record-level schema in the full API response envelope.""" + nullable_string = {"anyOf": [{"type": "string"}, {"type": "null"}]} + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "%s — API Response" % record_schema.get("title", "Export"), + "description": "%s Supports ?page=N to navigate pages " + "and ?page=last to jump to the last page." 
+ % record_schema.get("description", ""), + "type": "object", + "required": ["success", "data", "pagination", "meta"], + "additionalProperties": False, + "properties": { + "success": { + "type": "boolean", + "description": "Whether the request was successful.", + }, + "data": { + "type": "array", + "description": "List of exported records.", + "items": record_schema, + }, + "pagination": { + "type": "object", + "description": "Pagination metadata and navigation links.", + "required": [ + "page", + "page_size", + "total", + "pages", + "first", + "last", + "next", + "prev", + ], + "additionalProperties": False, + "properties": { + "page": { + "type": "integer", + "description": "Current page number.", + "minimum": 1, + }, + "page_size": { + "type": "integer", + "description": "Number of records per page.", + "minimum": 1, + }, + "total": { + "type": "integer", + "description": "Total number of records " + "matching the query.", + "minimum": 0, + }, + "pages": { + "type": "integer", + "description": "Total number of pages.", + "minimum": 1, + }, + "first": { + "type": "string", + "description": "URL to the first page.", + }, + "last": { + "type": "string", + "description": "URL to the last page.", + }, + "next": { + **nullable_string, + "description": "URL to the next page, " + "or null if on the last page.", + }, + "prev": { + **nullable_string, + "description": "URL to the previous page, " + "or null if on the first page.", + }, + }, + }, + "meta": { + "type": "object", + "description": "Request metadata.", + "required": ["schema", "model", "duration_ms"], + "additionalProperties": False, + "properties": { + "schema": { + "type": "string", + "description": "Name of the export schema.", + }, + "model": { + "type": "string", + "description": "Odoo model name.", + }, + "duration_ms": { + "type": "integer", + "description": "Server-side processing time " + "in milliseconds.", + "minimum": 0, + }, + }, + }, + }, + } + def _generate_json_schema(self): """Generate a JSON 
Schema (draft-07) from the resolved parser and model fields.""" self.ensure_one() diff --git a/json_export_engine/readme/CONFIGURE.md b/json_export_engine/readme/CONFIGURE.md index 6250daed9cd..1edc846018a 100644 --- a/json_export_engine/readme/CONFIGURE.md +++ b/json_export_engine/readme/CONFIGURE.md @@ -16,16 +16,33 @@ 1. In the schema form, go to the **Endpoints** tab. 2. Add an endpoint with a route path (e.g., ``partners``). -3. Choose authentication type and generate an API key if needed. +3. Choose authentication type: + + - **API Key**: A key is auto-generated when you select this option. + Use the copy button to grab it. Pass via the ``X-API-Key`` HTTP header. + - **Session (Logged-in User)**: Uses Odoo's session cookie. + - **No Authentication**: Open access (use with caution). + 4. Two URLs are generated for each endpoint: - **Data URL**: ``https://your-odoo.com/api/json_export/partners`` - returns paginated JSON data + returns JSON data - **Schema URL**: ``https://your-odoo.com/api/json_export/partners/schema`` - returns the JSON Schema (draft-07) for the endpoint + returns the full API response schema (JSON Schema draft-07) + +5. **Pagination** is controlled from the endpoint settings: + + - Enable **Paginate** and set **Page Size** to split results into pages. + Navigate with ``?page=2`` or ``?page=last``. + - Disable **Paginate** to return all records in a single response. + - The response includes navigation links (``first``, ``last``, + ``next``, ``prev``) when pagination is enabled. + +6. Example call with API key: -5. Pagination parameters: ``?page=1&limit=50`` - (capped by the endpoint's max limit setting). 
+ ``` + curl -H "X-API-Key: " https://your-odoo.com/api/json_export/partners + ``` **Webhooks:** diff --git a/json_export_engine/static/description/index.html b/json_export_engine/static/description/index.html index 5c16c5b44fd..f2228443006 100644 --- a/json_export_engine/static/description/index.html +++ b/json_export_engine/static/description/index.html @@ -437,20 +437,43 @@

Configuration

(draft-07) describing the structure of the exported data.

REST Endpoints:

-
    -
  1. In the schema form, go to the Endpoints tab.
  2. -
  3. Add an endpoint with a route path (e.g., partners).
  4. -
  5. Choose authentication type and generate an API key if needed.
  6. -
  7. Two URLs are generated for each endpoint: