Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
673525f
[FIX] runbot: sort versions on build error
d-fence Feb 2, 2026
f79688b
[IMP] runbot: add a visual warning on active field
d-fence Feb 2, 2026
781bfbc
[IMP] runbot: recompute error randomness on content update
pparidans Jan 21, 2026
b0a126e
[FIX] runbot: click/middle-click on FrontendUrl fields
pparidans Jan 5, 2026
74e95ad
[FIX] runbot: properly sort version on build error
d-fence Feb 5, 2026
0fbf1aa
[IMP] runbot: add support for check based on semgrep rules
Xavier-Do Jan 22, 2026
dc222a5
[IMP] error management
Xavier-Do Jul 30, 2025
ee5441b
[IMP] runbot: improve error merge
d-fence Feb 11, 2026
ee28177
[IMP] runbot: notify new root files
Xavier-Do Feb 11, 2026
ca30731
[FIX] runbot: adapt for ps dynamic
Xavier-Do Feb 4, 2026
d9a12af
[IMP] runbot: check write rights on runbot error fields
d-fence Feb 6, 2026
bde7732
[FIX] runbot: fetch threehash
Xavier-Do Dec 29, 2025
a1d3185
[FIX] runbot: fix crashing when a batch is preparing
d-fence Jan 26, 2026
20ad0c0
[IMP] runbot: display when triggers depend on another
Xavier-Do Jan 7, 2026
9770d6b
[FIX] runbot: fix drop database timeout
Xavier-Do Feb 12, 2026
2be6ea8
[IMP] runbot: add link to project's "next freeze"
pparidans Dec 18, 2025
d84dd67
[IMP] runbot: add a cache system for dockerfiles
d-fence Jan 6, 2026
f3116cb
[IMP] runbot: allow requests in server actions
Xavier-Do Feb 12, 2026
e07b568
[IMP] runbot: update default Chrome version
pparidans Feb 12, 2026
4a3004b
[FIX] runbot: better error handling during docker builds
d-fence Feb 13, 2026
503b1eb
[FIX] runbot: fix incorrect join
Xavier-Do Feb 16, 2026
d49a4ab
[IMP] runbot: allow to define if a trigger needs the version or not
Xavier-Do Feb 16, 2026
0f2b55a
[IMP] runbot: make params version_id optional
Xavier-Do Feb 18, 2026
c57a673
[IMP] runbot: allow a trigger to use an extra slot
Xavier-Do Feb 23, 2026
1fdf95a
[FIX] runbot: don't link rebase on builds
Xavier-Do Feb 16, 2026
b41e813
[IMP] runbot: add priority level to build_views
Xavier-Do Feb 26, 2026
14bf876
[IMP] runbot: add cache to some layers
Xavier-Do Feb 27, 2026
e186c48
[IMP] runbot: add authors and teams on bundles
d-fence Dec 9, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion runbot/__manifest__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
'author': "Odoo SA",
'website': "http://runbot.odoo.com",
'category': 'Website',
'version': '5.14',
'version': '5.16',
'application': True,
'depends': ['base', 'base_automation', 'website', 'auth_oauth'],
'data': [
Expand Down Expand Up @@ -57,6 +57,7 @@
'views/oauth_provider_views.xml',
'views/repo_views.xml',
'views/res_config_settings_views.xml',
'views/semgrep_rules.xml',
'views/stat_views.xml',
'views/upgrade.xml',
'views/upgrade_matrix_views.xml',
Expand Down
2 changes: 1 addition & 1 deletion runbot/container.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def _docker_build(build_dir, image_tag, pull=False):
"""Build the docker image
:param build_dir: the build directory that contains Dockerfile.
:param image_tag: name used to tag the resulting docker image
:return: tuple(success, msg) where success is a boolean and msg is the error message or None
:return: dict
"""

with DockerManager(image_tag) as dm:
Expand Down
19 changes: 12 additions & 7 deletions runbot/controllers/frontend.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,12 @@ def batch(self, batch_id=None, **kwargs):
}
return request.render('runbot.batch', context)

@route(['/runbot/batch/<int:batch_id>/prioritize'], website=True, auth='user', type='http', sitemap=False)
def batch_priority(self, batch_id=None, **kwargs):
    """Boost a batch's scheduling priority, then redirect back to its page.

    The priority level is derived from the batch creation date; backdating it
    by one hour makes the batch sort ahead of its peers (lower is higher
    priority).
    """
    target = request.env['runbot.batch'].browse(batch_id)
    boosted_level = int(target.create_date.timestamp() - 3600)
    target.sudo().priority_level = boosted_level
    return werkzeug.utils.redirect('/runbot/batch/%s' % batch_id)

@route(['/runbot/batch/slot/<model("runbot.batch.slot"):slot>/build'], auth='user', type='http')
def slot_create_build(self, slot=None, **kwargs):
build = slot.sudo()._create_missing_build()
Expand Down Expand Up @@ -852,21 +858,20 @@ def repos_heads(self, project_id=None, bundle_name=None, **kwargs):
else:
domain = Domain.AND([domain, [('sticky', '=', True)]])
bundles = request.env['runbot.bundle'].search(domain, order='id desc, name')

last_batches_infos = {
bundle.name: {
last_batches_infos = dict()
for bundle in bundles:
batch = bundle.last_batch if bundle.last_batch.state != 'preparing' else bundle.last_done_batch
last_batches_infos[bundle.name] = {
"commits": [
{
"repo": commit_link.commit_id.repo_id.name,
"head": commit_link.commit_id.name,
"match_type": commit_link.match_type,
}
for commit_link in bundle.last_batch.commit_link_ids
for commit_link in batch.commit_link_ids
],
"autotags": request.env["runbot.build.error"].sudo()._disabling_tags(build_id=bundle.last_batch.slot_ids.build_id[0]),
"autotags": request.env["runbot.build.error"].sudo()._disabling_tags(build_id=batch.slot_ids.build_id[0]),
}
for bundle in bundles
}
return request.make_json_response(last_batches_infos)

@route([
Expand Down
6 changes: 4 additions & 2 deletions runbot/data/dockerfile_data.xml
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@
<field name="name">Install branch debian/control with latest postgresql-client</field>
<field name="values" eval="{'odoo_branch': 'master', 'os_release_name': '`lsb_release -s -c`'}"/>
<field name="content"># This layer updates the repository list to get the latest postgresql-client, mainly needed if the host postgresql version is higher than the default version of the docker os
# CACHE 60
ADD https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/debian/control /tmp/control.txt
RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc -o /etc/apt/trusted.gpg.d/psql_client.asc \
&amp;&amp; echo "deb http://apt.postgresql.org/pub/repos/apt/ {os_release_name}-pgdg main" &gt; /etc/apt/sources.list.d/pgclient.list \
Expand All @@ -136,7 +137,7 @@ RUN curl -sSL https://www.postgresql.org/media/keys/ACCC4CF8.asc -o /etc/apt/tru
<field name="dockerfile_id" ref="runbot.docker_default"/>
<field name="layer_type">template</field>
<field name="name">Install chrome</field>
<field name="values" eval="{'chrome_version': '126.0.6478.182-1'}"/>
<field name="values" eval="{'chrome_version': '141.0.7390.54-1'}"/>
<field name="content">RUN curl -sSL https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_{chrome_version}_amd64.deb -o /tmp/chrome.deb \
&amp;&amp; apt-get update \
&amp;&amp; apt-get -y install --no-install-recommends /tmp/chrome.deb \
Expand Down Expand Up @@ -195,7 +196,8 @@ ENV PIP_BREAK_SYSTEM_PACKAGES=1</field>
<field name="layer_type">template</field>
<field name="name">Install branch requirements</field>
<field name="values" eval="{'odoo_branch': 'master'}"/>
<field name="content">ADD --chown={USERNAME} https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/requirements.txt /tmp/requirements.txt
<field name="content"># CACHE 60
ADD --chown={USERNAME} https://raw.githubusercontent.com/odoo/odoo/{odoo_branch}/requirements.txt /tmp/requirements.txt
RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt</field>
</record>

Expand Down
2 changes: 1 addition & 1 deletion runbot/documentation/dynamic_config.md
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ The config steps are mainly defined by their `job_type`. The `name` key is also
```
The `db_name` is optional, usually set to all as a convention on runbot for databases that contain *almost* all modules. If not defined, the sanitized version of the name will be used.

`install_modules` and `install_default_modules` behave the same way except that `install_modules` will consider that we start with no module (prepends `.*` filter) while `install_default_modules` will be based on the runbot default module list (all available modules minus the repo blacklist)
`install_modules` and `install_default_modules` behave the same way except that `install_modules` will consider that we start with no module (prepends `-*` filter) while `install_default_modules` will be based on the runbot default module list (all available modules minus the repo blacklist)

Both entries will use the value as a runbot module filter, and then passed as the -i, [see corresponding section](#module-selection) for more info.

Expand Down
42 changes: 42 additions & 0 deletions runbot/migrations/19.0.5.15/post-migration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import logging

_logger = logging.getLogger(__name__)


def migrate(cr, version):
    """Move semgrep rules from the legacy ``x_runbot_semgrep_rules`` table
    into the new ``runbot_checker_category`` / ``runbot_semgrep_rule`` tables.

    :param cr: database cursor
    :param version: version the module is being upgraded from (unused)
    """
    # The legacy table only exists on databases where rules were managed
    # through custom (x_) models; skip the migration everywhere else.
    cr.execute("""
        SELECT to_regclass('public.x_runbot_semgrep_rules');
    """)
    if not cr.fetchone()[0]:
        return

    cr.execute("""SELECT "x_checker", "x_language", "x_maxver", "x_message", "x_minver", "x_name", "x_rule", "x_severity" FROM x_runbot_semgrep_rules""")
    results = cr.dictfetchall()
    _logger.info('Migrating %d semgrep rules', len(results))

    # One category per distinct legacy checker name, keeping a name -> id map
    # to link the rules below. Sorted so the generated ids are deterministic.
    category_map = {}
    for category in sorted({result['x_checker'] for result in results}):
        cr.execute("""
            INSERT INTO runbot_checker_category (name)
            VALUES (%s)
            RETURNING id
        """, (category,))
        category_map[category] = cr.fetchone()[0]

    # Re-create each rule, pointing it at its freshly created category.
    for result in results:
        cr.execute("""
            INSERT INTO runbot_semgrep_rule (name, category_id, language, max_version_number, min_version_number, message, rule, severity)
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
        """, (
            result['x_name'],
            category_map[result['x_checker']],
            result['x_language'],
            result['x_maxver'],
            result['x_minver'],
            result['x_message'],
            result['x_rule'],
            result['x_severity'],
        ))
8 changes: 8 additions & 0 deletions runbot/migrations/19.0.5.16/pre-migration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import logging

_logger = logging.getLogger(__name__)


def migrate(cr, version):
    """Add the new ``priority_level`` integer column to batches and builds
    before the ORM loads, so the registry upgrade finds it in place.

    :param cr: database cursor
    :param version: version the module is being upgraded from (unused)
    """
    for table in ('runbot_batch', 'runbot_build'):
        cr.execute('ALTER TABLE %s ADD COLUMN priority_level integer' % table)
2 changes: 2 additions & 0 deletions runbot/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from . import database
from . import docker
from . import host
from . import ir_action
from . import ir_cron
from . import ir_http
from . import ir_model_fields_converter
Expand All @@ -25,6 +26,7 @@
from . import res_config_settings
from . import res_users
from . import runbot
from . import semgrep_rule
from . import team
from . import upgrade
from . import user
Expand Down
11 changes: 9 additions & 2 deletions runbot/models/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ class Batch(models.Model):
slot_ids = fields.One2many('runbot.batch.slot', 'batch_id')
all_build_ids = fields.Many2many('runbot.build', compute='_compute_all_build_ids', help="Recursive builds")
state = fields.Selection([('preparing', 'Preparing'), ('ready', 'Ready'), ('done', 'Done'), ('skipped', 'Skipped')])
priority_level = fields.Integer("Priority level", help="Priority level of the batch, determined from the create date and the bundle priority offset. The lower, the higher priority.")
hidden = fields.Boolean('Hidden', default=False)
age = fields.Integer(compute='_compute_age', string='Build age')
category_id = fields.Many2one('runbot.category', index=True, default=lambda self: self.env.ref('runbot.default_category', raise_if_not_found=False))
Expand Down Expand Up @@ -163,7 +164,7 @@ def _create_build(self, params, slot):
build_type = 'normal'
if self.category_id != self.env.ref('runbot.default_category'):
build_type = 'scheduled'
elif self.bundle_id.priority:
elif self.bundle_id.priority or params.trigger_id.use_extra_slot:
build_type = 'priority'

build = self.env['runbot.build'].create({
Expand All @@ -182,6 +183,11 @@ def _create_build(self, params, slot):

def _prepare(self, auto_rebase=False, use_base_commits=False):
_logger.info('Preparing batch %s', self.id)

priority_offset = self.bundle_id.priority_offset
if not priority_offset and self.bundle_id.branch_ids.forwardport_of_id and self.bundle_id.last_batchs == self: # this is the only batch of a forwardported pr.
priority_offset = - 3600 * 5
self.priority_level = int(self.create_date.timestamp() - priority_offset)
if use_base_commits:
self._warning('This batch will use base commits instead of bundle commits')
if not self.bundle_id.base_id:
Expand Down Expand Up @@ -364,7 +370,7 @@ def _fill_missing(branch_commits, match_type):
base_commit_link_by_repos = {commit_link.commit_id.repo_id.id: commit_link for commit_link in self.base_reference_batch_id.commit_link_ids}
if use_base_commits:
commit_link_by_repos = base_commit_link_by_repos
version_id = self.bundle_id.version_id.id
bundle_version_id = self.bundle_id.version_id.id
project_id = self.bundle_id.project_id.id
trigger_customs = {}
for trigger_custom in self.bundle_id.all_trigger_custom_ids:
Expand All @@ -384,6 +390,7 @@ def _fill_missing(branch_commits, match_type):
self._warning(f'This batch will use base commits instead of bundle commits for trigger {trigger.name}')
trigger_commit_link_by_repos = base_commit_link_by_repos
commits_links = [trigger_commit_link_by_repos[repo.id].id for repo in trigger_repos]
version_id = bundle_version_id if (trigger.version_dependent or trigger.batch_dependent) else False
params_value = {
'version_id': version_id,
'extra_params': extra_params,
Expand Down
24 changes: 18 additions & 6 deletions runbot/models/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class BuildParameters(models.Model):
# execution parametter
commit_link_ids = fields.Many2many('runbot.commit.link', copy=True)
commit_ids = fields.Many2many('runbot.commit', compute='_compute_commit_ids')
version_id = fields.Many2one('runbot.version', required=True, index=True)
version_id = fields.Many2one('runbot.version', index=True)
project_id = fields.Many2one('runbot.project', required=True, index=True) # for access rights
trigger_id = fields.Many2one('runbot.trigger', index=True) # for access rights
create_batch_id = fields.Many2one('runbot.batch', index=True)
Expand Down Expand Up @@ -105,8 +105,18 @@ class BuildParameters(models.Model):

# @api.depends('version_id', 'project_id', 'extra_params', 'config_id', 'config_data', 'modules', 'commit_link_ids', 'builds_reference_ids')
def _compute_fingerprint(self):
def get_commit_links_ident(commit_link):
commit_idents = []
for c in commit_link.commit_id:
commit_ident = c.tree_hash or c.name
if c.rebase_on_id:
commit_ident += (c.rebase_on_id.tree_hash or c.rebase_on_id.name)
# in a ideal world, we would be able to determine what the real threehash would be
commit_idents.append(commit_ident)
return sorted(commit_idents)

for param in self:
commit_ident = sorted([c.tree_hash or '' for c in param.commit_link_ids.commit_id])
commit_ident = get_commit_links_ident(param.commit_link_ids)
if param.trigger_id.batch_dependent:
commit_ident = sorted(param.commit_link_ids.commit_id.ids)
cleaned_vals = {
Expand All @@ -125,7 +135,7 @@ def _compute_fingerprint(self):
}
if param.upgrade_to_build_id:
cleaned_vals['upgrade_to_build_dockerfile_id'] = param.upgrade_to_build_id.params_id.dockerfile_id.id
cleaned_vals['upgrade_to_build_commits'] = sorted([c.tree_hash or c.id for c in param.upgrade_to_build_id.params_id.commit_link_ids.commit_id])
cleaned_vals['upgrade_to_build_commits'] = get_commit_links_ident(param.upgrade_to_build_id.params_id.commit_link_ids)
if param.upgrade_from_build_id:
cleaned_vals['upgrade_from_build_id'] = param.upgrade_from_build_id.id
if param.trigger_id.batch_dependent:
Expand Down Expand Up @@ -266,6 +276,7 @@ class BuildResult(models.Model):
create_batch_id = fields.Many2one('runbot.batch', related='params_id.create_batch_id', store=True, index=True)
create_bundle_id = fields.Many2one('runbot.bundle', related='params_id.create_batch_id.bundle_id', index=True)
dynamic_config = JsonDictField('Dynamic Config', related='params_id.dynamic_config')
priority_level = fields.Integer('Priority', related='create_batch_id.priority_level', store=True, index=True)

# state machine
global_state = fields.Selection(make_selection(state_order), string='Status', compute='_compute_global_state', store=True, recursive=True)
Expand Down Expand Up @@ -512,6 +523,7 @@ def _add_child(self, param_values, orphan=False, description=False, additionnal_
'params_id': self.params_id.copy(param_values).id,
'parent_id': self.id,
'build_type': self.build_type,
'priority_level': self.priority_level,
'description': description,
'orphan_result': orphan,
'keep_host': self.keep_host,
Expand All @@ -534,7 +546,7 @@ def _result_multi(self):
def _compute_dest(self):
for build in self:
if build.id:
nickname = build.params_id.version_id.name
nickname = build.params_id.version_id.name or 'build'
nickname = re.sub(r'"|\'|~|\:', '', nickname)
nickname = re.sub(r'_|/|\.', '-', nickname)
build.dest = ("%05d-%s" % (build.id or 0, nickname[:32])).lower()
Expand Down Expand Up @@ -982,7 +994,7 @@ def _docker_run(self, step, cmd=None, ro_volumes=None, env_variables=None, **kwa
for dest, source in _ro_volumes.items():
ro_volumes[f'/data/build/{dest}'] = source
if 'image_tag' not in kwargs:
kwargs.update({'image_tag': self.params_id.dockerfile_id.image_tag})
kwargs.update({'image_tag': step.dockerfile_id.image_tag or self.params_id.dockerfile_id.image_tag})
dockerfile_variant = self.params_id.config_data.get('dockerfile_variant', step.dockerfile_variant)
if dockerfile_variant and f'.{dockerfile_variant.lower()}' not in kwargs['image_tag']:
kwargs['image_tag'] += f'.{dockerfile_variant.lower()}'
Expand Down Expand Up @@ -1114,7 +1126,7 @@ def _local_pg_dropdb(self, dbname):
with local_pgadmin_cursor() as local_cr:
query = 'SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname=%s'
local_cr.execute(query, [dbname])
local_cr.execute('SET LOCAL statement_timeout=10000') # avoid to be stuck if the dropdb is locked
local_cr.execute('SET statement_timeout=10000') # avoid to be stuck if the dropdb is locked
local_cr.execute('DROP DATABASE IF EXISTS "%s"' % dbname)
except Exception as e:
msg = f"Failed to drop local logs database : {dbname} with exception: {e}"
Expand Down
Loading